1 /* Emit RTL for the GCC expander.
2    Copyright (C) 1987-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 /* Middle-to-low level generation of rtx code and insns.
22 
23    This file contains support functions for creating rtl expressions
24    and manipulating them in the doubly-linked chain of insns.
25 
26    The patterns of the insns are created by machine-dependent
27    routines in insn-emit.c, which is generated automatically from
28    the machine description.  These routines make the individual rtx's
29    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30    which are automatically generated from rtl.def; what is machine
31    dependent is the kind of rtx's they make and what arguments they
32    use.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61 #include "predict.h"
62 #include "rtx-vector-builder.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "gimplify.h"
66 
67 struct target_rtl default_target_rtl;
68 #if SWITCHABLE_TARGET
69 struct target_rtl *this_target_rtl = &default_target_rtl;
70 #endif
71 
72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
73 
74 /* Commonly used modes.  */
75 
76 scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
77 scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
78 scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
79 
80 /* Data structures maintained for the currently processed function in RTL form.  */
81 
82 struct rtl_data x_rtl;
83 
84 /* Indexed by pseudo register number, gives the rtx for that pseudo.
85    Allocated in parallel with regno_pointer_align.
86    FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
87    with a length attribute nested in top-level structures.  */
88 
89 rtx * regno_reg_rtx;
90 
91 /* This is *not* reset after each function.  It gives each CODE_LABEL
92    in the entire compilation a unique label number.  */
93 
94 static GTY(()) int label_num = 1;
95 
96 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
97    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
98    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
99    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
100 
101 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
102 
103 rtx const_true_rtx;
104 
105 REAL_VALUE_TYPE dconst0;
106 REAL_VALUE_TYPE dconst1;
107 REAL_VALUE_TYPE dconst2;
108 REAL_VALUE_TYPE dconstm1;
109 REAL_VALUE_TYPE dconsthalf;
110 
111 /* Record fixed-point constant 0 and 1.  */
112 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
113 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
114 
115 /* We make one copy of (const_int C) where C is in
116    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
117    to save space during the compilation and simplify comparisons of
118    integers.  */
119 
120 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
121 
122 /* Standard pieces of rtx, to be substituted directly into things.  */
123 rtx pc_rtx;
124 rtx ret_rtx;
125 rtx simple_return_rtx;
126 
127 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
128    this pointer should normally never be dereferenced), but is required to be
129    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
130 rtx_insn *invalid_insn_rtx;
131 
132 /* A hash table storing CONST_INTs whose absolute value is greater
133    than MAX_SAVED_CONST_INT.  */
134 
135 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
136 {
137   typedef HOST_WIDE_INT compare_type;
138 
139   static hashval_t hash (rtx i);
140   static bool equal (rtx i, HOST_WIDE_INT h);
141 };
142 
143 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
144 
145 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
146 {
147   static hashval_t hash (rtx x);
148   static bool equal (rtx x, rtx y);
149 };
150 
151 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
152 
153 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
154 {
155   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
156 
157   static hashval_t hash (rtx x);
158   static bool equal (rtx x, const compare_type &y);
159 };
160 
161 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
162 
163 /* A hash table storing register attribute structures.  */
164 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
165 {
166   static hashval_t hash (reg_attrs *x);
167   static bool equal (reg_attrs *a, reg_attrs *b);
168 };
169 
170 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
171 
172 /* A hash table storing all CONST_DOUBLEs.  */
173 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
174 {
175   static hashval_t hash (rtx x);
176   static bool equal (rtx x, rtx y);
177 };
178 
179 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
180 
181 /* A hash table storing all CONST_FIXEDs.  */
182 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
183 {
184   static hashval_t hash (rtx x);
185   static bool equal (rtx x, rtx y);
186 };
187 
188 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
189 
190 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
191 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
192 #define first_label_num (crtl->emit.x_first_label_num)
193 
194 static void set_used_decls (tree);
195 static void mark_label_nuses (rtx);
196 #if TARGET_SUPPORTS_WIDE_INT
197 static rtx lookup_const_wide_int (rtx);
198 #endif
199 static rtx lookup_const_double (rtx);
200 static rtx lookup_const_fixed (rtx);
201 static rtx gen_const_vector (machine_mode, int);
202 static void copy_rtx_if_shared_1 (rtx *orig);
203 
204 /* Probability of the conditional branch currently processed by try_split.  */
205 profile_probability split_branch_probability;
206 
207 /* Returns a hash code for X (which is really a CONST_INT).  */
208 
209 hashval_t
210 const_int_hasher::hash (rtx x)
211 {
212   return (hashval_t) INTVAL (x);
213 }
214 
215 /* Returns nonzero if the value represented by X (which is really a
216    CONST_INT) is the same as that given by Y (which is really a
217    HOST_WIDE_INT).  */
218 
219 bool
220 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
221 {
222   return (INTVAL (x) == y);
223 }
224 
225 #if TARGET_SUPPORTS_WIDE_INT
226 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
227 
228 hashval_t
229 const_wide_int_hasher::hash (rtx x)
230 {
231   int i;
232   unsigned HOST_WIDE_INT hash = 0;
233   const_rtx xr = x;
234 
235   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
236     hash += CONST_WIDE_INT_ELT (xr, i);
237 
238   return (hashval_t) hash;
239 }
240 
241 /* Returns nonzero if the value represented by X (which is really a
242    CONST_WIDE_INT) is the same as that given by Y (which is really a
243    CONST_WIDE_INT).  */
244 
245 bool
246 const_wide_int_hasher::equal (rtx x, rtx y)
247 {
248   int i;
249   const_rtx xr = x;
250   const_rtx yr = y;
251   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
252     return false;
253 
254   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
255     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
256       return false;
257 
258   return true;
259 }
260 #endif
261 
262 /* Returns a hash code for CONST_POLY_INT X.  */
263 
264 hashval_t
265 const_poly_int_hasher::hash (rtx x)
266 {
267   inchash::hash h;
268   h.add_int (GET_MODE (x));
269   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
270     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
271   return h.end ();
272 }
273 
274 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */
275 
276 bool
277 const_poly_int_hasher::equal (rtx x, const compare_type &y)
278 {
279   if (GET_MODE (x) != y.first)
280     return false;
281   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
282     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
283       return false;
284   return true;
285 }
286 
287 /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
288 hashval_t
289 const_double_hasher::hash (rtx x)
290 {
291   const_rtx const value = x;
292   hashval_t h;
293 
294   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
295     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
296   else
297     {
298       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
299       /* MODE is used in the comparison, so it should be in the hash.  */
300       h ^= GET_MODE (value);
301     }
302   return h;
303 }
304 
305 /* Returns nonzero if the value represented by X (really a ...)
306    is the same as that represented by Y (really a ...).  */
307 bool
308 const_double_hasher::equal (rtx x, rtx y)
309 {
310   const_rtx const a = x, b = y;
311 
312   if (GET_MODE (a) != GET_MODE (b))
313     return 0;
314   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
315     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
316 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
317   else
318     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
319 			   CONST_DOUBLE_REAL_VALUE (b));
320 }
321 
322 /* Returns a hash code for X (which is really a CONST_FIXED).  */
323 
324 hashval_t
325 const_fixed_hasher::hash (rtx x)
326 {
327   const_rtx const value = x;
328   hashval_t h;
329 
330   h = fixed_hash (CONST_FIXED_VALUE (value));
331   /* MODE is used in the comparison, so it should be in the hash.  */
332   h ^= GET_MODE (value);
333   return h;
334 }
335 
336 /* Returns nonzero if the value represented by X is the same as that
337    represented by Y.  */
338 
339 bool
340 const_fixed_hasher::equal (rtx x, rtx y)
341 {
342   const_rtx const a = x, b = y;
343 
344   if (GET_MODE (a) != GET_MODE (b))
345     return 0;
346   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
347 }
348 
349 /* Return true if the given memory attributes are equal.  */
350 
351 bool
352 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
353 {
354   if (p == q)
355     return true;
356   if (!p || !q)
357     return false;
358   return (p->alias == q->alias
359 	  && p->offset_known_p == q->offset_known_p
360 	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
361 	  && p->size_known_p == q->size_known_p
362 	  && (!p->size_known_p || known_eq (p->size, q->size))
363 	  && p->align == q->align
364 	  && p->addrspace == q->addrspace
365 	  && (p->expr == q->expr
366 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
367 		  && operand_equal_p (p->expr, q->expr, 0))));
368 }
369 
370 /* Set MEM's memory attributes so that they are the same as ATTRS.  */
371 
372 static void
373 set_mem_attrs (rtx mem, mem_attrs *attrs)
374 {
375   /* If everything is the default, we can just clear the attributes.  */
376   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
377     {
378       MEM_ATTRS (mem) = 0;
379       return;
380     }
381 
382   if (!MEM_ATTRS (mem)
383       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
384     {
385       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
386       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
387     }
388 }
389 
390 /* Returns a hash code for X (which is really a reg_attrs *).  */
391 
392 hashval_t
393 reg_attr_hasher::hash (reg_attrs *x)
394 {
395   const reg_attrs *const p = x;
396 
397   inchash::hash h;
398   h.add_ptr (p->decl);
399   h.add_poly_hwi (p->offset);
400   return h.end ();
401 }
402 
403 /* Returns nonzero if the value represented by X  is the same as that given by
404    Y.  */
405 
406 bool
407 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
408 {
409   const reg_attrs *const p = x;
410   const reg_attrs *const q = y;
411 
412   return (p->decl == q->decl && known_eq (p->offset, q->offset));
413 }
414 /* Return a reg_attrs structure for DECL and OFFSET.  Allocate a new one and
415    insert it into the hash table if an identical structure is not already in
416    the table.  */
417 
418 static reg_attrs *
419 get_reg_attrs (tree decl, poly_int64 offset)
420 {
421   reg_attrs attrs;
422 
423   /* If everything is the default, we can just return zero.  */
424   if (decl == 0 && known_eq (offset, 0))
425     return 0;
426 
427   attrs.decl = decl;
428   attrs.offset = offset;
429 
430   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
431   if (*slot == 0)
432     {
433       *slot = ggc_alloc<reg_attrs> ();
434       memcpy (*slot, &attrs, sizeof (reg_attrs));
435     }
436 
437   return *slot;
438 }
439 
440 
441 #if !HAVE_blockage
442 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
443    across this insn and to keep register equivalences from being seen across it.  */
444 
445 rtx
446 gen_blockage (void)
447 {
448   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
449   MEM_VOLATILE_P (x) = true;
450   return x;
451 }
452 #endif
453 
454 
455 /* Set the mode and register number of X to MODE and REGNO.  */
456 
457 void
458 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
459 {
460   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
461 			? hard_regno_nregs (regno, mode)
462 			: 1);
463   PUT_MODE_RAW (x, mode);
464   set_regno_raw (x, regno, nregs);
465 }
466 
467 /* Initialize a fresh REG rtx with mode MODE and register REGNO.  */
468 
469 rtx
470 init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
471 {
472   set_mode_and_regno (x, mode, regno);
473   REG_ATTRS (x) = NULL;
474   ORIGINAL_REGNO (x) = regno;
475   return x;
476 }
477 
478 /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
479    don't attempt to share with the various global pieces of rtl (such as
480    frame_pointer_rtx).  */
481 
482 rtx
483 gen_raw_REG (machine_mode mode, unsigned int regno)
484 {
485   rtx x = rtx_alloc (REG MEM_STAT_INFO);
486   init_raw_REG (x, mode, regno);
487   return x;
488 }
489 
490 /* There are some RTL codes that require special attention; the generation
491    functions do the raw handling.  If you add to this list, modify
492    special_rtx in gengenrtl.c as well.  */
493 
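/* Return an EXPR_LIST node in mode MODE whose element is EXPR and whose
   rest of the list is EXPR_LIST.  */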
494 rtx_expr_list *
495 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
496 {
497   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
498 						 expr_list));
499 }
500 
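/* Likewise, return an INSN_LIST node in mode MODE whose element is INSN
   and whose rest of the list is INSN_LIST.  */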
501 rtx_insn_list *
502 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
503 {
504   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
505 						 insn_list));
506 }
507 
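/* Return a new INSN rtx built directly from its fields: the previous and
   next insns, the containing basic block, the pattern, the insn location,
   the INSN_CODE and the register notes.  */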
508 rtx_insn *
509 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
510 	      basic_block bb, rtx pattern, int location, int code,
511 	      rtx reg_notes)
512 {
513   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
514 						 prev_insn, next_insn,
515 						 bb, pattern, location, code,
516 						 reg_notes));
517 }
518 
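/* Return the shared CONST_INT rtx whose value is ARG.  MODE is ignored;
   CONST_INTs are always recorded with VOIDmode.  Values in the range
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come from a preallocated
   array, larger values are shared through const_int_htab.  */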
519 rtx
520 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
521 {
522   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
523     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
524 
525 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
526   if (const_true_rtx && arg == STORE_FLAG_VALUE)
527     return const_true_rtx;
528 #endif
529 
530   /* Look up the CONST_INT in the hash table.  */
531   rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
532 						   INSERT);
533   if (*slot == 0)
534     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
535 
536   return *slot;
537 }
538 
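/* Return an rtx for the integer value C, truncated so that it is valid
   for MODE.  The result is a CONST_INT when C is a compile-time constant,
   and otherwise a CONST_POLY_INT.  */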
539 rtx
540 gen_int_mode (poly_int64 c, machine_mode mode)
541 {
542   c = trunc_int_for_mode (c, mode);
543   if (c.is_constant ())
544     return GEN_INT (c.coeffs[0]);
545   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
546   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
547 }
548 
549 /* CONST_DOUBLEs might be created from pairs of integers, or from
550    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
551    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
552 
553 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
554    hash table.  If so, return its counterpart; otherwise add it
555    to the hash table and return it.  */
556 static rtx
557 lookup_const_double (rtx real)
558 {
559   rtx *slot = const_double_htab->find_slot (real, INSERT);
560   if (*slot == 0)
561     *slot = real;
562 
563   return *slot;
564 }
565 
566 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
567    VALUE in mode MODE.  */
568 rtx
569 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
570 {
571   rtx real = rtx_alloc (CONST_DOUBLE);
572   PUT_MODE (real, mode);
573 
574   real->u.rv = value;
575 
576   return lookup_const_double (real);
577 }
578 
579 /* Determine whether FIXED, a CONST_FIXED, already exists in the
580    hash table.  If so, return its counterpart; otherwise add it
581    to the hash table and return it.  */
582 
583 static rtx
584 lookup_const_fixed (rtx fixed)
585 {
586   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
587   if (*slot == 0)
588     *slot = fixed;
589 
590   return *slot;
591 }
592 
593 /* Return a CONST_FIXED rtx for a fixed-point value specified by
594    VALUE in mode MODE.  */
595 
596 rtx
597 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
598 {
599   rtx fixed = rtx_alloc (CONST_FIXED);
600   PUT_MODE (fixed, mode);
601 
602   fixed->u.fv = value;
603 
604   return lookup_const_fixed (fixed);
605 }
606 
607 #if TARGET_SUPPORTS_WIDE_INT == 0
608 /* Constructs double_int from rtx CST.  */
609 
610 double_int
611 rtx_to_double_int (const_rtx cst)
612 {
613   double_int r;
614 
615   if (CONST_INT_P (cst))
616       r = double_int::from_shwi (INTVAL (cst));
617   else if (CONST_DOUBLE_AS_INT_P (cst))
618     {
619       r.low = CONST_DOUBLE_LOW (cst);
620       r.high = CONST_DOUBLE_HIGH (cst);
621     }
622   else
623     gcc_unreachable ();
624 
625   return r;
626 }
627 #endif
628 
629 #if TARGET_SUPPORTS_WIDE_INT
630 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
631    If so, return its counterpart; otherwise add it to the hash table and
632    return it.  */
633 
634 static rtx
635 lookup_const_wide_int (rtx wint)
636 {
637   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
638   if (*slot == 0)
639     *slot = wint;
640 
641   return *slot;
642 }
643 #endif
644 
645 /* Return an rtx constant for V, given that the constant has mode MODE.
646    The returned rtx will be a CONST_INT if V fits, otherwise it will be
647    a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
648    (if TARGET_SUPPORTS_WIDE_INT).  */
649 
650 static rtx
651 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
652 {
653   unsigned int len = v.get_len ();
654   /* Not scalar_int_mode because we also allow pointer bound modes.  */
655   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
656 
657   /* Allow truncation but not extension since we do not know if the
658      number is signed or unsigned.  */
659   gcc_assert (prec <= v.get_precision ());
660 
661   if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
662     return gen_int_mode (v.elt (0), mode);
663 
664 #if TARGET_SUPPORTS_WIDE_INT
665   {
666     unsigned int i;
667     rtx value;
668     unsigned int blocks_needed
669       = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
670 
671     if (len > blocks_needed)
672       len = blocks_needed;
673 
674     value = const_wide_int_alloc (len);
675 
676     /* It is so tempting to just put the mode in here.  Must control
677        myself ... */
678     PUT_MODE (value, VOIDmode);
679     CWI_PUT_NUM_ELEM (value, len);
680 
681     for (i = 0; i < len; i++)
682       CONST_WIDE_INT_ELT (value, i) = v.elt (i);
683 
684     return lookup_const_wide_int (value);
685   }
686 #else
687   return immed_double_const (v.elt (0), v.elt (1), mode);
688 #endif
689 }
690 
691 #if TARGET_SUPPORTS_WIDE_INT == 0
692 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
693    of ints: I0 is the low-order word and I1 is the high-order word.
694    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
695    implied upper bits are copies of the high bit of i1.  The value
696    itself is neither signed nor unsigned.  Do not use this routine for
697    non-integer modes; convert to REAL_VALUE_TYPE and use
698    const_double_from_real_value.  */
699 
700 rtx
701 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
702 {
703   rtx value;
704   unsigned int i;
705 
706   /* There are the following cases (note that there are no modes with
707      HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
708 
709      1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
710 	gen_int_mode.
711      2) If the value of the integer fits into HOST_WIDE_INT anyway
712         (i.e., i1 consists only of copies of the sign bit, and the signs
713 	of i0 and i1 are the same), then we return a CONST_INT for i0.
714      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
715   scalar_mode smode;
716   if (is_a <scalar_mode> (mode, &smode)
717       && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
718     return gen_int_mode (i0, mode);
719 
720   /* If this integer fits in one word, return a CONST_INT.  */
721   if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
722     return GEN_INT (i0);
723 
724   /* We use VOIDmode for integers.  */
725   value = rtx_alloc (CONST_DOUBLE);
726   PUT_MODE (value, VOIDmode);
727 
728   CONST_DOUBLE_LOW (value) = i0;
729   CONST_DOUBLE_HIGH (value) = i1;
730 
731   for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
732     XWINT (value, i) = 0;
733 
734   return lookup_const_double (value);
735 }
736 #endif
737 
738 /* Return an rtx representation of C in mode MODE.  */
739 
740 rtx
741 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
742 {
743   if (c.is_constant ())
744     return immed_wide_int_const_1 (c.coeffs[0], mode);
745 
746   /* Not scalar_int_mode because we also allow pointer bound modes.  */
747   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
748 
749   /* Allow truncation but not extension since we do not know if the
750      number is signed or unsigned.  */
751   gcc_assert (prec <= c.coeffs[0].get_precision ());
752   poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
753 
754   /* See whether we already have an rtx for this constant.  */
755   inchash::hash h;
756   h.add_int (mode);
757   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
758     h.add_wide_int (newc.coeffs[i]);
759   const_poly_int_hasher::compare_type typed_value (mode, newc);
760   rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
761 							h.end (), INSERT);
762   rtx x = *slot;
763   if (x)
764     return x;
765 
766   /* Create a new rtx.  There's a choice to be made here between installing
767      the actual mode of the rtx or leaving it as VOIDmode (for consistency
768      with CONST_INT).  In practice the handling of the codes is different
769      enough that we get no benefit from using VOIDmode, and various places
770      assume that VOIDmode implies CONST_INT.  Using the real mode seems like
771      the right long-term direction anyway.  */
772   typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
773   size_t extra_size = twi::extra_size (prec);
774   x = rtx_alloc_v (CONST_POLY_INT,
775 		   sizeof (struct const_poly_int_def) + extra_size);
776   PUT_MODE (x, mode);
777   CONST_POLY_INT_COEFFS (x).set_precision (prec);
778   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
779     CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
780 
781   *slot = x;
782   return x;
783 }
784 
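/* Return a REG rtx for register REGNO in mode MODE.  Where possible this
   reuses the preallocated global rtxs for the frame pointer, argument
   pointer, stack pointer and related registers, as explained below.  */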
785 rtx
786 gen_rtx_REG (machine_mode mode, unsigned int regno)
787 {
788   /* In case the MD file explicitly references the frame pointer, have
789      all such references point to the same frame pointer.  This is
790      used during frame pointer elimination to distinguish the explicit
791      references to these registers from pseudos that happened to be
792      assigned to them.
793 
794      If we have eliminated the frame pointer or arg pointer, we will
795      be using it as a normal register, for example as a spill
796      register.  In such cases, we might be accessing it in a mode that
797      is not Pmode and therefore cannot use the pre-allocated rtx.
798 
799      Also don't do this when we are making new REGs in reload, since
800      we don't want to get confused with the real pointers.  */
801 
802   if (mode == Pmode && !reload_in_progress && !lra_in_progress)
803     {
804       if (regno == FRAME_POINTER_REGNUM
805 	  && (!reload_completed || frame_pointer_needed))
806 	return frame_pointer_rtx;
807 
808       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
809 	  && regno == HARD_FRAME_POINTER_REGNUM
810 	  && (!reload_completed || frame_pointer_needed))
811 	return hard_frame_pointer_rtx;
812 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
813       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
814 	  && regno == ARG_POINTER_REGNUM)
815 	return arg_pointer_rtx;
816 #endif
817 #ifdef RETURN_ADDRESS_POINTER_REGNUM
818       if (regno == RETURN_ADDRESS_POINTER_REGNUM)
819 	return return_address_pointer_rtx;
820 #endif
821       if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
822 	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
823 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
824 	return pic_offset_table_rtx;
825       if (regno == STACK_POINTER_REGNUM)
826 	return stack_pointer_rtx;
827     }
828 
829 #if 0
830   /* If the per-function register table has been set up, try to re-use
831      an existing entry in that table to avoid useless generation of RTL.
832 
833      This code is disabled for now until we can fix the various backends
834      which depend on having non-shared hard registers in some cases.   Long
835      term we want to re-enable this code as it can significantly cut down
836      on the amount of useless RTL that gets generated.
837 
838      We'll also need to fix some code that runs after reload that wants to
839      set ORIGINAL_REGNO.  */
840 
841   if (cfun
842       && cfun->emit
843       && regno_reg_rtx
844       && regno < FIRST_PSEUDO_REGISTER
845       && reg_raw_mode[regno] == mode)
846     return regno_reg_rtx[regno];
847 #endif
848 
849   return gen_raw_REG (mode, regno);
850 }
851 
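/* Return a new MEM rtx of mode MODE whose address is ADDR, with no
   memory attributes attached.  */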
852 rtx
853 gen_rtx_MEM (machine_mode mode, rtx addr)
854 {
855   rtx rt = gen_rtx_raw_MEM (mode, addr);
856 
857   /* This field is not cleared by the mere allocation of the rtx, so
858      we clear it here.  */
859   MEM_ATTRS (rt) = 0;
860 
861   return rt;
862 }
863 
864 /* Generate a memory referring to non-trapping constant memory.  */
865 
866 rtx
867 gen_const_mem (machine_mode mode, rtx addr)
868 {
869   rtx mem = gen_rtx_MEM (mode, addr);
870   MEM_READONLY_P (mem) = 1;
871   MEM_NOTRAP_P (mem) = 1;
872   return mem;
873 }
874 
875 /* Generate a MEM referring to fixed portions of the frame, e.g., register
876    save areas.  */
877 
878 rtx
879 gen_frame_mem (machine_mode mode, rtx addr)
880 {
881   rtx mem = gen_rtx_MEM (mode, addr);
882   MEM_NOTRAP_P (mem) = 1;
883   set_mem_alias_set (mem, get_frame_alias_set ());
884   return mem;
885 }
886 
887 /* Generate a MEM referring to a temporary use of the stack, not part
888     of the fixed stack frame.  For example, something which is pushed
889     by a target splitter.  */
890 rtx
891 gen_tmp_stack_mem (machine_mode mode, rtx addr)
892 {
893   rtx mem = gen_rtx_MEM (mode, addr);
894   MEM_NOTRAP_P (mem) = 1;
895   if (!cfun->calls_alloca)
896     set_mem_alias_set (mem, get_frame_alias_set ());
897   return mem;
898 }
899 
900 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
901    this construct would be valid, and false otherwise.  */
902 
903 bool
904 validate_subreg (machine_mode omode, machine_mode imode,
905 		 const_rtx reg, poly_uint64 offset)
906 {
907   poly_uint64 isize = GET_MODE_SIZE (imode);
908   poly_uint64 osize = GET_MODE_SIZE (omode);
909 
910   /* The sizes must be ordered, so that we know whether the subreg
911      is partial, paradoxical or complete.  */
912   if (!ordered_p (isize, osize))
913     return false;
914 
915   /* All subregs must be aligned.  */
916   if (!multiple_p (offset, osize))
917     return false;
918 
919   /* The subreg offset cannot be outside the inner object.  */
920   if (maybe_ge (offset, isize))
921     return false;
922 
923   poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
924 
925   /* ??? This should not be here.  Temporarily continue to allow word_mode
926      subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
927      Generally, backends are doing something sketchy but it'll take time to
928      fix them all.  */
929   if (omode == word_mode)
930     ;
931   /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
932      is the culprit here, and not the backends.  */
933   else if (known_ge (osize, regsize) && known_ge (isize, osize))
934     ;
935   /* Allow component subregs of complex and vector.  Though given the below
936      extraction rules, it's not always clear what that means.  */
937   else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
938 	   && GET_MODE_INNER (imode) == omode)
939     ;
940   /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
941      i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
942      surely isn't the cleanest way to represent this.  It's questionable
943      if this ought to be represented at all -- why can't this all be hidden
944      in post-reload splitters that make arbitrary mode changes to the
945      registers themselves.  */
946   else if (VECTOR_MODE_P (omode)
947 	   && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
948     ;
949   /* Subregs involving floating point modes are not allowed to
950      change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
951      (subreg:SI (reg:DF) 0) isn't.  */
952   else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
953     {
954       if (! (known_eq (isize, osize)
955 	     /* LRA can use subreg to store a floating point value in
956 		an integer mode.  Although the floating point and the
957 		integer modes need the same number of hard registers,
958 		the size of floating point mode can be less than the
959 		integer mode.  LRA also uses subregs when a register
960 		needs to be used in a different mode within one insn.  */
961 	     || lra_in_progress))
962 	return false;
963     }
964 
965   /* Paradoxical subregs must have offset zero.  */
966   if (maybe_gt (osize, isize))
967     return known_eq (offset, 0U);
968 
969   /* This is a normal subreg.  Verify that the offset is representable.  */
970 
971   /* For hard registers, we already have most of these rules collected in
972      subreg_offset_representable_p.  */
973   if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
974     {
975       unsigned int regno = REGNO (reg);
976 
977       if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
978 	  && GET_MODE_INNER (imode) == omode)
979 	;
980       else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
981 	return false;
982 
983       return subreg_offset_representable_p (regno, imode, offset, omode);
984     }
985 
986   /* The outer size must be ordered wrt the register size, otherwise
987      we wouldn't know at compile time how many registers the outer
988      mode occupies.  */
989   if (!ordered_p (osize, regsize))
990     return false;
991 
992   /* For pseudo registers, we want most of the same checks.  Namely:
993 
994      Assume that the pseudo register will be allocated to hard registers
995      that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
996      the remainder must correspond to the lowpart of the containing hard
997      register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
998      otherwise it is at the lowest offset.
999 
1000      Given that we've already checked the mode and offset alignment,
1001      we only have to check subblock subregs here.  */
1002   if (maybe_lt (osize, regsize)
1003       && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
1004     {
1005       /* It is invalid for the target to pick a register size for a mode
1006 	 that isn't ordered wrt the size of that mode.  */
1007       poly_uint64 block_size = ordered_min (isize, regsize);
1008       unsigned int start_reg;
1009       poly_uint64 offset_within_reg;
1010       if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
1011 	  || (BYTES_BIG_ENDIAN
1012 	      ? maybe_ne (offset_within_reg, block_size - osize)
1013 	      : maybe_ne (offset_within_reg, 0U)))
1014 	return false;
1015     }
1016   return true;
1017 }
1018 
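/* Return (subreg:MODE (REG) OFFSET), first asserting that the combination
   is valid according to validate_subreg.  */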
1019 rtx
1020 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1021 {
1022   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1023   return gen_rtx_raw_SUBREG (mode, reg, offset);
1024 }
1025 
1026 /* Generate a SUBREG representing the least-significant part of REG if MODE
1027    is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */
1028 
1029 rtx
1030 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1031 {
1032   machine_mode inmode;
1033 
1034   inmode = GET_MODE (reg);
1035   if (inmode == VOIDmode)
1036     inmode = mode;
1037   return gen_rtx_SUBREG (mode, reg,
1038 			 subreg_lowpart_offset (mode, inmode));
1039 }
1040 
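/* Return a VAR_LOCATION rtx in mode MODE that binds declaration DECL to
   rtl location LOC, with initialization status STATUS.  */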
1041 rtx
1042 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1043 		      enum var_init_status status)
1044 {
1045   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1046   PAT_VAR_LOCATION_STATUS (x) = status;
1047   return x;
1048 }
1049 
1050 
1051 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
1052 
1053 rtvec
1054 gen_rtvec (int n, ...)
1055 {
1056   int i;
1057   rtvec rt_val;
1058   va_list p;
1059 
1060   va_start (p, n);
1061 
1062   /* Don't allocate an empty rtvec...  */
1063   if (n == 0)
1064     {
1065       va_end (p);
1066       return NULL_RTVEC;
1067     }
1068 
1069   rt_val = rtvec_alloc (n);
1070 
1071   for (i = 0; i < n; i++)
1072     rt_val->elem[i] = va_arg (p, rtx);
1073 
1074   va_end (p);
1075   return rt_val;
1076 }
1077 
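/* Like gen_rtvec, but take the N rtxs from the array ARGP.  */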
1078 rtvec
1079 gen_rtvec_v (int n, rtx *argp)
1080 {
1081   int i;
1082   rtvec rt_val;
1083 
1084   /* Don't allocate an empty rtvec...  */
1085   if (n == 0)
1086     return NULL_RTVEC;
1087 
1088   rt_val = rtvec_alloc (n);
1089 
1090   for (i = 0; i < n; i++)
1091     rt_val->elem[i] = *argp++;
1092 
1093   return rt_val;
1094 }
1095 
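/* As above, but take the N insns from the array ARGP.  */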
1096 rtvec
1097 gen_rtvec_v (int n, rtx_insn **argp)
1098 {
1099   int i;
1100   rtvec rt_val;
1101 
1102   /* Don't allocate an empty rtvec...  */
1103   if (n == 0)
1104     return NULL_RTVEC;
1105 
1106   rt_val = rtvec_alloc (n);
1107 
1108   for (i = 0; i < n; i++)
1109     rt_val->elem[i] = *argp++;
1110 
1111   return rt_val;
1112 }
1113 
1114 
1115 /* Return the number of bytes between the start of an OUTER_MODE
1116    in-memory value and the start of an INNER_MODE in-memory value,
1117    given that the former is a lowpart of the latter.  It may be a
1118    paradoxical lowpart, in which case the offset will be negative
1119    on big-endian targets.  */
1120 
1121 poly_int64
1122 byte_lowpart_offset (machine_mode outer_mode,
1123 		     machine_mode inner_mode)
1124 {
1125   if (paradoxical_subreg_p (outer_mode, inner_mode))
1126     return -subreg_lowpart_offset (inner_mode, outer_mode);
1127   else
1128     return subreg_lowpart_offset (outer_mode, inner_mode);
1129 }
1130 
1131 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1132    from address X.  For paradoxical big-endian subregs this is a
1133    negative value, otherwise it's the same as OFFSET.  */
1134 
1135 poly_int64
1136 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1137 		      poly_uint64 offset)
1138 {
1139   if (paradoxical_subreg_p (outer_mode, inner_mode))
1140     {
1141       gcc_assert (known_eq (offset, 0U));
1142       return -subreg_lowpart_offset (inner_mode, outer_mode);
1143     }
1144   return offset;
1145 }
1146 
1147 /* As above, but return the offset that existing subreg X would have
1148    if SUBREG_REG (X) were stored in memory.  The only significant thing
1149    about the current SUBREG_REG is its mode.  */
1150 
1151 poly_int64
1152 subreg_memory_offset (const_rtx x)
1153 {
1154   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1155 			       SUBREG_BYTE (x));
1156 }
1157 
1158 /* Generate a REG rtx for a new pseudo register of mode MODE.
1159    This pseudo is assigned the next sequential register number.  */
1160 
1161 rtx
1162 gen_reg_rtx (machine_mode mode)
1163 {
1164   rtx val;
1165   unsigned int align = GET_MODE_ALIGNMENT (mode);
1166 
1167   gcc_assert (can_create_pseudo_p ());
1168 
1169   /* If a virtual register with bigger mode alignment is generated,
1170      increase stack alignment estimation because it might be spilled
1171      to stack later.  */
1172   if (SUPPORTS_STACK_ALIGNMENT
1173       && crtl->stack_alignment_estimated < align
1174       && !crtl->stack_realign_processed)
1175     {
1176       unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1177       if (crtl->stack_alignment_estimated < min_align)
1178 	crtl->stack_alignment_estimated = min_align;
1179     }
1180 
1181   if (generating_concat_p
1182       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1183 	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1184     {
1185       /* For complex modes, don't make a single pseudo.
1186 	 Instead, make a CONCAT of two pseudos.
1187 	 This allows noncontiguous allocation of the real and imaginary parts,
1188 	 which makes much better code.  Besides, allocating DCmode
1189 	 pseudos overstrains reload on some machines like the 386.  */
1190       rtx realpart, imagpart;
1191       machine_mode partmode = GET_MODE_INNER (mode);
1192 
1193       realpart = gen_reg_rtx (partmode);
1194       imagpart = gen_reg_rtx (partmode);
1195       return gen_rtx_CONCAT (mode, realpart, imagpart);
1196     }
1197 
1198   /* Do not call gen_reg_rtx with uninitialized crtl.  */
1199   gcc_assert (crtl->emit.regno_pointer_align_length);
1200 
1201   crtl->emit.ensure_regno_capacity ();
1202   gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1203 
1204   val = gen_raw_REG (mode, reg_rtx_no);
1205   regno_reg_rtx[reg_rtx_no++] = val;
1206   return val;
1207 }
1208 
1209 /* Make sure regno_pointer_align and regno_reg_rtx are large
1210    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
1211 
1212 void
1213 emit_status::ensure_regno_capacity ()
1214 {
1215   int old_size = regno_pointer_align_length;
1216 
1217   if (reg_rtx_no < old_size)
1218     return;
1219 
1220   int new_size = old_size * 2;
1221   while (reg_rtx_no >= new_size)
1222     new_size *= 2;
1223 
1224   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1225   memset (tmp + old_size, 0, new_size - old_size);
1226   regno_pointer_align = (unsigned char *) tmp;
1227 
1228   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1229   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1230   regno_reg_rtx = new1;
1231 
1232   crtl->emit.regno_pointer_align_length = new_size;
1233 }
1234 
1235 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
1236 
1237 bool
1238 reg_is_parm_p (rtx reg)
1239 {
1240   tree decl;
1241 
1242   gcc_assert (REG_P (reg));
1243   decl = REG_EXPR (reg);
1244   return (decl && TREE_CODE (decl) == PARM_DECL);
1245 }
1246 
1247 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1248    to the REG_OFFSET.  */
1249 
1250 static void
1251 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1252 {
1253   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1254 				       REG_OFFSET (reg) + offset);
1255 }
1256 
1257 /* Generate a register with same attributes as REG, but with OFFSET
1258    added to the REG_OFFSET.  */
1259 
1260 rtx
1261 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1262 		    poly_int64 offset)
1263 {
1264   rtx new_rtx = gen_rtx_REG (mode, regno);
1265 
1266   update_reg_offset (new_rtx, reg, offset);
1267   return new_rtx;
1268 }
1269 
1270 /* Generate a new pseudo-register with the same attributes as REG, but
1271    with OFFSET added to the REG_OFFSET.  */
1272 
1273 rtx
1274 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1275 {
1276   rtx new_rtx = gen_reg_rtx (mode);
1277 
1278   update_reg_offset (new_rtx, reg, offset);
1279   return new_rtx;
1280 }
1281 
1282 /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
1283    new register is a (possibly paradoxical) lowpart of the old one.  */
1284 
1285 void
1286 adjust_reg_mode (rtx reg, machine_mode mode)
1287 {
1288   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1289   PUT_MODE (reg, mode);
1290 }
1291 
1292 /* Copy REG's attributes from X, if X has any attributes.  If REG and X
1293    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
1294 
1295 void
1296 set_reg_attrs_from_value (rtx reg, rtx x)
1297 {
1298   poly_int64 offset;
1299   bool can_be_reg_pointer = true;
1300 
1301   /* Don't call mark_reg_pointer for incompatible pointer sign
1302      extension.  */
1303   while (GET_CODE (x) == SIGN_EXTEND
1304 	 || GET_CODE (x) == ZERO_EXTEND
1305 	 || GET_CODE (x) == TRUNCATE
1306 	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1307     {
1308 #if defined(POINTERS_EXTEND_UNSIGNED)
1309       if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1310 	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1311 	   || (paradoxical_subreg_p (x)
1312 	       && ! (SUBREG_PROMOTED_VAR_P (x)
1313 		     && SUBREG_CHECK_PROMOTED_SIGN (x,
1314 						    POINTERS_EXTEND_UNSIGNED))))
1315 	  && !targetm.have_ptr_extend ())
1316 	can_be_reg_pointer = false;
1317 #endif
1318       x = XEXP (x, 0);
1319     }
1320 
1321   /* Hard registers can be reused for multiple purposes within the same
1322      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1323      on them is wrong.  */
1324   if (HARD_REGISTER_P (reg))
1325     return;
1326 
1327   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1328   if (MEM_P (x))
1329     {
1330       if (MEM_OFFSET_KNOWN_P (x))
1331 	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1332 					 MEM_OFFSET (x) + offset);
1333       if (can_be_reg_pointer && MEM_POINTER (x))
1334 	mark_reg_pointer (reg, 0);
1335     }
1336   else if (REG_P (x))
1337     {
1338       if (REG_ATTRS (x))
1339 	update_reg_offset (reg, x, offset);
1340       if (can_be_reg_pointer && REG_POINTER (x))
1341 	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1342     }
1343 }
1344 
1345 /* Generate a REG rtx for a new pseudo register, copying the mode
1346    and attributes from X.  */
1347 
1348 rtx
1349 gen_reg_rtx_and_attrs (rtx x)
1350 {
1351   rtx reg = gen_reg_rtx (GET_MODE (x));
1352   set_reg_attrs_from_value (reg, x);
1353   return reg;
1354 }
1355 
1356 /* Set the register attributes for registers contained in PARM_RTX.
1357    Use needed values from memory attributes of MEM.  */
1358 
1359 void
1360 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1361 {
1362   if (REG_P (parm_rtx))
1363     set_reg_attrs_from_value (parm_rtx, mem);
1364   else if (GET_CODE (parm_rtx) == PARALLEL)
1365     {
1366       /* Check for a NULL entry in the first slot, used to indicate that the
1367 	 parameter goes both on the stack and in registers.  */
1368       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1369       for (; i < XVECLEN (parm_rtx, 0); i++)
1370 	{
1371 	  rtx x = XVECEXP (parm_rtx, 0, i);
1372 	  if (REG_P (XEXP (x, 0)))
1373 	    REG_ATTRS (XEXP (x, 0))
1374 	      = get_reg_attrs (MEM_EXPR (mem),
1375 			       INTVAL (XEXP (x, 1)));
1376 	}
1377     }
1378 }
1379 
1380 /* Set the REG_ATTRS for registers in value X, given that X represents
1381    decl T.  */
1382 
1383 void
1384 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1385 {
1386   if (!t)
1387     return;
1388   tree tdecl = t;
1389   if (GET_CODE (x) == SUBREG)
1390     {
1391       gcc_assert (subreg_lowpart_p (x));
1392       x = SUBREG_REG (x);
1393     }
1394   if (REG_P (x))
1395     REG_ATTRS (x)
1396       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1397 					       DECL_P (tdecl)
1398 					       ? DECL_MODE (tdecl)
1399 					       : TYPE_MODE (TREE_TYPE (tdecl))));
1400   if (GET_CODE (x) == CONCAT)
1401     {
1402       if (REG_P (XEXP (x, 0)))
1403         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1404       if (REG_P (XEXP (x, 1)))
1405 	REG_ATTRS (XEXP (x, 1))
1406 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1407     }
1408   if (GET_CODE (x) == PARALLEL)
1409     {
1410       int i, start;
1411 
1412       /* Check for a NULL entry, used to indicate that the parameter goes
1413 	 both on the stack and in registers.  */
1414       if (XEXP (XVECEXP (x, 0, 0), 0))
1415 	start = 0;
1416       else
1417 	start = 1;
1418 
1419       for (i = start; i < XVECLEN (x, 0); i++)
1420 	{
1421 	  rtx y = XVECEXP (x, 0, i);
1422 	  if (REG_P (XEXP (y, 0)))
1423 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1424 	}
1425     }
1426 }
1427 
1428 /* Assign the RTX X to declaration T.  */
1429 
1430 void
1431 set_decl_rtl (tree t, rtx x)
1432 {
1433   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1434   if (x)
1435     set_reg_attrs_for_decl_rtl (t, x);
1436 }
1437 
1438 /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
1439    if the ABI requires the parameter to be passed by reference.  */
1440 
1441 void
1442 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1443 {
1444   DECL_INCOMING_RTL (t) = x;
1445   if (x && !by_reference_p)
1446     set_reg_attrs_for_decl_rtl (t, x);
1447 }
1448 
1449 /* Identify REG (which may be a CONCAT) as a user register.  */
1450 
1451 void
1452 mark_user_reg (rtx reg)
1453 {
1454   if (GET_CODE (reg) == CONCAT)
1455     {
1456       REG_USERVAR_P (XEXP (reg, 0)) = 1;
1457       REG_USERVAR_P (XEXP (reg, 1)) = 1;
1458     }
1459   else
1460     {
1461       gcc_assert (REG_P (reg));
1462       REG_USERVAR_P (reg) = 1;
1463     }
1464 }
1465 
1466 /* Identify REG as a probable pointer register and show its alignment
1467    as ALIGN, if nonzero.  */
1468 
1469 void
1470 mark_reg_pointer (rtx reg, int align)
1471 {
1472   if (! REG_POINTER (reg))
1473     {
1474       REG_POINTER (reg) = 1;
1475 
1476       if (align)
1477 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1478     }
1479   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1480     /* We can no longer be sure just how aligned this pointer is.  */
1481     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1482 }
1483 
1484 /* Return 1 plus largest pseudo reg number used in the current function.  */
1485 
1486 int
1487 max_reg_num (void)
1488 {
1489   return reg_rtx_no;
1490 }
1491 
1492 /* Return 1 + the largest label number used so far in the current function.  */
1493 
1494 int
1495 max_label_num (void)
1496 {
1497   return label_num;
1498 }
1499 
1500 /* Return first label number used in this function (if any were used).  */
1501 
1502 int
1503 get_first_label_num (void)
1504 {
1505   return first_label_num;
1506 }
1507 
1508 /* If the rtx for label was created during the expansion of a nested
1509    function, then first_label_num won't include this label number.
1510    Fix this now so that array indices work later.  */
1511 
1512 void
1513 maybe_set_first_label_num (rtx_code_label *x)
1514 {
1515   if (CODE_LABEL_NUMBER (x) < first_label_num)
1516     first_label_num = CODE_LABEL_NUMBER (x);
1517 }
1518 
1519 /* For use by the RTL function loader, when mingling with normal
1520    functions.
1521    Ensure that label_num is greater than the label num of X, to avoid
1522    duplicate labels in the generated assembler.  */
1523 
1524 void
1525 maybe_set_max_label_num (rtx_code_label *x)
1526 {
1527   if (CODE_LABEL_NUMBER (x) >= label_num)
1528     label_num = CODE_LABEL_NUMBER (x) + 1;
1529 }
1530 
1531 
1532 /* Return a value representing some low-order bits of X, where the number
1533    of low-order bits is given by MODE.  Note that no conversion is done
1534    between floating-point and fixed-point values, rather, the bit
1535    representation is returned.
1536 
1537    This function handles the cases in common between gen_lowpart, below,
1538    and two variants in cse.c and combine.c.  These are the cases that can
1539    be safely handled at all points in the compilation.
1540 
1541    If this is not a case we can handle, return 0.  */
1542 
1543 rtx
1544 gen_lowpart_common (machine_mode mode, rtx x)
1545 {
1546   poly_uint64 msize = GET_MODE_SIZE (mode);
1547   machine_mode innermode;
1548 
1549   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1550      so we have to make one up.  Yuk.  */
1551   innermode = GET_MODE (x);
1552   if (CONST_INT_P (x)
1553       && known_le (msize * BITS_PER_UNIT,
1554 		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1555     innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1556   else if (innermode == VOIDmode)
1557     innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1558 
1559   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1560 
1561   if (innermode == mode)
1562     return x;
1563 
1564   /* The size of the outer and inner modes must be ordered.  */
1565   poly_uint64 xsize = GET_MODE_SIZE (innermode);
1566   if (!ordered_p (msize, xsize))
1567     return 0;
1568 
1569   if (SCALAR_FLOAT_MODE_P (mode))
1570     {
1571       /* Don't allow paradoxical FLOAT_MODE subregs.  */
1572       if (maybe_gt (msize, xsize))
1573 	return 0;
1574     }
1575   else
1576     {
1577       /* MODE must occupy no more of the underlying registers than X.  */
1578       poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1579       unsigned int mregs, xregs;
1580       if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1581 	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1582 	  || mregs > xregs)
1583 	return 0;
1584     }
1585 
1586   scalar_int_mode int_mode, int_innermode, from_mode;
1587   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1588       && is_a <scalar_int_mode> (mode, &int_mode)
1589       && is_a <scalar_int_mode> (innermode, &int_innermode)
1590       && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1591     {
1592       /* If we are getting the low-order part of something that has been
1593 	 sign- or zero-extended, we can either just use the object being
1594 	 extended or make a narrower extension.  If we want an even smaller
1595 	 piece than the size of the object being extended, call ourselves
1596 	 recursively.
1597 
1598 	 This case is used mostly by combine and cse.  */
1599 
1600       if (from_mode == int_mode)
1601 	return XEXP (x, 0);
1602       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1603 	return gen_lowpart_common (int_mode, XEXP (x, 0));
1604       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1605 	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1606     }
1607   else if (GET_CODE (x) == SUBREG || REG_P (x)
1608 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1609 	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1610 	   || CONST_POLY_INT_P (x))
1611     return lowpart_subreg (mode, x, innermode);
1612 
1613   /* Otherwise, we can't do this.  */
1614   return 0;
1615 }
1616 
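/* Return the value corresponding to the high-order MODE-sized part of X;
   the counterpart of gen_lowpart.  */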
1617 rtx
1618 gen_highpart (machine_mode mode, rtx x)
1619 {
1620   poly_uint64 msize = GET_MODE_SIZE (mode);
1621   rtx result;
1622 
1623   /* This case loses if X is a subreg.  To catch bugs early,
1624      complain if an invalid MODE is used even in other cases.  */
1625   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1626 	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1627 
1628   /* gen_lowpart_common handles a lot of special cases due to needing to handle
1629      paradoxical subregs; it only calls simplify_gen_subreg when certain that
1630      it will produce something meaningful.  The only case we need to handle
1631      specially here is MEM.  */
1632   if (MEM_P (x))
1633     {
1634       poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
1635       return adjust_address (x, mode, offset);
1636     }
1637 
1638   result = simplify_gen_subreg (mode, x, GET_MODE (x),
1639 				subreg_highpart_offset (mode, GET_MODE (x)));
1640   /* Since we handle MEM directly above, we should never get a MEM back
1641      from simplify_gen_subreg.  */
1642   gcc_assert (result && !MEM_P (result));
1643 
1644   return result;
1645 }
1646 
1647 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1648    be VOIDmode constant.  */
1649 rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1651 {
1652   if (GET_MODE (exp) != VOIDmode)
1653     {
1654       gcc_assert (GET_MODE (exp) == innermode);
1655       return gen_highpart (outermode, exp);
1656     }
1657   return simplify_gen_subreg (outermode, exp, innermode,
1658 			      subreg_highpart_offset (outermode, innermode));
1659 }
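
/* Usage sketch (illustrative only): splitting a DImode value into its
   word halves on a 32-bit target.  A CONST_INT has VOIDmode, so its
   inner mode must be passed explicitly via gen_highpart_mode.

     rtx hi  = gen_highpart (SImode, di_reg);
     rtx hic = gen_highpart_mode (SImode, DImode, GEN_INT (42));

   The corresponding low halves come from gen_lowpart.  */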
1660 
1661 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1662    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1663 
1664 poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1666 {
1667   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1668   if (maybe_gt (outer_bytes, inner_bytes))
1669     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
1670     return 0;
1671 
1672   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1673     return inner_bytes - outer_bytes;
1674   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1675     return 0;
1676   else
1677     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1678 }
1679 
1680 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1681    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1682 
1683 poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1685 {
1686   gcc_assert (known_ge (inner_bytes, outer_bytes));
1687 
1688   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1689     return 0;
1690   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1691     return inner_bytes - outer_bytes;
1692   else
1693     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1694 					(inner_bytes - outer_bytes)
1695 					* BITS_PER_UNIT);
1696 }
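
/* Worked example (informative): for a 4-byte outer mode inside an
   8-byte inner mode, on targets where BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN agree, subreg_size_lowpart_offset (4, 8) is 0 on a
   little-endian target and 4 on a big-endian one, while
   subreg_size_highpart_offset (4, 8) gives the opposite value.  */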
1697 
1698 /* Return 1 iff X, assumed to be a SUBREG,
1699    refers to the least significant part of its containing reg.
1700    If X is not a SUBREG, always return 1 (it is its own low part!).  */
1701 
1702 int
subreg_lowpart_p (const_rtx x)
1704 {
1705   if (GET_CODE (x) != SUBREG)
1706     return 1;
1707   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1708     return 0;
1709 
1710   return known_eq (subreg_lowpart_offset (GET_MODE (x),
1711 					  GET_MODE (SUBREG_REG (x))),
1712 		   SUBREG_BYTE (x));
1713 }
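
/* Example (informative): on a little-endian target,
   (subreg:SI (reg:DI 100) 0) satisfies subreg_lowpart_p, whereas
   (subreg:SI (reg:DI 100) 4) is the high part and does not.  */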
1714 
1715 /* Return subword OFFSET of operand OP.
1716    The word number, OFFSET, is interpreted as the word number starting
1717    at the low-order address.  OFFSET 0 is the low-order word if not
1718    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1719 
1720    If we cannot extract the required word, we return zero.  Otherwise,
1721    an rtx corresponding to the requested word will be returned.
1722 
1723    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1724    reload has completed, a valid address will always be returned.  After
1725    reload, if a valid address cannot be returned, we return zero.
1726 
1727    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1728    it is the responsibility of the caller.
1729 
1730    MODE is the mode of OP in case it is a CONST_INT.
1731 
   ??? This is still rather broken for some cases.  The problem is that
   the callers of this function provide no 'goal mode' to work with,
   because they were all written for a word-based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.
1738  */
1739 
1740 rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
1743 {
1744   if (mode == VOIDmode)
1745     mode = GET_MODE (op);
1746 
1747   gcc_assert (mode != VOIDmode);
1748 
1749   /* If OP is narrower than a word, fail.  */
1750   if (mode != BLKmode
1751       && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1752     return 0;
1753 
1754   /* If we want a word outside OP, return zero.  */
1755   if (mode != BLKmode
1756       && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1757     return const0_rtx;
1758 
1759   /* Form a new MEM at the requested address.  */
1760   if (MEM_P (op))
1761     {
1762       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1763 
1764       if (! validate_address)
1765 	return new_rtx;
1766 
1767       else if (reload_completed)
1768 	{
1769 	  if (! strict_memory_address_addr_space_p (word_mode,
1770 						    XEXP (new_rtx, 0),
1771 						    MEM_ADDR_SPACE (op)))
1772 	    return 0;
1773 	}
1774       else
1775 	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1776     }
1777 
  /* The rest can be handled by simplify_gen_subreg.  */
1779   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1780 }
1781 
1782 /* Similar to `operand_subword', but never return 0.  If we can't
1783    extract the required subword, put OP into a register and try again.
1784    The second attempt must succeed.  We always validate the address in
1785    this case.
1786 
1787    MODE is the mode of OP, in case it is CONST_INT.  */
1788 
1789 rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1791 {
1792   rtx result = operand_subword (op, offset, 1, mode);
1793 
1794   if (result)
1795     return result;
1796 
1797   if (mode != BLKmode && mode != VOIDmode)
1798     {
1799       /* If this is a register which cannot be accessed by words, copy it
1800 	 to a pseudo register.  */
1801       if (REG_P (op))
1802 	op = copy_to_reg (op);
1803       else
1804 	op = force_reg (mode, op);
1805     }
1806 
1807   result = operand_subword (op, offset, 1, mode);
1808   gcc_assert (result);
1809 
1810   return result;
1811 }
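
/* Usage sketch (illustrative only): splitting a DImode operand OP into
   its two word-sized halves on a 32-bit target, copying OP into a
   register first if a plain subword cannot be formed.

     rtx w0 = operand_subword_force (op, 0, DImode);
     rtx w1 = operand_subword_force (op, 1, DImode);

   This is the usual idiom in expanders that emit word-at-a-time code.  */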
1812 
/* Construct a mem_attrs object with every field cleared to its
   default value.  */

mem_attrs::mem_attrs ()
1814   : expr (NULL_TREE),
1815     offset (0),
1816     size (0),
1817     alias (0),
1818     align (0),
1819     addrspace (ADDR_SPACE_GENERIC),
1820     offset_known_p (false),
1821     size_known_p (false)
1822 {}
1823 
/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */
1826 
1827 int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
1829 {
1830   if (expr1 == expr2)
1831     return 1;
1832 
1833   if (! expr1 || ! expr2)
1834     return 0;
1835 
1836   if (TREE_CODE (expr1) != TREE_CODE (expr2))
1837     return 0;
1838 
1839   return operand_equal_p (expr1, expr2, 0);
1840 }
1841 
1842 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1843    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1844    -1 if not known.  */
1845 
1846 int
get_mem_align_offset (rtx mem, unsigned int align)
1848 {
1849   tree expr;
1850   poly_uint64 offset;
1851 
1852   /* This function can't use
1853      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1854 	 || (MAX (MEM_ALIGN (mem),
1855 	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
1856 	     < align))
1857        return -1;
1858      else
1859        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1860      for two reasons:
1861      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1862        for <variable>.  get_inner_reference doesn't handle it and
1863        even if it did, the alignment in that case needs to be determined
1864        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
1867   gcc_assert (MEM_P (mem));
1868   expr = MEM_EXPR (mem);
1869   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1870     return -1;
1871 
1872   offset = MEM_OFFSET (mem);
1873   if (DECL_P (expr))
1874     {
1875       if (DECL_ALIGN (expr) < align)
1876 	return -1;
1877     }
1878   else if (INDIRECT_REF_P (expr))
1879     {
1880       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1881 	return -1;
1882     }
1883   else if (TREE_CODE (expr) == COMPONENT_REF)
1884     {
1885       while (1)
1886 	{
1887 	  tree inner = TREE_OPERAND (expr, 0);
1888 	  tree field = TREE_OPERAND (expr, 1);
1889 	  tree byte_offset = component_ref_field_offset (expr);
1890 	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1891 
1892 	  poly_uint64 suboffset;
1893 	  if (!byte_offset
1894 	      || !poly_int_tree_p (byte_offset, &suboffset)
1895 	      || !tree_fits_uhwi_p (bit_offset))
1896 	    return -1;
1897 
1898 	  offset += suboffset;
1899 	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1900 
1901 	  if (inner == NULL_TREE)
1902 	    {
1903 	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1904 		  < (unsigned int) align)
1905 		return -1;
1906 	      break;
1907 	    }
1908 	  else if (DECL_P (inner))
1909 	    {
1910 	      if (DECL_ALIGN (inner) < align)
1911 		return -1;
1912 	      break;
1913 	    }
1914 	  else if (TREE_CODE (inner) != COMPONENT_REF)
1915 	    return -1;
1916 	  expr = inner;
1917 	}
1918     }
1919   else
1920     return -1;
1921 
1922   HOST_WIDE_INT misalign;
1923   if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1924     return -1;
1925   return misalign;
1926 }
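
/* Worked example (informative): if MEM_EXPR and MEM_OFFSET say that MEM
   addresses byte 6 of an 8-byte-aligned declaration, then
   get_mem_align_offset (mem, 64) returns 6, because subtracting 6 bytes
   from the address yields a 64-bit-aligned address.  If the expression,
   offset or alignment is unknown, the result is -1.  */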
1927 
1928 /* Given REF (a MEM) and T, either the type of X or the expression
1929    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1930    if we are making a new object of this type.  BITPOS is nonzero if
1931    there is an offset outstanding on T that will be applied later.  */
1932 
1933 void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 poly_int64 bitpos)
1936 {
1937   poly_int64 apply_bitpos = 0;
1938   tree type;
1939   class mem_attrs attrs, *defattrs, *refattrs;
1940   addr_space_t as;
1941 
1942   /* It can happen that type_for_mode was given a mode for which there
1943      is no language-level type.  In which case it returns NULL, which
1944      we can see here.  */
1945   if (t == NULL_TREE)
1946     return;
1947 
1948   type = TYPE_P (t) ? t : TREE_TYPE (t);
1949   if (type == error_mark_node)
1950     return;
1951 
1952   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1953      wrong answer, as it assumes that DECL_RTL already has the right alias
1954      info.  Callers should not set DECL_RTL until after the call to
1955      set_mem_attributes.  */
1956   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1957 
1958   /* Get the alias set from the expression or type (perhaps using a
1959      front-end routine) and use it.  */
1960   attrs.alias = get_alias_set (t);
1961 
1962   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1963   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1964 
1965   /* Default values from pre-existing memory attributes if present.  */
1966   refattrs = MEM_ATTRS (ref);
1967   if (refattrs)
1968     {
1969       /* ??? Can this ever happen?  Calling this routine on a MEM that
1970 	 already carries memory attributes should probably be invalid.  */
1971       attrs.expr = refattrs->expr;
1972       attrs.offset_known_p = refattrs->offset_known_p;
1973       attrs.offset = refattrs->offset;
1974       attrs.size_known_p = refattrs->size_known_p;
1975       attrs.size = refattrs->size;
1976       attrs.align = refattrs->align;
1977     }
1978 
1979   /* Otherwise, default values from the mode of the MEM reference.  */
1980   else
1981     {
1982       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1983       gcc_assert (!defattrs->expr);
1984       gcc_assert (!defattrs->offset_known_p);
1985 
1986       /* Respect mode size.  */
1987       attrs.size_known_p = defattrs->size_known_p;
1988       attrs.size = defattrs->size;
1989       /* ??? Is this really necessary?  We probably should always get
1990 	 the size from the type below.  */
1991 
1992       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1993          if T is an object, always compute the object alignment below.  */
1994       if (TYPE_P (t))
1995 	attrs.align = defattrs->align;
1996       else
1997 	attrs.align = BITS_PER_UNIT;
1998       /* ??? If T is a type, respecting mode alignment may *also* be wrong
1999 	 e.g. if the type carries an alignment attribute.  Should we be
2000 	 able to simply always use TYPE_ALIGN?  */
2001     }
2002 
2003   /* We can set the alignment from the type if we are making an object or if
2004      this is an INDIRECT_REF.  */
2005   if (objectp || TREE_CODE (t) == INDIRECT_REF)
2006     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2007 
2008   /* If the size is known, we can set that.  */
2009   tree new_size = TYPE_SIZE_UNIT (type);
2010 
2011   /* The address-space is that of the type.  */
2012   as = TYPE_ADDR_SPACE (type);
2013 
2014   /* If T is not a type, we may be able to deduce some more information about
2015      the expression.  */
2016   if (! TYPE_P (t))
2017     {
2018       tree base;
2019 
2020       if (TREE_THIS_VOLATILE (t))
2021 	MEM_VOLATILE_P (ref) = 1;
2022 
2023       /* Now remove any conversions: they don't change what the underlying
2024 	 object is.  Likewise for SAVE_EXPR.  */
2025       while (CONVERT_EXPR_P (t)
2026 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
2027 	     || TREE_CODE (t) == SAVE_EXPR)
2028 	t = TREE_OPERAND (t, 0);
2029 
2030       /* Note whether this expression can trap.  */
2031       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2032 
2033       base = get_base_address (t);
2034       if (base)
2035 	{
2036 	  if (DECL_P (base)
2037 	      && TREE_READONLY (base)
2038 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2039 	      && !TREE_THIS_VOLATILE (base))
2040 	    MEM_READONLY_P (ref) = 1;
2041 
2042 	  /* Mark static const strings readonly as well.  */
2043 	  if (TREE_CODE (base) == STRING_CST
2044 	      && TREE_READONLY (base)
2045 	      && TREE_STATIC (base))
2046 	    MEM_READONLY_P (ref) = 1;
2047 
2048 	  /* Address-space information is on the base object.  */
2049 	  if (TREE_CODE (base) == MEM_REF
2050 	      || TREE_CODE (base) == TARGET_MEM_REF)
2051 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2052 								      0))));
2053 	  else
2054 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2055 	}
2056 
      /* If this expression uses its parent's alias set, mark it such
2058 	 that we won't change it.  */
2059       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2060 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
2061 
2062       /* If this is a decl, set the attributes of the MEM from it.  */
2063       if (DECL_P (t))
2064 	{
2065 	  attrs.expr = t;
2066 	  attrs.offset_known_p = true;
2067 	  attrs.offset = 0;
2068 	  apply_bitpos = bitpos;
2069 	  new_size = DECL_SIZE_UNIT (t);
2070 	}
2071 
2072       /* ???  If we end up with a constant or a descriptor do not
2073 	 record a MEM_EXPR.  */
2074       else if (CONSTANT_CLASS_P (t)
2075 	       || TREE_CODE (t) == CONSTRUCTOR)
2076 	;
2077 
2078       /* If this is a field reference, record it.  */
2079       else if (TREE_CODE (t) == COMPONENT_REF)
2080 	{
2081 	  attrs.expr = t;
2082 	  attrs.offset_known_p = true;
2083 	  attrs.offset = 0;
2084 	  apply_bitpos = bitpos;
2085 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2086 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2087 	}
2088 
2089       /* Else record it.  */
2090       else
2091 	{
2092 	  gcc_assert (handled_component_p (t)
2093 		      || TREE_CODE (t) == MEM_REF
2094 		      || TREE_CODE (t) == TARGET_MEM_REF);
2095 	  attrs.expr = t;
2096 	  attrs.offset_known_p = true;
2097 	  attrs.offset = 0;
2098 	  apply_bitpos = bitpos;
2099 	}
2100 
2101       /* If this is a reference based on a partitioned decl replace the
2102 	 base with a MEM_REF of the pointer representative we created
2103 	 during stack slot partitioning.  */
2104       if (attrs.expr
2105 	  && VAR_P (base)
2106 	  && ! is_global_var (base)
2107 	  && cfun->gimple_df->decls_to_pointers != NULL)
2108 	{
2109 	  tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2110 	  if (namep)
2111 	    {
2112 	      attrs.expr = unshare_expr (attrs.expr);
2113 	      tree *orig_base = &attrs.expr;
2114 	      while (handled_component_p (*orig_base))
2115 		orig_base = &TREE_OPERAND (*orig_base, 0);
2116 	      tree aptrt = reference_alias_ptr_type (*orig_base);
2117 	      *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2118 				   build_int_cst (aptrt, 0));
2119 	    }
2120 	}
2121 
2122       /* Compute the alignment.  */
2123       unsigned int obj_align;
2124       unsigned HOST_WIDE_INT obj_bitpos;
2125       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2126       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2127       if (diff_align != 0)
2128 	obj_align = MIN (obj_align, diff_align);
2129       attrs.align = MAX (attrs.align, obj_align);
2130     }
2131 
2132   poly_uint64 const_size;
2133   if (poly_int_tree_p (new_size, &const_size))
2134     {
2135       attrs.size_known_p = true;
2136       attrs.size = const_size;
2137     }
2138 
2139   /* If we modified OFFSET based on T, then subtract the outstanding
2140      bit position offset.  Similarly, increase the size of the accessed
2141      object to contain the negative offset.  */
2142   if (maybe_ne (apply_bitpos, 0))
2143     {
2144       gcc_assert (attrs.offset_known_p);
2145       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2146       attrs.offset -= bytepos;
2147       if (attrs.size_known_p)
2148 	attrs.size += bytepos;
2149     }
2150 
2151   /* Now set the attributes we computed above.  */
2152   attrs.addrspace = as;
2153   set_mem_attrs (ref, &attrs);
2154 }
2155 
/* Like set_mem_attributes_minus_bitpos, but with BITPOS of zero.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
2158 {
2159   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2160 }
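
/* Usage sketch (illustrative only): a typical expander builds a MEM and
   then derives its attributes from the tree node it came from.  DECL
   and ADDR stand for an already-expanded declaration and address rtx.

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   The nonzero OBJECTP says that DECL is the whole accessed object.  */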
2161 
2162 /* Set the alias set of MEM to SET.  */
2163 
2164 void
set_mem_alias_set (rtx mem, alias_set_type set)
2166 {
2167   /* If the new and old alias sets don't conflict, something is wrong.  */
2168   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2169   mem_attrs attrs (*get_mem_attrs (mem));
2170   attrs.alias = set;
2171   set_mem_attrs (mem, &attrs);
2172 }
2173 
2174 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
2175 
2176 void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
2178 {
2179   mem_attrs attrs (*get_mem_attrs (mem));
2180   attrs.addrspace = addrspace;
2181   set_mem_attrs (mem, &attrs);
2182 }
2183 
2184 /* Set the alignment of MEM to ALIGN bits.  */
2185 
2186 void
set_mem_align (rtx mem, unsigned int align)
2188 {
2189   mem_attrs attrs (*get_mem_attrs (mem));
2190   attrs.align = align;
2191   set_mem_attrs (mem, &attrs);
2192 }
2193 
2194 /* Set the expr for MEM to EXPR.  */
2195 
2196 void
set_mem_expr (rtx mem, tree expr)
2198 {
2199   mem_attrs attrs (*get_mem_attrs (mem));
2200   attrs.expr = expr;
2201   set_mem_attrs (mem, &attrs);
2202 }
2203 
2204 /* Set the offset of MEM to OFFSET.  */
2205 
2206 void
set_mem_offset (rtx mem, poly_int64 offset)
2208 {
2209   mem_attrs attrs (*get_mem_attrs (mem));
2210   attrs.offset_known_p = true;
2211   attrs.offset = offset;
2212   set_mem_attrs (mem, &attrs);
2213 }
2214 
2215 /* Clear the offset of MEM.  */
2216 
2217 void
clear_mem_offset (rtx mem)
2219 {
2220   mem_attrs attrs (*get_mem_attrs (mem));
2221   attrs.offset_known_p = false;
2222   set_mem_attrs (mem, &attrs);
2223 }
2224 
2225 /* Set the size of MEM to SIZE.  */
2226 
2227 void
set_mem_size (rtx mem, poly_int64 size)
2229 {
2230   mem_attrs attrs (*get_mem_attrs (mem));
2231   attrs.size_known_p = true;
2232   attrs.size = size;
2233   set_mem_attrs (mem, &attrs);
2234 }
2235 
2236 /* Clear the size of MEM.  */
2237 
2238 void
clear_mem_size (rtx mem)
2240 {
2241   mem_attrs attrs (*get_mem_attrs (mem));
2242   attrs.size_known_p = false;
2243   set_mem_attrs (mem, &attrs);
2244 }
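
/* Usage sketch (illustrative only): each setter above copies the
   attribute block, changes one field and installs the copy, e.g.

     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));
     set_mem_size (mem, 16);

   so an existing mem_attrs structure, which may be shared between MEMs,
   is never modified in place.  */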
2245 
2246 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2247    and its address changed to ADDR.  (VOIDmode means don't change the mode.
2248    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
2249    returned memory location is required to be valid.  INPLACE is true if any
2250    changes can be made directly to MEMREF or false if MEMREF must be treated
2251    as immutable.
2252 
2253    The memory attributes are not changed.  */
2254 
2255 static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
2258 {
2259   addr_space_t as;
2260   rtx new_rtx;
2261 
2262   gcc_assert (MEM_P (memref));
2263   as = MEM_ADDR_SPACE (memref);
2264   if (mode == VOIDmode)
2265     mode = GET_MODE (memref);
2266   if (addr == 0)
2267     addr = XEXP (memref, 0);
2268   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2269       && (!validate || memory_address_addr_space_p (mode, addr, as)))
2270     return memref;
2271 
2272   /* Don't validate address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
2274   if (validate && !lra_in_progress)
2275     {
2276       if (reload_in_progress || reload_completed)
2277 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2278       else
2279 	addr = memory_address_addr_space (mode, addr, as);
2280     }
2281 
2282   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2283     return memref;
2284 
2285   if (inplace)
2286     {
2287       XEXP (memref, 0) = addr;
2288       return memref;
2289     }
2290 
2291   new_rtx = gen_rtx_MEM (mode, addr);
2292   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2293   return new_rtx;
2294 }
2295 
2296 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2297    way we are changing MEMREF, so we only preserve the alias set.  */
2298 
2299 rtx
change_address (rtx memref, machine_mode mode, rtx addr)
2301 {
2302   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2303   machine_mode mmode = GET_MODE (new_rtx);
2304   class mem_attrs *defattrs;
2305 
2306   mem_attrs attrs (*get_mem_attrs (memref));
2307   defattrs = mode_mem_attrs[(int) mmode];
2308   attrs.expr = NULL_TREE;
2309   attrs.offset_known_p = false;
2310   attrs.size_known_p = defattrs->size_known_p;
2311   attrs.size = defattrs->size;
2312   attrs.align = defattrs->align;
2313 
2314   /* If there are no changes, just return the original memory reference.  */
2315   if (new_rtx == memref)
2316     {
2317       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2318 	return new_rtx;
2319 
2320       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2321       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2322     }
2323 
2324   set_mem_attrs (new_rtx, &attrs);
2325   return new_rtx;
2326 }
2327 
2328 /* Return a memory reference like MEMREF, but with its mode changed
2329    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2330    nonzero, the memory address is forced to be valid.
2331    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.
2333    If ADJUST_OBJECT is zero, the underlying object associated with the
2334    memory reference is left unchanged and the caller is responsible for
2335    dealing with it.  Otherwise, if the new memory reference is outside
2336    the underlying object, even partially, then the object is dropped.
2337    SIZE, if nonzero, is the size of an access in cases where MODE
2338    has no inherent size.  */
2339 
2340 rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
2344 {
2345   rtx addr = XEXP (memref, 0);
2346   rtx new_rtx;
2347   scalar_int_mode address_mode;
2348   class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2349   unsigned HOST_WIDE_INT max_align;
2350 #ifdef POINTERS_EXTEND_UNSIGNED
2351   scalar_int_mode pointer_mode
2352     = targetm.addr_space.pointer_mode (attrs.addrspace);
2353 #endif
2354 
2355   /* VOIDmode means no mode change for change_address_1.  */
2356   if (mode == VOIDmode)
2357     mode = GET_MODE (memref);
2358 
2359   /* Take the size of non-BLKmode accesses from the mode.  */
2360   defattrs = mode_mem_attrs[(int) mode];
2361   if (defattrs->size_known_p)
2362     size = defattrs->size;
2363 
2364   /* If there are no changes, just return the original memory reference.  */
2365   if (mode == GET_MODE (memref)
2366       && known_eq (offset, 0)
2367       && (known_eq (size, 0)
2368 	  || (attrs.size_known_p && known_eq (attrs.size, size)))
2369       && (!validate || memory_address_addr_space_p (mode, addr,
2370 						    attrs.addrspace)))
2371     return memref;
2372 
2373   /* ??? Prefer to create garbage instead of creating shared rtl.
2374      This may happen even if offset is nonzero -- consider
2375      (plus (plus reg reg) const_int) -- so do this always.  */
2376   addr = copy_rtx (addr);
2377 
2378   /* Convert a possibly large offset to a signed value within the
2379      range of the target address space.  */
2380   address_mode = get_address_mode (memref);
2381   offset = trunc_int_for_mode (offset, address_mode);
2382 
2383   if (adjust_address)
2384     {
2385       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2386 	 object, we can merge it into the LO_SUM.  */
2387       if (GET_MODE (memref) != BLKmode
2388 	  && GET_CODE (addr) == LO_SUM
2389 	  && known_in_range_p (offset,
2390 			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2391 				   / BITS_PER_UNIT)))
2392 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2393 			       plus_constant (address_mode,
2394 					      XEXP (addr, 1), offset));
2395 #ifdef POINTERS_EXTEND_UNSIGNED
2396       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2397 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
2398 	 the fact that pointers are not allowed to overflow.  */
2399       else if (POINTERS_EXTEND_UNSIGNED > 0
2400 	       && GET_CODE (addr) == ZERO_EXTEND
2401 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2402 	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2403 	addr = gen_rtx_ZERO_EXTEND (address_mode,
2404 				    plus_constant (pointer_mode,
2405 						   XEXP (addr, 0), offset));
2406 #endif
2407       else
2408 	addr = plus_constant (address_mode, addr, offset);
2409     }
2410 
2411   new_rtx = change_address_1 (memref, mode, addr, validate, false);
2412 
2413   /* If the address is a REG, change_address_1 rightfully returns memref,
2414      but this would destroy memref's MEM_ATTRS.  */
2415   if (new_rtx == memref && maybe_ne (offset, 0))
2416     new_rtx = copy_rtx (new_rtx);
2417 
2418   /* Conservatively drop the object if we don't know where we start from.  */
2419   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2420     {
2421       attrs.expr = NULL_TREE;
2422       attrs.alias = 0;
2423     }
2424 
2425   /* Compute the new values of the memory attributes due to this adjustment.
2426      We add the offsets and update the alignment.  */
2427   if (attrs.offset_known_p)
2428     {
2429       attrs.offset += offset;
2430 
2431       /* Drop the object if the new left end is not within its bounds.  */
2432       if (adjust_object && maybe_lt (attrs.offset, 0))
2433 	{
2434 	  attrs.expr = NULL_TREE;
2435 	  attrs.alias = 0;
2436 	}
2437     }
2438 
2439   /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if
     OFFSET is zero.  */
2442   if (maybe_ne (offset, 0))
2443     {
2444       max_align = known_alignment (offset) * BITS_PER_UNIT;
2445       attrs.align = MIN (attrs.align, max_align);
2446     }
2447 
2448   if (maybe_ne (size, 0))
2449     {
2450       /* Drop the object if the new right end is not within its bounds.  */
2451       if (adjust_object && maybe_gt (offset + size, attrs.size))
2452 	{
2453 	  attrs.expr = NULL_TREE;
2454 	  attrs.alias = 0;
2455 	}
2456       attrs.size_known_p = true;
2457       attrs.size = size;
2458     }
2459   else if (attrs.size_known_p)
2460     {
2461       gcc_assert (!adjust_object);
2462       attrs.size -= offset;
2463       /* ??? The store_by_pieces machinery generates negative sizes,
2464 	 so don't assert for that here.  */
2465     }
2466 
2467   set_mem_attrs (new_rtx, &attrs);
2468 
2469   return new_rtx;
2470 }
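
/* Usage sketch (illustrative only): most callers go through the
   adjust_address and adjust_address_nv macros, which supply fixed
   values for the last four arguments.  Accessing the SImode word that
   lies 4 bytes into a DImode memory operand OP looks like

     rtx word1 = adjust_address (op, SImode, 4);
*/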
2471 
2472 /* Return a memory reference like MEMREF, but with its mode changed
2473    to MODE and its address changed to ADDR, which is assumed to be
2474    MEMREF offset by OFFSET bytes.  If VALIDATE is
2475    nonzero, the memory address is forced to be valid.  */
2476 
2477 rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
			     poly_int64 offset, int validate)
2480 {
2481   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2482   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2483 }
2484 
2485 /* Return a memory reference like MEMREF, but whose address is changed by
2486    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2487    known to be in OFFSET (possibly 1).  */
2488 
2489 rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2491 {
2492   rtx new_rtx, addr = XEXP (memref, 0);
2493   machine_mode address_mode;
2494   class mem_attrs *defattrs;
2495 
2496   mem_attrs attrs (*get_mem_attrs (memref));
2497   address_mode = get_address_mode (memref);
2498   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2499 
2500   /* At this point we don't know _why_ the address is invalid.  It
2501      could have secondary memory references, multiplies or anything.
2502 
2503      However, if we did go and rearrange things, we can wind up not
2504      being able to recognize the magic around pic_offset_table_rtx.
2505      This stuff is fragile, and is yet another example of why it is
2506      bad to expose PIC machinery too early.  */
2507   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2508 				     attrs.addrspace)
2509       && GET_CODE (addr) == PLUS
2510       && XEXP (addr, 0) == pic_offset_table_rtx)
2511     {
2512       addr = force_reg (GET_MODE (addr), addr);
2513       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2514     }
2515 
2516   update_temp_slot_address (XEXP (memref, 0), new_rtx);
2517   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2518 
2519   /* If there are no changes, just return the original memory reference.  */
2520   if (new_rtx == memref)
2521     return new_rtx;
2522 
2523   /* Update the alignment to reflect the offset.  Reset the offset, which
2524      we don't know.  */
2525   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2526   attrs.offset_known_p = false;
2527   attrs.size_known_p = defattrs->size_known_p;
2528   attrs.size = defattrs->size;
2529   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2530   set_mem_attrs (new_rtx, &attrs);
2531   return new_rtx;
2532 }
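
/* Usage sketch (illustrative only): forming a variable-indexed
   reference from an existing MEM.  INDEX_REG is a hypothetical pseudo
   holding a byte offset known to be a multiple of 4.

     rtx elt = offset_address (array_mem, index_reg, 4);

   The known factor of 4 lets the new MEM keep 32-bit alignment.  */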
2533 
2534 /* Return a memory reference like MEMREF, but with its address changed to
2535    ADDR.  The caller is asserting that the actual piece of memory pointed
2536    to is the same, just the form of the address is being changed, such as
2537    by putting something into a register.  INPLACE is true if any changes
2538    can be made directly to MEMREF or false if MEMREF must be treated as
2539    immutable.  */
2540 
2541 rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
2543 {
2544   /* change_address_1 copies the memory attribute structure without change
2545      and that's exactly what we want here.  */
2546   update_temp_slot_address (XEXP (memref, 0), addr);
2547   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2548 }
2549 
2550 /* Likewise, but the reference is not required to be valid.  */
2551 
2552 rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2554 {
2555   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2556 }
2557 
2558 /* Return a memory reference like MEMREF, but with its mode widened to
2559    MODE and offset by OFFSET.  This would be used by targets that e.g.
2560    cannot issue QImode memory operations and have to use SImode memory
2561    operations plus masking logic.  */
2562 
2563 rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2565 {
2566   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2567   poly_uint64 size = GET_MODE_SIZE (mode);
2568 
2569   /* If there are no changes, just return the original memory reference.  */
2570   if (new_rtx == memref)
2571     return new_rtx;
2572 
2573   mem_attrs attrs (*get_mem_attrs (new_rtx));
2574 
2575   /* If we don't know what offset we were at within the expression, then
2576      we can't know if we've overstepped the bounds.  */
2577   if (! attrs.offset_known_p)
2578     attrs.expr = NULL_TREE;
2579 
2580   while (attrs.expr)
2581     {
2582       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2583 	{
2584 	  tree field = TREE_OPERAND (attrs.expr, 1);
2585 	  tree offset = component_ref_field_offset (attrs.expr);
2586 
2587 	  if (! DECL_SIZE_UNIT (field))
2588 	    {
2589 	      attrs.expr = NULL_TREE;
2590 	      break;
2591 	    }
2592 
2593 	  /* Is the field at least as large as the access?  If so, ok,
2594 	     otherwise strip back to the containing structure.  */
2595 	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2596 	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2597 	      && known_ge (attrs.offset, 0))
2598 	    break;
2599 
2600 	  poly_uint64 suboffset;
2601 	  if (!poly_int_tree_p (offset, &suboffset))
2602 	    {
2603 	      attrs.expr = NULL_TREE;
2604 	      break;
2605 	    }
2606 
2607 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2608 	  attrs.offset += suboffset;
2609 	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2610 			   / BITS_PER_UNIT);
2611 	}
2612       /* Similarly for the decl.  */
2613       else if (DECL_P (attrs.expr)
2614 	       && DECL_SIZE_UNIT (attrs.expr)
2615 	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2616 	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2617 			   size)
2618 	       && known_ge (attrs.offset, 0))
2619 	break;
2620       else
2621 	{
2622 	  /* The widened memory access overflows the expression, which means
2623 	     that it could alias another expression.  Zap it.  */
2624 	  attrs.expr = NULL_TREE;
2625 	  break;
2626 	}
2627     }
2628 
2629   if (! attrs.expr)
2630     attrs.offset_known_p = false;
2631 
2632   /* The widened memory may alias other stuff, so zap the alias set.  */
2633   /* ??? Maybe use get_alias_set on any remaining expression.  */
2634   attrs.alias = 0;
2635   attrs.size_known_p = true;
2636   attrs.size = size;
2637   set_mem_attrs (new_rtx, &attrs);
2638   return new_rtx;
2639 }
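
/* Usage sketch (illustrative only): a target that cannot issue QImode
   loads directly might widen a byte reference BYTE_MEM (a hypothetical
   QImode MEM) to a full word at the same address and mask afterwards.

     rtx wide = widen_memory_access (byte_mem, SImode, 0);
*/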
2640 
2641 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
2642 static GTY(()) tree spill_slot_decl;
2643 
/* Return the fake spill slot decl, creating it first if it does not
   exist yet and FORCE_BUILD_P is true.  */

tree
get_spill_slot_decl (bool force_build_p)
2646 {
2647   tree d = spill_slot_decl;
2648   rtx rd;
2649 
2650   if (d || !force_build_p)
2651     return d;
2652 
2653   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2654 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2655   DECL_ARTIFICIAL (d) = 1;
2656   DECL_IGNORED_P (d) = 1;
2657   TREE_USED (d) = 1;
2658   spill_slot_decl = d;
2659 
2660   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2661   MEM_NOTRAP_P (rd) = 1;
2662   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2663   attrs.alias = new_alias_set ();
2664   attrs.expr = d;
2665   set_mem_attrs (rd, &attrs);
2666   SET_DECL_RTL (d, rd);
2667 
2668   return d;
2669 }
2670 
2671 /* Given MEM, a result from assign_stack_local, fill in the memory
2672    attributes as appropriate for a register allocator spill slot.
2673    These slots are not aliasable by other memory.  We arrange for
2674    them all to use a single MEM_EXPR, so that the aliasing code can
2675    work properly in the case of shared spill slots.  */
2676 
2677 void
set_mem_attrs_for_spill (rtx mem)
2679 {
2680   rtx addr;
2681 
2682   mem_attrs attrs (*get_mem_attrs (mem));
2683   attrs.expr = get_spill_slot_decl (true);
2684   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2685   attrs.addrspace = ADDR_SPACE_GENERIC;
2686 
2687   /* We expect the incoming memory to be of the form:
2688 	(mem:MODE (plus (reg sfp) (const_int offset)))
2689      with perhaps the plus missing for offset = 0.  */
2690   addr = XEXP (mem, 0);
2691   attrs.offset_known_p = true;
2692   strip_offset (addr, &attrs.offset);
2693 
2694   set_mem_attrs (mem, &attrs);
2695   MEM_NOTRAP_P (mem) = 1;
2696 }
2697 
2698 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2699 
2700 rtx_code_label *
gen_label_rtx (void)
2702 {
2703   return as_a <rtx_code_label *> (
2704 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2705 				NULL, label_num++, NULL));
2706 }
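
/* Usage sketch (illustrative only): the usual pattern is to create a
   label, emit a branch to it, and later emit the label itself.

     rtx_code_label *done = gen_label_rtx ();
     ... emit a conditional branch to DONE ...
     emit_label (done);
*/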
2707 
2708 /* For procedure integration.  */
2709 
2710 /* Install new pointers to the first and last insns in the chain.
2711    Also, set cur_insn_uid to one higher than the last in use.
2712    Used for an inline-procedure after copying the insn chain.  */
2713 
2714 void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2716 {
2717   rtx_insn *insn;
2718 
2719   set_first_insn (first);
2720   set_last_insn (last);
2721   cur_insn_uid = 0;
2722 
2723   if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2724     {
2725       int debug_count = 0;
2726 
2727       cur_insn_uid = param_min_nondebug_insn_uid - 1;
2728       cur_debug_insn_uid = 0;
2729 
2730       for (insn = first; insn; insn = NEXT_INSN (insn))
2731 	if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2732 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2733 	else
2734 	  {
2735 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2736 	    if (DEBUG_INSN_P (insn))
2737 	      debug_count++;
2738 	  }
2739 
2740       if (debug_count)
2741 	cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2742       else
2743 	cur_debug_insn_uid++;
2744     }
2745   else
2746     for (insn = first; insn; insn = NEXT_INSN (insn))
2747       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2748 
2749   cur_insn_uid++;
2750 }
2751 
2752 /* Go through all the RTL insn bodies and copy any invalid shared
2753    structure.  This routine should only be called once.  */
2754 
2755 static void
unshare_all_rtl_1 (rtx_insn *insn)
2757 {
2758   /* Unshare just about everything else.  */
2759   unshare_all_rtl_in_chain (insn);
2760 
2761   /* Make sure the addresses of stack slots found outside the insn chain
2762      (such as, in DECL_RTL of a variable) are not shared
2763      with the insn chain.
2764 
2765      This special care is necessary when the stack slot MEM does not
2766      actually appear in the insn chain.  If it does appear, its address
2767      is unshared from all else at that point.  */
2768   unsigned int i;
2769   rtx temp;
2770   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2771     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2772 }
2773 
2774 /* Go through all the RTL insn bodies and copy any invalid shared
2775    structure, again.  This is a fairly expensive thing to do so it
2776    should be done sparingly.  */
2777 
2778 void
unshare_all_rtl_again (rtx_insn *insn)
2780 {
2781   rtx_insn *p;
2782   tree decl;
2783 
2784   for (p = insn; p; p = NEXT_INSN (p))
2785     if (INSN_P (p))
2786       {
2787 	reset_used_flags (PATTERN (p));
2788 	reset_used_flags (REG_NOTES (p));
2789 	if (CALL_P (p))
2790 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2791       }
2792 
2793   /* Make sure that virtual stack slots are not shared.  */
2794   set_used_decls (DECL_INITIAL (cfun->decl));
2795 
2796   /* Make sure that virtual parameters are not shared.  */
2797   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2798     set_used_flags (DECL_RTL (decl));
2799 
2800   rtx temp;
2801   unsigned int i;
2802   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2803     reset_used_flags (temp);
2804 
2805   unshare_all_rtl_1 (insn);
2806 }
2807 
/* Unshare all RTL in the current function, including the DECL_RTLs
   and DECL_INCOMING_RTLs of its arguments.  */

unsigned int
unshare_all_rtl (void)
2810 {
2811   unshare_all_rtl_1 (get_insns ());
2812 
2813   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2814     {
2815       if (DECL_RTL_SET_P (decl))
2816 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2817       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2818     }
2819 
2820   return 0;
2821 }
2822 
2823 
2824 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2825    Recursively does the same for subexpressions.  */
2826 
2827 static void
verify_rtx_sharing (rtx orig, rtx insn)
2829 {
2830   rtx x = orig;
2831   int i;
2832   enum rtx_code code;
2833   const char *format_ptr;
2834 
2835   if (x == 0)
2836     return;
2837 
2838   code = GET_CODE (x);
2839 
2840   /* These types may be freely shared.  */
2841 
2842   switch (code)
2843     {
2844     case REG:
2845     case DEBUG_EXPR:
2846     case VALUE:
2847     CASE_CONST_ANY:
2848     case SYMBOL_REF:
2849     case LABEL_REF:
2850     case CODE_LABEL:
2851     case PC:
2852     case RETURN:
2853     case SIMPLE_RETURN:
2854     case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct values.  */
2856       return;
2857     case CLOBBER:
2858       /* Share clobbers of hard registers, but do not share pseudo reg
2859          clobbers or clobbers of hard registers that originated as pseudos.
2860          This is needed to allow safe register renaming.  */
2861       if (REG_P (XEXP (x, 0))
2862 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2863 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2864 	return;
2865       break;
2866 
2867     case CONST:
2868       if (shared_const_p (orig))
2869 	return;
2870       break;
2871 
2872     case MEM:
2873       /* A MEM is allowed to be shared if its address is constant.  */
2874       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2875 	  || reload_completed || reload_in_progress)
2876 	return;
2877 
2878       break;
2879 
2880     default:
2881       break;
2882     }
2883 
  /* This rtx may not be shared.  If its used flag is already set,
     report an RTL sharing violation.  */
2886   if (flag_checking && RTX_FLAG (x, used))
2887     {
2888       error ("invalid rtl sharing found in the insn");
2889       debug_rtx (insn);
2890       error ("shared rtx");
2891       debug_rtx (x);
2892       internal_error ("internal consistency failure");
2893     }
2894   gcc_assert (!RTX_FLAG (x, used));
2895 
2896   RTX_FLAG (x, used) = 1;
2897 
2898   /* Now scan the subexpressions recursively.  */
2899 
2900   format_ptr = GET_RTX_FORMAT (code);
2901 
2902   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2903     {
2904       switch (*format_ptr++)
2905 	{
2906 	case 'e':
2907 	  verify_rtx_sharing (XEXP (x, i), insn);
2908 	  break;
2909 
2910 	case 'E':
2911 	  if (XVEC (x, i) != NULL)
2912 	    {
2913 	      int j;
2914 	      int len = XVECLEN (x, i);
2915 
2916 	      for (j = 0; j < len; j++)
2917 		{
		  /* We allow sharing of ASM_OPERANDS inside a single
		     instruction.  */
2920 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2921 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2922 			  == ASM_OPERANDS))
2923 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2924 		  else
2925 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2926 		}
2927 	    }
2928 	  break;
2929 	}
2930     }
2931   return;
2932 }
2933 
2934 /* Reset used-flags for INSN.  */
2935 
2936 static void
reset_insn_used_flags (rtx insn)
2938 {
2939   gcc_assert (INSN_P (insn));
2940   reset_used_flags (PATTERN (insn));
2941   reset_used_flags (REG_NOTES (insn));
2942   if (CALL_P (insn))
2943     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2944 }
2945 
2946 /* Go through all the RTL insn bodies and clear all the USED bits.  */
2947 
2948 static void
reset_all_used_flags (void)
2950 {
2951   rtx_insn *p;
2952 
2953   for (p = get_insns (); p; p = NEXT_INSN (p))
2954     if (INSN_P (p))
2955       {
2956 	rtx pat = PATTERN (p);
2957 	if (GET_CODE (pat) != SEQUENCE)
2958 	  reset_insn_used_flags (p);
2959 	else
2960 	  {
2961 	    gcc_assert (REG_NOTES (p) == NULL);
2962 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
2963 	      {
2964 		rtx insn = XVECEXP (pat, 0, i);
2965 		if (INSN_P (insn))
2966 		  reset_insn_used_flags (insn);
2967 	      }
2968 	  }
2969       }
2970 }
2971 
2972 /* Verify sharing in INSN.  */
2973 
2974 static void
verify_insn_sharing (rtx insn)
2976 {
2977   gcc_assert (INSN_P (insn));
2978   verify_rtx_sharing (PATTERN (insn), insn);
2979   verify_rtx_sharing (REG_NOTES (insn), insn);
2980   if (CALL_P (insn))
2981     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2982 }
2983 
2984 /* Go through all the RTL insn bodies and check that there is no unexpected
2985    sharing in between the subexpressions.  */
2986 
2987 DEBUG_FUNCTION void
verify_rtl_sharing (void)
2989 {
2990   rtx_insn *p;
2991 
2992   timevar_push (TV_VERIFY_RTL_SHARING);
2993 
2994   reset_all_used_flags ();
2995 
2996   for (p = get_insns (); p; p = NEXT_INSN (p))
2997     if (INSN_P (p))
2998       {
2999 	rtx pat = PATTERN (p);
3000 	if (GET_CODE (pat) != SEQUENCE)
3001 	  verify_insn_sharing (p);
3002 	else
3003 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
3004 	      {
3005 		rtx insn = XVECEXP (pat, 0, i);
3006 		if (INSN_P (insn))
3007 		  verify_insn_sharing (insn);
3008 	      }
3009       }
3010 
3011   reset_all_used_flags ();
3012 
3013   timevar_pop (TV_VERIFY_RTL_SHARING);
3014 }
3015 
3016 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3017    Assumes the mark bits are cleared at entry.  */
3018 
3019 void
unshare_all_rtl_in_chain (rtx_insn *insn)
3021 {
3022   for (; insn; insn = NEXT_INSN (insn))
3023     if (INSN_P (insn))
3024       {
3025 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3026 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3027 	if (CALL_P (insn))
3028 	  CALL_INSN_FUNCTION_USAGE (insn)
3029 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3030       }
3031 }
3032 
3033 /* Go through all virtual stack slots of a function and mark them as
3034    shared.  We never replace the DECL_RTLs themselves with a copy,
3035    but expressions mentioned into a DECL_RTL cannot be shared with
3036    expressions in the instruction stream.
3037 
3038    Note that reload may convert pseudo registers into memories in-place.
3039    Pseudo registers are always shared, but MEMs never are.  Thus if we
3040    reset the used flags on MEMs in the instruction stream, we must set
3041    them again on MEMs that appear in DECL_RTLs.  */
3042 
3043 static void
set_used_decls (tree blk)
3045 {
3046   tree t;
3047 
3048   /* Mark decls.  */
3049   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3050     if (DECL_RTL_SET_P (t))
3051       set_used_flags (DECL_RTL (t));
3052 
3053   /* Now process sub-blocks.  */
3054   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3055     set_used_decls (t);
3056 }
3057 
3058 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3059    Recursively does the same for subexpressions.  Uses
3060    copy_rtx_if_shared_1 to reduce stack space.  */
3061 
3062 rtx
copy_rtx_if_shared (rtx orig)
3064 {
3065   copy_rtx_if_shared_1 (&orig);
3066   return orig;
3067 }
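
/* Usage sketch (illustrative only): to unshare a single expression X
   against itself, clear its used flags first and then copy:

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

   unshare_all_rtl_in_chain above applies the same idea across a whole
   insn chain.  */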
3068 
3069 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3070    use.  Recursively does the same for subexpressions.  */
3071 
3072 static void
copy_rtx_if_shared_1 (rtx *orig1)
3074 {
3075   rtx x;
3076   int i;
3077   enum rtx_code code;
3078   rtx *last_ptr;
3079   const char *format_ptr;
3080   int copied = 0;
3081   int length;
3082 
3083   /* Repeat is used to turn tail-recursion into iteration.  */
3084 repeat:
3085   x = *orig1;
3086 
3087   if (x == 0)
3088     return;
3089 
3090   code = GET_CODE (x);
3091 
3092   /* These types may be freely shared.  */
3093 
3094   switch (code)
3095     {
3096     case REG:
3097     case DEBUG_EXPR:
3098     case VALUE:
3099     CASE_CONST_ANY:
3100     case SYMBOL_REF:
3101     case LABEL_REF:
3102     case CODE_LABEL:
3103     case PC:
3104     case RETURN:
3105     case SIMPLE_RETURN:
3106     case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct values.  */
3108       return;
3109     case CLOBBER:
3110       /* Share clobbers of hard registers, but do not share pseudo reg
3111          clobbers or clobbers of hard registers that originated as pseudos.
3112          This is needed to allow safe register renaming.  */
3113       if (REG_P (XEXP (x, 0))
3114 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3115 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3116 	return;
3117       break;
3118 
3119     case CONST:
3120       if (shared_const_p (x))
3121 	return;
3122       break;
3123 
3124     case DEBUG_INSN:
3125     case INSN:
3126     case JUMP_INSN:
3127     case CALL_INSN:
3128     case NOTE:
3129     case BARRIER:
3130       /* The chain of insns is not being copied.  */
3131       return;
3132 
3133     default:
3134       break;
3135     }
3136 
3137   /* This rtx may not be shared.  If it has already been seen,
3138      replace it with a copy of itself.  */
3139 
3140   if (RTX_FLAG (x, used))
3141     {
3142       x = shallow_copy_rtx (x);
3143       copied = 1;
3144     }
3145   RTX_FLAG (x, used) = 1;
3146 
3147   /* Now scan the subexpressions recursively.
3148      We can store any replaced subexpressions directly into X
3149      since we know X is not shared!  Any vectors in X
3150      must be copied if X was copied.  */
3151 
3152   format_ptr = GET_RTX_FORMAT (code);
3153   length = GET_RTX_LENGTH (code);
3154   last_ptr = NULL;
3155 
3156   for (i = 0; i < length; i++)
3157     {
3158       switch (*format_ptr++)
3159 	{
3160 	case 'e':
3161           if (last_ptr)
3162             copy_rtx_if_shared_1 (last_ptr);
3163 	  last_ptr = &XEXP (x, i);
3164 	  break;
3165 
3166 	case 'E':
3167 	  if (XVEC (x, i) != NULL)
3168 	    {
3169 	      int j;
3170 	      int len = XVECLEN (x, i);
3171 
              /* Copy the vector iff we copied the rtx and the length
		 is nonzero.  */
3174 	      if (copied && len > 0)
3175 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3176 
3177               /* Call recursively on all inside the vector.  */
3178 	      for (j = 0; j < len; j++)
3179                 {
3180 		  if (last_ptr)
3181 		    copy_rtx_if_shared_1 (last_ptr);
3182                   last_ptr = &XVECEXP (x, i, j);
3183                 }
3184 	    }
3185 	  break;
3186 	}
3187     }
3188   *orig1 = x;
3189   if (last_ptr)
3190     {
3191       orig1 = last_ptr;
3192       goto repeat;
3193     }
3194   return;
3195 }
3196 
3197 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
3198 
3199 static void
mark_used_flags (rtx x, int flag)
3201 {
3202   int i, j;
3203   enum rtx_code code;
3204   const char *format_ptr;
3205   int length;
3206 
3207   /* Repeat is used to turn tail-recursion into iteration.  */
3208 repeat:
3209   if (x == 0)
3210     return;
3211 
3212   code = GET_CODE (x);
3213 
3214   /* These types may be freely shared so we needn't do any resetting
3215      for them.  */
3216 
3217   switch (code)
3218     {
3219     case REG:
3220     case DEBUG_EXPR:
3221     case VALUE:
3222     CASE_CONST_ANY:
3223     case SYMBOL_REF:
3224     case CODE_LABEL:
3225     case PC:
3226     case RETURN:
3227     case SIMPLE_RETURN:
3228       return;
3229 
3230     case DEBUG_INSN:
3231     case INSN:
3232     case JUMP_INSN:
3233     case CALL_INSN:
3234     case NOTE:
3235     case LABEL_REF:
3236     case BARRIER:
3237       /* The chain of insns is not being copied.  */
3238       return;
3239 
3240     default:
3241       break;
3242     }
3243 
3244   RTX_FLAG (x, used) = flag;
3245 
3246   format_ptr = GET_RTX_FORMAT (code);
3247   length = GET_RTX_LENGTH (code);
3248 
3249   for (i = 0; i < length; i++)
3250     {
3251       switch (*format_ptr++)
3252 	{
3253 	case 'e':
3254           if (i == length-1)
3255             {
3256               x = XEXP (x, i);
3257 	      goto repeat;
3258             }
3259 	  mark_used_flags (XEXP (x, i), flag);
3260 	  break;
3261 
3262 	case 'E':
3263 	  for (j = 0; j < XVECLEN (x, i); j++)
3264 	    mark_used_flags (XVECEXP (x, i, j), flag);
3265 	  break;
3266 	}
3267     }
3268 }
3269 
3270 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3271    to look for shared sub-parts.  */
3272 
3273 void
reset_used_flags (rtx x)
3275 {
3276   mark_used_flags (x, 0);
3277 }
3278 
3279 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3280    to look for shared sub-parts.  */
3281 
3282 void
set_used_flags (rtx x)
3284 {
3285   mark_used_flags (x, 1);
3286 }
3287 
3288 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3289    Return X or the rtx for the pseudo reg the value of X was copied into.
3290    OTHER must be valid as a SET_DEST.  */
3291 
3292 rtx
make_safe_from (rtx x, rtx other)
3294 {
3295   while (1)
3296     switch (GET_CODE (other))
3297       {
3298       case SUBREG:
3299 	other = SUBREG_REG (other);
3300 	break;
3301       case STRICT_LOW_PART:
3302       case SIGN_EXTEND:
3303       case ZERO_EXTEND:
3304 	other = XEXP (other, 0);
3305 	break;
3306       default:
3307 	goto done;
3308       }
3309  done:
3310   if ((MEM_P (other)
3311        && ! CONSTANT_P (x)
3312        && !REG_P (x)
3313        && GET_CODE (x) != SUBREG)
3314       || (REG_P (other)
3315 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
3316 	      || reg_mentioned_p (other, x))))
3317     {
3318       rtx temp = gen_reg_rtx (GET_MODE (x));
3319       emit_move_insn (temp, x);
3320       return temp;
3321     }
3322   return x;
3323 }
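
/* An illustrative sketch: an expander about to store into a destination
   that might overlap the source can write (SRC and DEST are placeholders)

	src = make_safe_from (src, dest);
	emit_move_insn (dest, src);

   so that SRC is copied into a fresh pseudo only when DEST could
   actually clobber it.  */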
3324 
3325 /* Emission of insns (adding them to the doubly-linked list).  */
3326 
3327 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
3328 
3329 rtx_insn *
3330 get_last_insn_anywhere (void)
3331 {
3332   struct sequence_stack *seq;
3333   for (seq = get_current_sequence (); seq; seq = seq->next)
3334     if (seq->last != 0)
3335       return seq->last;
3336   return 0;
3337 }
3338 
3339 /* Return the first nonnote insn emitted in current sequence or current
3340    function.  This routine looks inside SEQUENCEs.  */
3341 
3342 rtx_insn *
3343 get_first_nonnote_insn (void)
3344 {
3345   rtx_insn *insn = get_insns ();
3346 
3347   if (insn)
3348     {
3349       if (NOTE_P (insn))
3350 	for (insn = next_insn (insn);
3351 	     insn && NOTE_P (insn);
3352 	     insn = next_insn (insn))
3353 	  continue;
3354       else
3355 	{
3356 	  if (NONJUMP_INSN_P (insn)
3357 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3358 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3359 	}
3360     }
3361 
3362   return insn;
3363 }
3364 
3365 /* Return the last nonnote insn emitted in current sequence or current
3366    function.  This routine looks inside SEQUENCEs.  */
3367 
3368 rtx_insn *
3369 get_last_nonnote_insn (void)
3370 {
3371   rtx_insn *insn = get_last_insn ();
3372 
3373   if (insn)
3374     {
3375       if (NOTE_P (insn))
3376 	for (insn = previous_insn (insn);
3377 	     insn && NOTE_P (insn);
3378 	     insn = previous_insn (insn))
3379 	  continue;
3380       else
3381 	{
3382 	  if (NONJUMP_INSN_P (insn))
3383 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3384 	      insn = seq->insn (seq->len () - 1);
3385 	}
3386     }
3387 
3388   return insn;
3389 }
3390 
3391 /* Return the number of actual (non-debug) insns emitted in this
3392    function.  */
3393 
3394 int
3395 get_max_insn_count (void)
3396 {
3397   int n = cur_insn_uid;
3398 
3399   /* The table size must be stable across -g, to avoid codegen
3400      differences due to debug insns, and not be affected by
3401      -fmin-insn-uid, to avoid excessive table size and to simplify
3402      debugging of -fcompare-debug failures.  */
3403   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3404     n -= cur_debug_insn_uid;
3405   else
3406     n -= param_min_nondebug_insn_uid;
3407 
3408   return n;
3409 }
3410 
3411 
3412 /* Return the next insn.  If it is a SEQUENCE, return the first insn
3413    of the sequence.  */
3414 
3415 rtx_insn *
3416 next_insn (rtx_insn *insn)
3417 {
3418   if (insn)
3419     {
3420       insn = NEXT_INSN (insn);
3421       if (insn && NONJUMP_INSN_P (insn)
3422 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3423 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3424     }
3425 
3426   return insn;
3427 }
3428 
3429 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3430    of the sequence.  */
3431 
3432 rtx_insn *
3433 previous_insn (rtx_insn *insn)
3434 {
3435   if (insn)
3436     {
3437       insn = PREV_INSN (insn);
3438       if (insn && NONJUMP_INSN_P (insn))
3439 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3440 	  insn = seq->insn (seq->len () - 1);
3441     }
3442 
3443   return insn;
3444 }
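
/* An illustrative sketch: walking the whole chain while also visiting
   the insns packed inside delay-slot SEQUENCEs can be written as

	for (rtx_insn *i = get_insns (); i; i = next_insn (i))
	  if (INSN_P (i))
	    ...

   whereas a plain NEXT_INSN walk would see the SEQUENCE insn itself
   rather than its elements.  */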
3445 
3446 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3447    look inside SEQUENCEs.  */
3448 
3449 rtx_insn *
3450 next_nonnote_insn (rtx_insn *insn)
3451 {
3452   while (insn)
3453     {
3454       insn = NEXT_INSN (insn);
3455       if (insn == 0 || !NOTE_P (insn))
3456 	break;
3457     }
3458 
3459   return insn;
3460 }
3461 
3462 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3463    routine does not look inside SEQUENCEs.  */
3464 
3465 rtx_insn *
3466 next_nondebug_insn (rtx_insn *insn)
3467 {
3468   while (insn)
3469     {
3470       insn = NEXT_INSN (insn);
3471       if (insn == 0 || !DEBUG_INSN_P (insn))
3472 	break;
3473     }
3474 
3475   return insn;
3476 }
3477 
3478 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3479    not look inside SEQUENCEs.  */
3480 
3481 rtx_insn *
3482 prev_nonnote_insn (rtx_insn *insn)
3483 {
3484   while (insn)
3485     {
3486       insn = PREV_INSN (insn);
3487       if (insn == 0 || !NOTE_P (insn))
3488 	break;
3489     }
3490 
3491   return insn;
3492 }
3493 
3494 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3495    This routine does not look inside SEQUENCEs.  */
3496 
3497 rtx_insn *
3498 prev_nondebug_insn (rtx_insn *insn)
3499 {
3500   while (insn)
3501     {
3502       insn = PREV_INSN (insn);
3503       if (insn == 0 || !DEBUG_INSN_P (insn))
3504 	break;
3505     }
3506 
3507   return insn;
3508 }
3509 
3510 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3511    This routine does not look inside SEQUENCEs.  */
3512 
3513 rtx_insn *
3514 next_nonnote_nondebug_insn (rtx_insn *insn)
3515 {
3516   while (insn)
3517     {
3518       insn = NEXT_INSN (insn);
3519       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3520 	break;
3521     }
3522 
3523   return insn;
3524 }
3525 
3526 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3527    but stop the search before we enter another basic block.  This
3528    routine does not look inside SEQUENCEs.  */
3529 
3530 rtx_insn *
3531 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3532 {
3533   while (insn)
3534     {
3535       insn = NEXT_INSN (insn);
3536       if (insn == 0)
3537 	break;
3538       if (DEBUG_INSN_P (insn))
3539 	continue;
3540       if (!NOTE_P (insn))
3541 	break;
3542       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3543 	return NULL;
3544     }
3545 
3546   return insn;
3547 }
3548 
3549 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3550    This routine does not look inside SEQUENCEs.  */
3551 
3552 rtx_insn *
3553 prev_nonnote_nondebug_insn (rtx_insn *insn)
3554 {
3555   while (insn)
3556     {
3557       insn = PREV_INSN (insn);
3558       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3559 	break;
3560     }
3561 
3562   return insn;
3563 }
3564 
3565 /* Return the previous insn before INSN that is not a NOTE nor
3566    DEBUG_INSN, but stop the search before we enter another basic
3567    block.  This routine does not look inside SEQUENCEs.  */
3568 
3569 rtx_insn *
3570 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3571 {
3572   while (insn)
3573     {
3574       insn = PREV_INSN (insn);
3575       if (insn == 0)
3576 	break;
3577       if (DEBUG_INSN_P (insn))
3578 	continue;
3579       if (!NOTE_P (insn))
3580 	break;
3581       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3582 	return NULL;
3583     }
3584 
3585   return insn;
3586 }
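
/* An illustrative sketch: finding the previous "real" insn within the
   same basic block, with NULL meaning there is none, is simply

	rtx_insn *prev = prev_nonnote_nondebug_insn_bb (insn);

   because the _bb variants give up (return NULL) as soon as the walk
   reaches a NOTE_INSN_BASIC_BLOCK.  */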
3587 
3588 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3589    or 0, if there is none.  This routine does not look inside
3590    SEQUENCEs.  */
3591 
3592 rtx_insn *
3593 next_real_insn (rtx_insn *insn)
3594 {
3595   while (insn)
3596     {
3597       insn = NEXT_INSN (insn);
3598       if (insn == 0 || INSN_P (insn))
3599 	break;
3600     }
3601 
3602   return insn;
3603 }
3604 
3605 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3606    or 0, if there is none.  This routine does not look inside
3607    SEQUENCEs.  */
3608 
3609 rtx_insn *
3610 prev_real_insn (rtx_insn *insn)
3611 {
3612   while (insn)
3613     {
3614       insn = PREV_INSN (insn);
3615       if (insn == 0 || INSN_P (insn))
3616 	break;
3617     }
3618 
3619   return insn;
3620 }
3621 
3622 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3623    or 0, if there is none.  This routine does not look inside
3624    SEQUENCEs.  */
3625 
3626 rtx_insn *
3627 next_real_nondebug_insn (rtx uncast_insn)
3628 {
3629   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3630 
3631   while (insn)
3632     {
3633       insn = NEXT_INSN (insn);
3634       if (insn == 0 || NONDEBUG_INSN_P (insn))
3635 	break;
3636     }
3637 
3638   return insn;
3639 }
3640 
3641 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3642    or 0, if there is none.  This routine does not look inside
3643    SEQUENCEs.  */
3644 
3645 rtx_insn *
3646 prev_real_nondebug_insn (rtx_insn *insn)
3647 {
3648   while (insn)
3649     {
3650       insn = PREV_INSN (insn);
3651       if (insn == 0 || NONDEBUG_INSN_P (insn))
3652 	break;
3653     }
3654 
3655   return insn;
3656 }
3657 
3658 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3659    This routine does not look inside SEQUENCEs.  */
3660 
3661 rtx_call_insn *
3662 last_call_insn (void)
3663 {
3664   rtx_insn *insn;
3665 
3666   for (insn = get_last_insn ();
3667        insn && !CALL_P (insn);
3668        insn = PREV_INSN (insn))
3669     ;
3670 
3671   return safe_as_a <rtx_call_insn *> (insn);
3672 }
3673 
3674 /* Find the next insn after INSN that really does something.  This routine
3675    does not look inside SEQUENCEs.  After reload this also skips over
3676    standalone USE and CLOBBER insns.  */
3677 
3678 int
3679 active_insn_p (const rtx_insn *insn)
3680 {
3681   return (CALL_P (insn) || JUMP_P (insn)
3682 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
3683 	  || (NONJUMP_INSN_P (insn)
3684 	      && (! reload_completed
3685 		  || (GET_CODE (PATTERN (insn)) != USE
3686 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3687 }
3688 
3689 rtx_insn *
3690 next_active_insn (rtx_insn *insn)
3691 {
3692   while (insn)
3693     {
3694       insn = NEXT_INSN (insn);
3695       if (insn == 0 || active_insn_p (insn))
3696 	break;
3697     }
3698 
3699   return insn;
3700 }
3701 
3702 /* Find the last insn before INSN that really does something.  This routine
3703    does not look inside SEQUENCEs.  After reload this also skips over
3704    standalone USE and CLOBBER insns.  */
3705 
3706 rtx_insn *
3707 prev_active_insn (rtx_insn *insn)
3708 {
3709   while (insn)
3710     {
3711       insn = PREV_INSN (insn);
3712       if (insn == 0 || active_insn_p (insn))
3713 	break;
3714     }
3715 
3716   return insn;
3717 }
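
/* A hedged usage sketch: passes often pair these walkers to look at the
   instruction that will actually execute next, e.g.

	rtx_insn *next = next_active_insn (insn);
	if (next && JUMP_P (next))
	  ...

   After reload "active" also excludes standalone USE and CLOBBER insns,
   so it approximates "will emit code or change control flow".  */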
3718 
3719 /* Return true if X contains an RTX_AUTOINC class rtx that modifies REG.  */
3720 
3721 static int
3722 find_auto_inc (const_rtx x, const_rtx reg)
3723 {
3724   subrtx_iterator::array_type array;
3725   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3726     {
3727       const_rtx x = *iter;
3728       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3729 	  && rtx_equal_p (reg, XEXP (x, 0)))
3730 	return true;
3731     }
3732   return false;
3733 }
3734 
3735 /* Increment the label uses for all labels present in rtx.  */
3736 
3737 static void
3738 mark_label_nuses (rtx x)
3739 {
3740   enum rtx_code code;
3741   int i, j;
3742   const char *fmt;
3743 
3744   code = GET_CODE (x);
3745   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3746     LABEL_NUSES (label_ref_label (x))++;
3747 
3748   fmt = GET_RTX_FORMAT (code);
3749   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3750     {
3751       if (fmt[i] == 'e')
3752 	mark_label_nuses (XEXP (x, i));
3753       else if (fmt[i] == 'E')
3754 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3755 	  mark_label_nuses (XVECEXP (x, i, j));
3756     }
3757 }
3758 
3759 
3760 /* Try splitting insns that can be split for better scheduling.
3761    PAT is the pattern which might split.
3762    TRIAL is the insn providing PAT.
3763    LAST is nonzero if we should return the last insn of the sequence produced.
3764 
3765    If this routine succeeds in splitting, it returns the first or last
3766    replacement insn depending on the value of LAST.  Otherwise, it
3767    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3768 
3769 rtx_insn *
3770 try_split (rtx pat, rtx_insn *trial, int last)
3771 {
3772   rtx_insn *before, *after;
3773   rtx note;
3774   rtx_insn *seq, *tem;
3775   profile_probability probability;
3776   rtx_insn *insn_last, *insn;
3777   int njumps = 0;
3778   rtx_insn *call_insn = NULL;
3779 
3780   if (any_condjump_p (trial)
3781       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3782     split_branch_probability
3783       = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3784   else
3785     split_branch_probability = profile_probability::uninitialized ();
3786 
3787   probability = split_branch_probability;
3788 
3789   seq = split_insns (pat, trial);
3790 
3791   split_branch_probability = profile_probability::uninitialized ();
3792 
3793   if (!seq)
3794     return trial;
3795 
3796   int split_insn_count = 0;
3797   /* Avoid infinite loop if any insn of the result matches
3798      the original pattern.  */
3799   insn_last = seq;
3800   while (1)
3801     {
3802       if (INSN_P (insn_last)
3803 	  && rtx_equal_p (PATTERN (insn_last), pat))
3804 	return trial;
3805       split_insn_count++;
3806       if (!NEXT_INSN (insn_last))
3807 	break;
3808       insn_last = NEXT_INSN (insn_last);
3809     }
3810 
3811   /* We're not good at redistributing frame information if
3812      the split occurs before reload or if it results in more
3813      than one insn.  */
3814   if (RTX_FRAME_RELATED_P (trial))
3815     {
3816       if (!reload_completed || split_insn_count != 1)
3817         return trial;
3818 
3819       rtx_insn *new_insn = seq;
3820       rtx_insn *old_insn = trial;
3821       copy_frame_info_to_split_insn (old_insn, new_insn);
3822     }
3823 
3824   /* We will be adding the new sequence to the function.  The splitters
3825      may have introduced invalid RTL sharing, so unshare the sequence now.  */
3826   unshare_all_rtl_in_chain (seq);
3827 
3828   /* Mark labels and copy flags.  */
3829   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3830     {
3831       if (JUMP_P (insn))
3832 	{
3833 	  if (JUMP_P (trial))
3834 	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3835 	  mark_jump_label (PATTERN (insn), insn, 0);
3836 	  njumps++;
3837 	  if (probability.initialized_p ()
3838 	      && any_condjump_p (insn)
3839 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3840 	    {
3841 	      /* We can preserve the REG_BR_PROB notes only if exactly
3842 		 one jump is created, otherwise the machine description
3843 		 is responsible for this step using
3844 		 split_branch_probability variable.  */
3845 	      gcc_assert (njumps == 1);
3846 	      add_reg_br_prob_note (insn, probability);
3847 	    }
3848 	}
3849     }
3850 
3851   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3852      in SEQ and copy any additional information across.  */
3853   if (CALL_P (trial))
3854     {
3855       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3856 	if (CALL_P (insn))
3857 	  {
3858 	    gcc_assert (call_insn == NULL_RTX);
3859 	    call_insn = insn;
3860 
3861 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3862 	       target may have explicitly specified.  */
3863 	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3864 	    while (*p)
3865 	      p = &XEXP (*p, 1);
3866 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3867 
3868 	    /* If the old call was a sibling call, the new one must
3869 	       be too.  */
3870 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3871 	  }
3872     }
3873 
3874   /* Copy notes, particularly those related to the CFG.  */
3875   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3876     {
3877       switch (REG_NOTE_KIND (note))
3878 	{
3879 	case REG_EH_REGION:
3880 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3881 	  break;
3882 
3883 	case REG_NORETURN:
3884 	case REG_SETJMP:
3885 	case REG_TM:
3886 	case REG_CALL_NOCF_CHECK:
3887 	case REG_CALL_ARG_LOCATION:
3888 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3889 	    {
3890 	      if (CALL_P (insn))
3891 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3892 	    }
3893 	  break;
3894 
3895 	case REG_NON_LOCAL_GOTO:
3896 	case REG_LABEL_TARGET:
3897 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3898 	    {
3899 	      if (JUMP_P (insn))
3900 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3901 	    }
3902 	  break;
3903 
3904 	case REG_INC:
3905 	  if (!AUTO_INC_DEC)
3906 	    break;
3907 
3908 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3909 	    {
3910 	      rtx reg = XEXP (note, 0);
3911 	      if (!FIND_REG_INC_NOTE (insn, reg)
3912 		  && find_auto_inc (PATTERN (insn), reg))
3913 		add_reg_note (insn, REG_INC, reg);
3914 	    }
3915 	  break;
3916 
3917 	case REG_ARGS_SIZE:
3918 	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3919 	  break;
3920 
3921 	case REG_CALL_DECL:
3922 	case REG_UNTYPED_CALL:
3923 	  gcc_assert (call_insn != NULL_RTX);
3924 	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3925 	  break;
3926 
3927 	default:
3928 	  break;
3929 	}
3930     }
3931 
3932   /* If there are LABELS inside the split insns increment the
3933      usage count so we don't delete the label.  */
3934   if (INSN_P (trial))
3935     {
3936       insn = insn_last;
3937       while (insn != NULL_RTX)
3938 	{
3939 	  /* JUMP_P insns have already been "marked" above.  */
3940 	  if (NONJUMP_INSN_P (insn))
3941 	    mark_label_nuses (PATTERN (insn));
3942 
3943 	  insn = PREV_INSN (insn);
3944 	}
3945     }
3946 
3947   before = PREV_INSN (trial);
3948   after = NEXT_INSN (trial);
3949 
3950   emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3951 
3952   delete_insn (trial);
3953 
3954   /* Recursively call try_split for each new insn created; by the
3955      time control returns here that insn will be fully split, so
3956      set LAST and continue from the insn after the one returned.
3957      We can't use next_active_insn here since AFTER may be a note.
3958      Ignore deleted insns, which can occur if not optimizing.  */
3959   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3960     if (! tem->deleted () && INSN_P (tem))
3961       tem = try_split (PATTERN (tem), tem, 1);
3962 
3963   /* Return either the first or the last insn, depending on which was
3964      requested.  */
3965   return last
3966     ? (after ? PREV_INSN (after) : get_last_insn ())
3967     : NEXT_INSN (before);
3968 }
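
/* An illustrative sketch: a pass that wants INSN fully split in place
   can simply do

	rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   and resume scanning at NEXT_INSN (last); if no define_split in the
   machine description matched, LAST is just INSN itself.  */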
3969 
3970 /* Make and return an INSN rtx, initializing all its slots.
3971    Store PATTERN in the pattern slots.  */
3972 
3973 rtx_insn *
3974 make_insn_raw (rtx pattern)
3975 {
3976   rtx_insn *insn;
3977 
3978   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3979 
3980   INSN_UID (insn) = cur_insn_uid++;
3981   PATTERN (insn) = pattern;
3982   INSN_CODE (insn) = -1;
3983   REG_NOTES (insn) = NULL;
3984   INSN_LOCATION (insn) = curr_insn_location ();
3985   BLOCK_FOR_INSN (insn) = NULL;
3986 
3987 #ifdef ENABLE_RTL_CHECKING
3988   if (insn
3989       && INSN_P (insn)
3990       && (returnjump_p (insn)
3991 	  || (GET_CODE (insn) == SET
3992 	      && SET_DEST (insn) == pc_rtx)))
3993     {
3994       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3995       debug_rtx (insn);
3996     }
3997 #endif
3998 
3999   return insn;
4000 }
4001 
4002 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
4003 
4004 static rtx_insn *
4005 make_debug_insn_raw (rtx pattern)
4006 {
4007   rtx_debug_insn *insn;
4008 
4009   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4010   INSN_UID (insn) = cur_debug_insn_uid++;
4011   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4012     INSN_UID (insn) = cur_insn_uid++;
4013 
4014   PATTERN (insn) = pattern;
4015   INSN_CODE (insn) = -1;
4016   REG_NOTES (insn) = NULL;
4017   INSN_LOCATION (insn) = curr_insn_location ();
4018   BLOCK_FOR_INSN (insn) = NULL;
4019 
4020   return insn;
4021 }
4022 
4023 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
4024 
4025 static rtx_insn *
4026 make_jump_insn_raw (rtx pattern)
4027 {
4028   rtx_jump_insn *insn;
4029 
4030   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4031   INSN_UID (insn) = cur_insn_uid++;
4032 
4033   PATTERN (insn) = pattern;
4034   INSN_CODE (insn) = -1;
4035   REG_NOTES (insn) = NULL;
4036   JUMP_LABEL (insn) = NULL;
4037   INSN_LOCATION (insn) = curr_insn_location ();
4038   BLOCK_FOR_INSN (insn) = NULL;
4039 
4040   return insn;
4041 }
4042 
4043 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
4044 
4045 static rtx_insn *
4046 make_call_insn_raw (rtx pattern)
4047 {
4048   rtx_call_insn *insn;
4049 
4050   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4051   INSN_UID (insn) = cur_insn_uid++;
4052 
4053   PATTERN (insn) = pattern;
4054   INSN_CODE (insn) = -1;
4055   REG_NOTES (insn) = NULL;
4056   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4057   INSN_LOCATION (insn) = curr_insn_location ();
4058   BLOCK_FOR_INSN (insn) = NULL;
4059 
4060   return insn;
4061 }
4062 
4063 /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
4064 
4065 static rtx_note *
4066 make_note_raw (enum insn_note subtype)
4067 {
4068   /* Some notes are never created this way at all.  These notes are
4069      only created by patching out insns.  */
4070   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4071 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4072 
4073   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4074   INSN_UID (note) = cur_insn_uid++;
4075   NOTE_KIND (note) = subtype;
4076   BLOCK_FOR_INSN (note) = NULL;
4077   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4078   return note;
4079 }
4080 
4081 /* Link INSN into the doubly-linked list between PREV and NEXT.
4082    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4083    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
4084 
4085 static inline void
4086 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4087 {
4088   SET_PREV_INSN (insn) = prev;
4089   SET_NEXT_INSN (insn) = next;
4090   if (prev != NULL)
4091     {
4092       SET_NEXT_INSN (prev) = insn;
4093       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4094 	{
4095 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4096 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4097 	}
4098     }
4099   if (next != NULL)
4100     {
4101       SET_PREV_INSN (next) = insn;
4102       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4103 	{
4104 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4105 	  SET_PREV_INSN (sequence->insn (0)) = insn;
4106 	}
4107     }
4108 
4109   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4110     {
4111       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4112       SET_PREV_INSN (sequence->insn (0)) = prev;
4113       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4114     }
4115 }
4116 
4117 /* Add INSN to the end of the doubly-linked list.
4118    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
4119 
4120 void
4121 add_insn (rtx_insn *insn)
4122 {
4123   rtx_insn *prev = get_last_insn ();
4124   link_insn_into_chain (insn, prev, NULL);
4125   if (get_insns () == NULL)
4126     set_first_insn (insn);
4127   set_last_insn (insn);
4128 }
4129 
4130 /* Add INSN into the doubly-linked list after insn AFTER.  */
4131 
4132 static void
4133 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4134 {
4135   rtx_insn *next = NEXT_INSN (after);
4136 
4137   gcc_assert (!optimize || !after->deleted ());
4138 
4139   link_insn_into_chain (insn, after, next);
4140 
4141   if (next == NULL)
4142     {
4143       struct sequence_stack *seq;
4144 
4145       for (seq = get_current_sequence (); seq; seq = seq->next)
4146 	if (after == seq->last)
4147 	  {
4148 	    seq->last = insn;
4149 	    break;
4150 	  }
4151     }
4152 }
4153 
4154 /* Add INSN into the doubly-linked list before insn BEFORE.  */
4155 
4156 static void
4157 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4158 {
4159   rtx_insn *prev = PREV_INSN (before);
4160 
4161   gcc_assert (!optimize || !before->deleted ());
4162 
4163   link_insn_into_chain (insn, prev, before);
4164 
4165   if (prev == NULL)
4166     {
4167       struct sequence_stack *seq;
4168 
4169       for (seq = get_current_sequence (); seq; seq = seq->next)
4170 	if (before == seq->first)
4171 	  {
4172 	    seq->first = insn;
4173 	    break;
4174 	  }
4175 
4176       gcc_assert (seq);
4177     }
4178 }
4179 
4180 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4181    If BB is NULL, an attempt is made to infer the bb from AFTER.
4182 
4183    This and the next function should be the only functions called
4184    to insert an insn once delay slots have been filled since only
4185    they know how to update a SEQUENCE. */
4186 
4187 void
4188 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4189 {
4190   add_insn_after_nobb (insn, after);
4191   if (!BARRIER_P (after)
4192       && !BARRIER_P (insn)
4193       && (bb = BLOCK_FOR_INSN (after)))
4194     {
4195       set_block_for_insn (insn, bb);
4196       if (INSN_P (insn))
4197 	df_insn_rescan (insn);
4198       /* Should not happen as first in the BB is always
4199 	 either NOTE or LABEL.  */
4200       if (BB_END (bb) == after
4201 	  /* Avoid clobbering of structure when creating new BB.  */
4202 	  && !BARRIER_P (insn)
4203 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
4204 	BB_END (bb) = insn;
4205     }
4206 }
4207 
4208 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4209    If BB is NULL, an attempt is made to infer the bb from BEFORE.
4210 
4211    This and the previous function should be the only functions called
4212    to insert an insn once delay slots have been filled since only
4213    they know how to update a SEQUENCE. */
4214 
4215 void
4216 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4217 {
4218   add_insn_before_nobb (insn, before);
4219 
4220   if (!bb
4221       && !BARRIER_P (before)
4222       && !BARRIER_P (insn))
4223     bb = BLOCK_FOR_INSN (before);
4224 
4225   if (bb)
4226     {
4227       set_block_for_insn (insn, bb);
4228       if (INSN_P (insn))
4229 	df_insn_rescan (insn);
4230       /* Should not happen as first in the BB is always either NOTE or
4231 	 LABEL.  */
4232       gcc_assert (BB_HEAD (bb) != insn
4233 		  /* Avoid clobbering of structure when creating new BB.  */
4234 		  || BARRIER_P (insn)
4235 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
4236     }
4237 }
4238 
4239 /* Replace INSN with a deleted instruction note.  */
4240 
4241 void
4242 set_insn_deleted (rtx_insn *insn)
4243 {
4244   if (INSN_P (insn))
4245     df_insn_delete (insn);
4246   PUT_CODE (insn, NOTE);
4247   NOTE_KIND (insn) = NOTE_INSN_DELETED;
4248 }
4249 
4250 
4251 /* Unlink INSN from the insn chain.
4252 
4253    This function knows how to handle sequences.
4254 
4255    This function does not invalidate data flow information associated with
4256    INSN (i.e. does not call df_insn_delete).  That makes this function
4257    usable for merely disconnecting an insn from the chain so that it can
4258    be re-emitted elsewhere later.
4259 
4260    To later insert INSN elsewhere in the insn chain via add_insn and
4261    similar functions, PREV_INSN and NEXT_INSN must be nullified by
4262    the caller.  Nullifying them here breaks many insn chain walks.
4263 
4264    To really delete an insn and related DF information, use delete_insn.  */
4265 
4266 void
4267 remove_insn (rtx_insn *insn)
4268 {
4269   rtx_insn *next = NEXT_INSN (insn);
4270   rtx_insn *prev = PREV_INSN (insn);
4271   basic_block bb;
4272 
4273   if (prev)
4274     {
4275       SET_NEXT_INSN (prev) = next;
4276       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4277 	{
4278 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4279 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4280 	}
4281     }
4282   else
4283     {
4284       struct sequence_stack *seq;
4285 
4286       for (seq = get_current_sequence (); seq; seq = seq->next)
4287 	if (insn == seq->first)
4288 	  {
4289 	    seq->first = next;
4290 	    break;
4291 	  }
4292 
4293       gcc_assert (seq);
4294     }
4295 
4296   if (next)
4297     {
4298       SET_PREV_INSN (next) = prev;
4299       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4300 	{
4301 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4302 	  SET_PREV_INSN (sequence->insn (0)) = prev;
4303 	}
4304     }
4305   else
4306     {
4307       struct sequence_stack *seq;
4308 
4309       for (seq = get_current_sequence (); seq; seq = seq->next)
4310 	if (insn == seq->last)
4311 	  {
4312 	    seq->last = prev;
4313 	    break;
4314 	  }
4315 
4316       gcc_assert (seq);
4317     }
4318 
4319   /* Fix up basic block boundaries, if necessary.  */
4320   if (!BARRIER_P (insn)
4321       && (bb = BLOCK_FOR_INSN (insn)))
4322     {
4323       if (BB_HEAD (bb) == insn)
4324 	{
4325 	  /* Never ever delete the basic block note without deleting whole
4326 	     basic block.  */
4327 	  gcc_assert (!NOTE_P (insn));
4328 	  BB_HEAD (bb) = next;
4329 	}
4330       if (BB_END (bb) == insn)
4331 	BB_END (bb) = prev;
4332     }
4333 }
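
/* An illustrative sketch: moving one insn elsewhere without discarding
   its DF information is done with remove_insn plus one of the add_insn_*
   routines, clearing the stale links in between (NEW_PLACE is a
   placeholder):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, new_place, NULL);  */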
4334 
4335 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4336 
4337 void
4338 add_function_usage_to (rtx call_insn, rtx call_fusage)
4339 {
4340   gcc_assert (call_insn && CALL_P (call_insn));
4341 
4342   /* Put the register usage information on the CALL.  If there is already
4343      some usage information, put ours at the end.  */
4344   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4345     {
4346       rtx link;
4347 
4348       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4349 	   link = XEXP (link, 1))
4350 	;
4351 
4352       XEXP (link, 1) = call_fusage;
4353     }
4354   else
4355     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4356 }
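
/* An illustrative sketch: recording that a call implicitly uses some
   hard register REGNO_OF_INTEREST (a hypothetical number) could look like

	rtx reg = gen_rtx_REG (Pmode, REGNO_OF_INTEREST);
	rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_USE (VOIDmode, reg),
					NULL_RTX);
	add_function_usage_to (call_insn, fusage);

   although such lists are normally built via use_reg and friends.  */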
4357 
4358 /* Delete all insns made since FROM.
4359    FROM becomes the new last instruction.  */
4360 
4361 void
4362 delete_insns_since (rtx_insn *from)
4363 {
4364   if (from == 0)
4365     set_first_insn (0);
4366   else
4367     SET_NEXT_INSN (from) = 0;
4368   set_last_insn (from);
4369 }
4370 
4371 /* This function is deprecated, please use sequences instead.
4372 
4373    Move a consecutive bunch of insns to a different place in the chain.
4374    The insns to be moved are those between FROM and TO.
4375    They are moved to a new position after the insn AFTER.
4376    AFTER must not be FROM or TO or any insn in between.
4377 
4378    This function does not know about SEQUENCEs and hence should not be
4379    called after delay-slot filling has been done.  */
4380 
4381 void
4382 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4383 {
4384   if (flag_checking)
4385     {
4386       for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4387 	gcc_assert (after != x);
4388       gcc_assert (after != to);
4389     }
4390 
4391   /* Splice this bunch out of where it is now.  */
4392   if (PREV_INSN (from))
4393     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4394   if (NEXT_INSN (to))
4395     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4396   if (get_last_insn () == to)
4397     set_last_insn (PREV_INSN (from));
4398   if (get_insns () == from)
4399     set_first_insn (NEXT_INSN (to));
4400 
4401   /* Make the new neighbors point to it and it to them.  */
4402   if (NEXT_INSN (after))
4403     SET_PREV_INSN (NEXT_INSN (after)) = to;
4404 
4405   SET_NEXT_INSN (to) = NEXT_INSN (after);
4406   SET_PREV_INSN (from) = after;
4407   SET_NEXT_INSN (after) = from;
4408   if (after == get_last_insn ())
4409     set_last_insn (to);
4410 }
4411 
4412 /* Same as function above, but take care to update BB boundaries.  */
4413 void
4414 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4415 {
4416   rtx_insn *prev = PREV_INSN (from);
4417   basic_block bb, bb2;
4418 
4419   reorder_insns_nobb (from, to, after);
4420 
4421   if (!BARRIER_P (after)
4422       && (bb = BLOCK_FOR_INSN (after)))
4423     {
4424       rtx_insn *x;
4425       df_set_bb_dirty (bb);
4426 
4427       if (!BARRIER_P (from)
4428 	  && (bb2 = BLOCK_FOR_INSN (from)))
4429 	{
4430 	  if (BB_END (bb2) == to)
4431 	    BB_END (bb2) = prev;
4432 	  df_set_bb_dirty (bb2);
4433 	}
4434 
4435       if (BB_END (bb) == after)
4436 	BB_END (bb) = to;
4437 
4438       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4439 	if (!BARRIER_P (x))
4440 	  df_insn_change_bb (x, bb);
4441     }
4442 }
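
/* An illustrative sketch: hoisting the contiguous run FROM..TO so that
   it executes right after AFTER, keeping BB_END and the DF marks up to
   date, is just

	reorder_insns (from, to, after);

   reorder_insns_nobb is the raw list splice and must not be used once
   delay slots have been filled, since it knows nothing of SEQUENCEs.  */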
4443 
4444 
4445 /* Emit insn(s) of given code and pattern
4446    at a specified place within the doubly-linked list.
4447 
4448    All of the emit_foo global entry points accept an object
4449    X which is either an insn list or a PATTERN of a single
4450    instruction.
4451 
4452    There are thus a few canonical ways to generate code and
4453    emit it at a specific place in the instruction stream.  For
4454    example, consider the instruction named SPOT and the fact that
4455    we would like to emit some instructions before SPOT.  We might
4456    do it like this:
4457 
4458 	start_sequence ();
4459 	... emit the new instructions ...
4460 	insns_head = get_insns ();
4461 	end_sequence ();
4462 
4463 	emit_insn_before (insns_head, SPOT);
4464 
4465    It used to be common to generate SEQUENCE rtl instead, but that
4466    is a relic of the past which no longer occurs.  The reason is that
4467    SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4468    generated would almost certainly die right after it was created.  */
4469 
4470 static rtx_insn *
4471 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4472 			   basic_block bb,
4473                            rtx_insn *(*make_raw) (rtx))
4474 {
4475   rtx_insn *insn;
4476 
4477   gcc_assert (before);
4478 
4479   if (x == NULL_RTX)
4480     return last;
4481 
4482   switch (GET_CODE (x))
4483     {
4484     case DEBUG_INSN:
4485     case INSN:
4486     case JUMP_INSN:
4487     case CALL_INSN:
4488     case CODE_LABEL:
4489     case BARRIER:
4490     case NOTE:
4491       insn = as_a <rtx_insn *> (x);
4492       while (insn)
4493 	{
4494 	  rtx_insn *next = NEXT_INSN (insn);
4495 	  add_insn_before (insn, before, bb);
4496 	  last = insn;
4497 	  insn = next;
4498 	}
4499       break;
4500 
4501 #ifdef ENABLE_RTL_CHECKING
4502     case SEQUENCE:
4503       gcc_unreachable ();
4504       break;
4505 #endif
4506 
4507     default:
4508       last = (*make_raw) (x);
4509       add_insn_before (last, before, bb);
4510       break;
4511     }
4512 
4513   return last;
4514 }
4515 
4516 /* Make X be output before the instruction BEFORE.  */
4517 
4518 rtx_insn *
4519 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4520 {
4521   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4522 }
4523 
4524 /* Make an instruction with body X and code JUMP_INSN
4525    and output it before the instruction BEFORE.  */
4526 
4527 rtx_jump_insn *
4528 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4529 {
4530   return as_a <rtx_jump_insn *> (
4531 		emit_pattern_before_noloc (x, before, NULL, NULL,
4532 					   make_jump_insn_raw));
4533 }
4534 
4535 /* Make an instruction with body X and code CALL_INSN
4536    and output it before the instruction BEFORE.  */
4537 
4538 rtx_insn *
4539 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4540 {
4541   return emit_pattern_before_noloc (x, before, NULL, NULL,
4542 				    make_call_insn_raw);
4543 }
4544 
4545 /* Make an instruction with body X and code DEBUG_INSN
4546    and output it before the instruction BEFORE.  */
4547 
4548 rtx_insn *
4549 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4550 {
4551   return emit_pattern_before_noloc (x, before, NULL, NULL,
4552 				    make_debug_insn_raw);
4553 }
4554 
4555 /* Make an insn of code BARRIER
4556    and output it before the insn BEFORE.  */
4557 
4558 rtx_barrier *
4559 emit_barrier_before (rtx_insn *before)
4560 {
4561   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4562 
4563   INSN_UID (insn) = cur_insn_uid++;
4564 
4565   add_insn_before (insn, before, NULL);
4566   return insn;
4567 }
4568 
4569 /* Emit the label LABEL before the insn BEFORE.  */
4570 
4571 rtx_code_label *
4572 emit_label_before (rtx_code_label *label, rtx_insn *before)
4573 {
4574   gcc_checking_assert (INSN_UID (label) == 0);
4575   INSN_UID (label) = cur_insn_uid++;
4576   add_insn_before (label, before, NULL);
4577   return label;
4578 }
4579 
4580 /* Helper for emit_insn_after, handles lists of instructions
4581    efficiently.  */
4582 
4583 static rtx_insn *
4584 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4585 {
4586   rtx_insn *last;
4587   rtx_insn *after_after;
4588   if (!bb && !BARRIER_P (after))
4589     bb = BLOCK_FOR_INSN (after);
4590 
4591   if (bb)
4592     {
4593       df_set_bb_dirty (bb);
4594       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4595 	if (!BARRIER_P (last))
4596 	  {
4597 	    set_block_for_insn (last, bb);
4598 	    df_insn_rescan (last);
4599 	  }
4600       if (!BARRIER_P (last))
4601 	{
4602 	  set_block_for_insn (last, bb);
4603 	  df_insn_rescan (last);
4604 	}
4605       if (BB_END (bb) == after)
4606 	BB_END (bb) = last;
4607     }
4608   else
4609     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4610       continue;
4611 
4612   after_after = NEXT_INSN (after);
4613 
4614   SET_NEXT_INSN (after) = first;
4615   SET_PREV_INSN (first) = after;
4616   SET_NEXT_INSN (last) = after_after;
4617   if (after_after)
4618     SET_PREV_INSN (after_after) = last;
4619 
4620   if (after == get_last_insn ())
4621     set_last_insn (last);
4622 
4623   return last;
4624 }
4625 
4626 static rtx_insn *
4627 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4628 			  rtx_insn *(*make_raw)(rtx))
4629 {
4630   rtx_insn *last = after;
4631 
4632   gcc_assert (after);
4633 
4634   if (x == NULL_RTX)
4635     return last;
4636 
4637   switch (GET_CODE (x))
4638     {
4639     case DEBUG_INSN:
4640     case INSN:
4641     case JUMP_INSN:
4642     case CALL_INSN:
4643     case CODE_LABEL:
4644     case BARRIER:
4645     case NOTE:
4646       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4647       break;
4648 
4649 #ifdef ENABLE_RTL_CHECKING
4650     case SEQUENCE:
4651       gcc_unreachable ();
4652       break;
4653 #endif
4654 
4655     default:
4656       last = (*make_raw) (x);
4657       add_insn_after (last, after, bb);
4658       break;
4659     }
4660 
4661   return last;
4662 }
4663 
4664 /* Make X be output after the insn AFTER and set the BB of insn.  If
4665    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4666 
4667 rtx_insn *
4668 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4669 {
4670   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4671 }
4672 
4673 
4674 /* Make an insn of code JUMP_INSN with body X
4675    and output it after the insn AFTER.  */
4676 
4677 rtx_jump_insn *
4678 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4679 {
4680   return as_a <rtx_jump_insn *> (
4681 		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4682 }
4683 
4684 /* Make an instruction with body X and code CALL_INSN
4685    and output it after the instruction AFTER.  */
4686 
4687 rtx_insn *
4688 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4689 {
4690   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4691 }
4692 
4693 /* Make an instruction with body X and code DEBUG_INSN
4694    and output it after the instruction AFTER.  */
4695 
4696 rtx_insn *
4697 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4698 {
4699   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4700 }
4701 
4702 /* Make an insn of code BARRIER
4703    and output it after the insn AFTER.  */
4704 
4705 rtx_barrier *
4706 emit_barrier_after (rtx_insn *after)
4707 {
4708   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4709 
4710   INSN_UID (insn) = cur_insn_uid++;
4711 
4712   add_insn_after (insn, after, NULL);
4713   return insn;
4714 }
4715 
4716 /* Emit the label LABEL after the insn AFTER.  */
4717 
4718 rtx_insn *
4719 emit_label_after (rtx_insn *label, rtx_insn *after)
4720 {
4721   gcc_checking_assert (INSN_UID (label) == 0);
4722   INSN_UID (label) = cur_insn_uid++;
4723   add_insn_after (label, after, NULL);
4724   return label;
4725 }
4726 
4727 /* Notes require a bit of special handling: Some notes need to have their
4728    BLOCK_FOR_INSN set, others should never have it set, and some should
4729    have it set or clear depending on the context.   */
4730 
4731 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4732    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
4733    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4734 
4735 static bool
4736 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4737 {
4738   switch (subtype)
4739     {
4740       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
4741       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4742 	return true;
4743 
4744       /* Notes for var tracking and EH region markers can appear between or
4745 	 inside basic blocks.  If the caller is emitting on the basic block
4746 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
4747       case NOTE_INSN_VAR_LOCATION:
4748       case NOTE_INSN_EH_REGION_BEG:
4749       case NOTE_INSN_EH_REGION_END:
4750 	return on_bb_boundary_p;
4751 
4752       /* Otherwise, BLOCK_FOR_INSN must be set.  */
4753       default:
4754 	return false;
4755     }
4756 }
4757 
4758 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4759 
4760 rtx_note *
4761 emit_note_after (enum insn_note subtype, rtx_insn *after)
4762 {
4763   rtx_note *note = make_note_raw (subtype);
4764   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4765   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4766 
4767   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4768     add_insn_after_nobb (note, after);
4769   else
4770     add_insn_after (note, after, bb);
4771   return note;
4772 }
4773 
4774 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4775 
4776 rtx_note *
4777 emit_note_before (enum insn_note subtype, rtx_insn *before)
4778 {
4779   rtx_note *note = make_note_raw (subtype);
4780   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4781   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4782 
4783   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4784     add_insn_before_nobb (note, before);
4785   else
4786     add_insn_before (note, before, bb);
4787   return note;
4788 }
4789 
4790 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4791    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
4792 
4793 static rtx_insn *
4794 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4795 			   rtx_insn *(*make_raw) (rtx))
4796 {
4797   rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4798 
4799   if (pattern == NULL_RTX || !loc)
4800     return last;
4801 
4802   after = NEXT_INSN (after);
4803   while (1)
4804     {
4805       if (active_insn_p (after)
4806 	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
4807 	  && !INSN_LOCATION (after))
4808 	INSN_LOCATION (after) = loc;
4809       if (after == last)
4810 	break;
4811       after = NEXT_INSN (after);
4812     }
4813   return last;
4814 }
4815 
4816 /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
4817    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
4818    any DEBUG_INSNs.  */
4819 
4820 static rtx_insn *
4821 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4822 		    rtx_insn *(*make_raw) (rtx))
4823 {
4824   rtx_insn *prev = after;
4825 
4826   if (skip_debug_insns)
4827     while (DEBUG_INSN_P (prev))
4828       prev = PREV_INSN (prev);
4829 
4830   if (INSN_P (prev))
4831     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4832 				      make_raw);
4833   else
4834     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4835 }
4836 
4837 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4838 rtx_insn *
4839 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4840 {
4841   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4842 }
4843 
4844 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4845 rtx_insn *
4846 emit_insn_after (rtx pattern, rtx_insn *after)
4847 {
4848   return emit_pattern_after (pattern, after, true, make_insn_raw);
4849 }
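
/* An illustrative sketch: inserting a fix-up assignment right after INSN
   while inheriting its source location (DEST and SRC are placeholders):

	rtx_insn *new_insn = emit_insn_after (gen_rtx_SET (dest, src), insn);

   Because DEBUG_INSNs are skipped when picking the location to copy,
   compiling with -g does not change the locations assigned here.  */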
4850 
4851 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4852 rtx_jump_insn *
4853 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4854 {
4855   return as_a <rtx_jump_insn *> (
4856 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4857 }
4858 
4859 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4860 rtx_jump_insn *
4861 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4862 {
4863   return as_a <rtx_jump_insn *> (
4864 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4865 }
4866 
4867 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4868 rtx_insn *
4869 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4870 {
4871   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4872 }
4873 
4874 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4875 rtx_insn *
4876 emit_call_insn_after (rtx pattern, rtx_insn *after)
4877 {
4878   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4879 }
4880 
4881 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4882 rtx_insn *
4883 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4884 {
4885   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4886 }
4887 
4888 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4889 rtx_insn *
4890 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4891 {
4892   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4893 }
4894 
4895 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4896    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
4897    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4898    CALL_INSN, etc.  */
4899 
4900 static rtx_insn *
4901 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4902 			    bool insnp, rtx_insn *(*make_raw) (rtx))
4903 {
4904   rtx_insn *first = PREV_INSN (before);
4905   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4906 					      insnp ? before : NULL,
4907 					      NULL, make_raw);
4908 
4909   if (pattern == NULL_RTX || !loc)
4910     return last;
4911 
4912   if (!first)
4913     first = get_insns ();
4914   else
4915     first = NEXT_INSN (first);
4916   while (1)
4917     {
4918       if (active_insn_p (first)
4919 	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
4920 	  && !INSN_LOCATION (first))
4921 	INSN_LOCATION (first) = loc;
4922       if (first == last)
4923 	break;
4924       first = NEXT_INSN (first);
4925     }
4926   return last;
4927 }
4928 
4929 /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
4930    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
4931    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
4932    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
4933 
4934 static rtx_insn *
4935 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4936 		     bool insnp, rtx_insn *(*make_raw) (rtx))
4937 {
4938   rtx_insn *next = before;
4939 
4940   if (skip_debug_insns)
4941     while (DEBUG_INSN_P (next))
4942       next = PREV_INSN (next);
4943 
4944   if (INSN_P (next))
4945     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4946 				       insnp, make_raw);
4947   else
4948     return emit_pattern_before_noloc (pattern, before,
4949 				      insnp ? before : NULL,
4950                                       NULL, make_raw);
4951 }
4952 
4953 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4954 rtx_insn *
4955 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4956 {
4957   return emit_pattern_before_setloc (pattern, before, loc, true,
4958 				     make_insn_raw);
4959 }
4960 
4961 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4962 rtx_insn *
4963 emit_insn_before (rtx pattern, rtx_insn *before)
4964 {
4965   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4966 }
4967 
4968 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4969 rtx_jump_insn *
4970 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4971 {
4972   return as_a <rtx_jump_insn *> (
4973 	emit_pattern_before_setloc (pattern, before, loc, false,
4974 				    make_jump_insn_raw));
4975 }
4976 
4977 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4978 rtx_jump_insn *
4979 emit_jump_insn_before (rtx pattern, rtx_insn *before)
4980 {
4981   return as_a <rtx_jump_insn *> (
4982 	emit_pattern_before (pattern, before, true, false,
4983 			     make_jump_insn_raw));
4984 }
4985 
4986 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4987 rtx_insn *
4988 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4989 {
4990   return emit_pattern_before_setloc (pattern, before, loc, false,
4991 				     make_call_insn_raw);
4992 }
4993 
4994 /* Like emit_call_insn_before_noloc,
4995    but set insn_location according to BEFORE.  */
4996 rtx_insn *
4997 emit_call_insn_before (rtx pattern, rtx_insn *before)
4998 {
4999   return emit_pattern_before (pattern, before, true, false,
5000 			      make_call_insn_raw);
5001 }
5002 
5003 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5004 rtx_insn *
5005 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5006 {
5007   return emit_pattern_before_setloc (pattern, before, loc, false,
5008 				     make_debug_insn_raw);
5009 }
5010 
5011 /* Like emit_debug_insn_before_noloc,
5012    but set insn_location according to BEFORE.  */
5013 rtx_insn *
5014 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5015 {
5016   return emit_pattern_before (pattern, before, false, false,
5017 			      make_debug_insn_raw);
5018 }
5019 
5020 /* Take X and emit it at the end of the doubly-linked
5021    INSN list.
5022 
5023    Returns the last insn emitted.  */
5024 
5025 rtx_insn *
5026 emit_insn (rtx x)
5027 {
5028   rtx_insn *last = get_last_insn ();
5029   rtx_insn *insn;
5030 
5031   if (x == NULL_RTX)
5032     return last;
5033 
5034   switch (GET_CODE (x))
5035     {
5036     case DEBUG_INSN:
5037     case INSN:
5038     case JUMP_INSN:
5039     case CALL_INSN:
5040     case CODE_LABEL:
5041     case BARRIER:
5042     case NOTE:
5043       insn = as_a <rtx_insn *> (x);
5044       while (insn)
5045 	{
5046 	  rtx_insn *next = NEXT_INSN (insn);
5047 	  add_insn (insn);
5048 	  last = insn;
5049 	  insn = next;
5050 	}
5051       break;
5052 
5053 #ifdef ENABLE_RTL_CHECKING
5054     case JUMP_TABLE_DATA:
5055     case SEQUENCE:
5056       gcc_unreachable ();
5057       break;
5058 #endif
5059 
5060     default:
5061       last = make_insn_raw (x);
5062       add_insn (last);
5063       break;
5064     }
5065 
5066   return last;
5067 }
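
/* Illustrative sketch only (not part of the original sources): a typical
   caller builds a pattern with the gen_* routines and hands it to
   emit_insn, which wraps it in an INSN and appends it to the chain.
   Assuming a fresh pseudo of word_mode is wanted:

     rtx tmp = gen_reg_rtx (word_mode);
     emit_insn (gen_rtx_SET (tmp, const0_rtx));

   emit_insn returns the last insn emitted, which is useful when the
   argument is itself a chain produced by get_insns.  */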
5068 
5069 /* Make an insn of code DEBUG_INSN with pattern X
5070    and add it to the end of the doubly-linked list.  */
5071 
5072 rtx_insn *
5073 emit_debug_insn (rtx x)
5074 {
5075   rtx_insn *last = get_last_insn ();
5076   rtx_insn *insn;
5077 
5078   if (x == NULL_RTX)
5079     return last;
5080 
5081   switch (GET_CODE (x))
5082     {
5083     case DEBUG_INSN:
5084     case INSN:
5085     case JUMP_INSN:
5086     case CALL_INSN:
5087     case CODE_LABEL:
5088     case BARRIER:
5089     case NOTE:
5090       insn = as_a <rtx_insn *> (x);
5091       while (insn)
5092 	{
5093 	  rtx_insn *next = NEXT_INSN (insn);
5094 	  add_insn (insn);
5095 	  last = insn;
5096 	  insn = next;
5097 	}
5098       break;
5099 
5100 #ifdef ENABLE_RTL_CHECKING
5101     case JUMP_TABLE_DATA:
5102     case SEQUENCE:
5103       gcc_unreachable ();
5104       break;
5105 #endif
5106 
5107     default:
5108       last = make_debug_insn_raw (x);
5109       add_insn (last);
5110       break;
5111     }
5112 
5113   return last;
5114 }
5115 
5116 /* Make an insn of code JUMP_INSN with pattern X
5117    and add it to the end of the doubly-linked list.  */
5118 
5119 rtx_insn *
5120 emit_jump_insn (rtx x)
5121 {
5122   rtx_insn *last = NULL;
5123   rtx_insn *insn;
5124 
5125   switch (GET_CODE (x))
5126     {
5127     case DEBUG_INSN:
5128     case INSN:
5129     case JUMP_INSN:
5130     case CALL_INSN:
5131     case CODE_LABEL:
5132     case BARRIER:
5133     case NOTE:
5134       insn = as_a <rtx_insn *> (x);
5135       while (insn)
5136 	{
5137 	  rtx_insn *next = NEXT_INSN (insn);
5138 	  add_insn (insn);
5139 	  last = insn;
5140 	  insn = next;
5141 	}
5142       break;
5143 
5144 #ifdef ENABLE_RTL_CHECKING
5145     case JUMP_TABLE_DATA:
5146     case SEQUENCE:
5147       gcc_unreachable ();
5148       break;
5149 #endif
5150 
5151     default:
5152       last = make_jump_insn_raw (x);
5153       add_insn (last);
5154       break;
5155     }
5156 
5157   return last;
5158 }
5159 
5160 /* Make an insn of code CALL_INSN with pattern X
5161    and add it to the end of the doubly-linked list.  */
5162 
5163 rtx_insn *
5164 emit_call_insn (rtx x)
5165 {
5166   rtx_insn *insn;
5167 
5168   switch (GET_CODE (x))
5169     {
5170     case DEBUG_INSN:
5171     case INSN:
5172     case JUMP_INSN:
5173     case CALL_INSN:
5174     case CODE_LABEL:
5175     case BARRIER:
5176     case NOTE:
5177       insn = emit_insn (x);
5178       break;
5179 
5180 #ifdef ENABLE_RTL_CHECKING
5181     case SEQUENCE:
5182     case JUMP_TABLE_DATA:
5183       gcc_unreachable ();
5184       break;
5185 #endif
5186 
5187     default:
5188       insn = make_call_insn_raw (x);
5189       add_insn (insn);
5190       break;
5191     }
5192 
5193   return insn;
5194 }
5195 
5196 /* Add the label LABEL to the end of the doubly-linked list.  */
5197 
5198 rtx_code_label *
5199 emit_label (rtx uncast_label)
5200 {
5201   rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5202 
5203   gcc_checking_assert (INSN_UID (label) == 0);
5204   INSN_UID (label) = cur_insn_uid++;
5205   add_insn (label);
5206   return label;
5207 }
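
/* Illustrative sketch only: labels are created with gen_label_rtx and
   only enter the insn stream once emit_label is called, e.g.

     rtx_code_label *done = gen_label_rtx ();
     ...emit the code that may jump to DONE...
     emit_label (done);

   The checking assert above relies on the label not having been
   emitted before.  */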
5208 
5209 /* Make an insn of code JUMP_TABLE_DATA
5210    and add it to the end of the doubly-linked list.  */
5211 
5212 rtx_jump_table_data *
5213 emit_jump_table_data (rtx table)
5214 {
5215   rtx_jump_table_data *jump_table_data =
5216     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5217   INSN_UID (jump_table_data) = cur_insn_uid++;
5218   PATTERN (jump_table_data) = table;
5219   BLOCK_FOR_INSN (jump_table_data) = NULL;
5220   add_insn (jump_table_data);
5221   return jump_table_data;
5222 }
5223 
5224 /* Make an insn of code BARRIER
5225    and add it to the end of the doubly-linked list.  */
5226 
5227 rtx_barrier *
5228 emit_barrier (void)
5229 {
5230   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5231   INSN_UID (barrier) = cur_insn_uid++;
5232   add_insn (barrier);
5233   return barrier;
5234 }
5235 
5236 /* Emit a copy of note ORIG.  */
5237 
5238 rtx_note *
5239 emit_note_copy (rtx_note *orig)
5240 {
5241   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5242   rtx_note *note = make_note_raw (kind);
5243   NOTE_DATA (note) = NOTE_DATA (orig);
5244   add_insn (note);
5245   return note;
5246 }
5247 
5248 /* Make an insn of code NOTE with kind KIND
5249    and add it to the end of the doubly-linked list.  */
5250 
5251 rtx_note *
5252 emit_note (enum insn_note kind)
5253 {
5254   rtx_note *note = make_note_raw (kind);
5255   add_insn (note);
5256   return note;
5257 }
5258 
5259 /* Emit a clobber of lvalue X.  */
5260 
5261 rtx_insn *
5262 emit_clobber (rtx x)
5263 {
5264   /* CONCATs should not appear in the insn stream.  */
5265   if (GET_CODE (x) == CONCAT)
5266     {
5267       emit_clobber (XEXP (x, 0));
5268       return emit_clobber (XEXP (x, 1));
5269     }
5270   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5271 }
5272 
5273 /* Return a sequence of insns to clobber lvalue X.  */
5274 
5275 rtx_insn *
5276 gen_clobber (rtx x)
5277 {
5278   rtx_insn *seq;
5279 
5280   start_sequence ();
5281   emit_clobber (x);
5282   seq = get_insns ();
5283   end_sequence ();
5284   return seq;
5285 }
5286 
5287 /* Emit a use of rvalue X.  */
5288 
5289 rtx_insn *
5290 emit_use (rtx x)
5291 {
5292   /* CONCATs should not appear in the insn stream.  */
5293   if (GET_CODE (x) == CONCAT)
5294     {
5295       emit_use (XEXP (x, 0));
5296       return emit_use (XEXP (x, 1));
5297     }
5298   return emit_insn (gen_rtx_USE (VOIDmode, x));
5299 }
5300 
5301 /* Return a sequence of insns to use rvalue X.  */
5302 
5303 rtx_insn *
5304 gen_use (rtx x)
5305 {
5306   rtx_insn *seq;
5307 
5308   start_sequence ();
5309   emit_use (x);
5310   seq = get_insns ();
5311   end_sequence ();
5312   return seq;
5313 }
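
/* Illustrative sketch only: emit_use and emit_clobber are typically used
   to keep a value live, or to mark a register as overwritten, across code
   the optimizers cannot see through.  With R standing for some hard
   register number chosen by the caller:

     emit_clobber (gen_rtx_REG (word_mode, R));
     ...
     emit_use (gen_rtx_REG (word_mode, R));

   gen_clobber and gen_use above return the same insns as a detached
   sequence instead of adding them to the current chain.  */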
5314 
5315 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5316    Return the set in INSN that such notes describe, or NULL if the notes
5317    have no meaning for INSN.  */
5318 
5319 rtx
5320 set_for_reg_notes (rtx insn)
5321 {
5322   rtx pat, reg;
5323 
5324   if (!INSN_P (insn))
5325     return NULL_RTX;
5326 
5327   pat = PATTERN (insn);
5328   if (GET_CODE (pat) == PARALLEL)
5329     {
5330       /* We do not use single_set because that ignores SETs of unused
5331 	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
5332 	 PARALLEL to have a single SET.  */
5333       if (multiple_sets (insn))
5334 	return NULL_RTX;
5335       pat = XVECEXP (pat, 0, 0);
5336     }
5337 
5338   if (GET_CODE (pat) != SET)
5339     return NULL_RTX;
5340 
5341   reg = SET_DEST (pat);
5342 
5343   /* Notes apply to the contents of a STRICT_LOW_PART.  */
5344   if (GET_CODE (reg) == STRICT_LOW_PART
5345       || GET_CODE (reg) == ZERO_EXTRACT)
5346     reg = XEXP (reg, 0);
5347 
5348   /* Check that we have a register.  */
5349   if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5350     return NULL_RTX;
5351 
5352   return pat;
5353 }
5354 
5355 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5356    note of this type already exists, remove it first.  */
5357 
5358 rtx
5359 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5360 {
5361   rtx note = find_reg_note (insn, kind, NULL_RTX);
5362 
5363   switch (kind)
5364     {
5365     case REG_EQUAL:
5366     case REG_EQUIV:
5367       /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
5368       if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5369 	return NULL_RTX;
5370 
5371       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5372 	 It serves no useful purpose and breaks eliminate_regs.  */
5373       if (GET_CODE (datum) == ASM_OPERANDS)
5374 	return NULL_RTX;
5375 
5376       /* Notes with side effects are dangerous.  Even if the side-effect
5377 	 initially mirrors one in PATTERN (INSN), later optimizations
5378 	 might alter the way that the final register value is calculated
5379 	 and so move or alter the side-effect in some way.  The note would
5380 	 then no longer be a valid substitution for SET_SRC.  */
5381       if (side_effects_p (datum))
5382 	return NULL_RTX;
5383       break;
5384 
5385     default:
5386       break;
5387     }
5388 
5389   if (note)
5390     XEXP (note, 0) = datum;
5391   else
5392     {
5393       add_reg_note (insn, kind, datum);
5394       note = REG_NOTES (insn);
5395     }
5396 
5397   switch (kind)
5398     {
5399     case REG_EQUAL:
5400     case REG_EQUIV:
5401       df_notes_rescan (as_a <rtx_insn *> (insn));
5402       break;
5403     default:
5404       break;
5405     }
5406 
5407   return note;
5408 }
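
/* Illustrative sketch only: after emitting a multi-insn expansion whose
   overall effect is a simple expression, callers often record that
   expression for the optimizers with a REG_EQUAL note, e.g.

     rtx_insn *last = emit_insn (gen_rtx_SET (dst, tmp));
     set_unique_reg_note (last, REG_EQUAL, copy_rtx (src));

   where DST, TMP and SRC stand for whatever rtxes the caller has
   already built.  */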
5409 
5410 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5411 rtx
5412 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5413 {
5414   rtx set = set_for_reg_notes (insn);
5415 
5416   if (set && SET_DEST (set) == dst)
5417     return set_unique_reg_note (insn, kind, datum);
5418   return NULL_RTX;
5419 }
5420 
5421 /* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
5422    following barrier if the instruction needs one and if ALLOW_BARRIER_P
5423    is true.
5424 
5425    If X is a label, it is simply added into the insn chain.  */
5426 
5427 rtx_insn *
5428 emit (rtx x, bool allow_barrier_p)
5429 {
5430   enum rtx_code code = classify_insn (x);
5431 
5432   switch (code)
5433     {
5434     case CODE_LABEL:
5435       return emit_label (x);
5436     case INSN:
5437       return emit_insn (x);
5438     case  JUMP_INSN:
5439       {
5440 	rtx_insn *insn = emit_jump_insn (x);
5441 	if (allow_barrier_p
5442 	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5443 	  return emit_barrier ();
5444 	return insn;
5445       }
5446     case CALL_INSN:
5447       return emit_call_insn (x);
5448     case DEBUG_INSN:
5449       return emit_debug_insn (x);
5450     default:
5451       gcc_unreachable ();
5452     }
5453 }
5454 
5455 /* Space for free sequence stack entries.  */
5456 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5457 
5458 /* Begin emitting insns to a sequence.  If this sequence will contain
5459    something that might cause the compiler to pop arguments to function
5460    calls (because those pops have previously been deferred; see
5461    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5462    before calling this function.  That will ensure that the deferred
5463    pops are not accidentally emitted in the middle of this sequence.  */
5464 
5465 void
5466 start_sequence (void)
5467 {
5468   struct sequence_stack *tem;
5469 
5470   if (free_sequence_stack != NULL)
5471     {
5472       tem = free_sequence_stack;
5473       free_sequence_stack = tem->next;
5474     }
5475   else
5476     tem = ggc_alloc<sequence_stack> ();
5477 
5478   tem->next = get_current_sequence ()->next;
5479   tem->first = get_insns ();
5480   tem->last = get_last_insn ();
5481   get_current_sequence ()->next = tem;
5482 
5483   set_first_insn (0);
5484   set_last_insn (0);
5485 }
5486 
5487 /* Set up the insn chain starting with FIRST as the current sequence,
5488    saving the previously current one.  See the documentation for
5489    start_sequence for more information about how to use this function.  */
5490 
5491 void
5492 push_to_sequence (rtx_insn *first)
5493 {
5494   rtx_insn *last;
5495 
5496   start_sequence ();
5497 
5498   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5499     ;
5500 
5501   set_first_insn (first);
5502   set_last_insn (last);
5503 }
5504 
5505 /* Like push_to_sequence, but take the last insn as an argument to avoid
5506    looping through the list.  */
5507 
5508 void
5509 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5510 {
5511   start_sequence ();
5512 
5513   set_first_insn (first);
5514   set_last_insn (last);
5515 }
5516 
5517 /* Set up the outer-level insn chain
5518    as the current sequence, saving the previously current one.  */
5519 
5520 void
5521 push_topmost_sequence (void)
5522 {
5523   struct sequence_stack *top;
5524 
5525   start_sequence ();
5526 
5527   top = get_topmost_sequence ();
5528   set_first_insn (top->first);
5529   set_last_insn (top->last);
5530 }
5531 
5532 /* After emitting to the outer-level insn chain, update the outer-level
5533    insn chain, and restore the previous saved state.  */
5534 
5535 void
5536 pop_topmost_sequence (void)
5537 {
5538   struct sequence_stack *top;
5539 
5540   top = get_topmost_sequence ();
5541   top->first = get_insns ();
5542   top->last = get_last_insn ();
5543 
5544   end_sequence ();
5545 }
5546 
5547 /* After emitting to a sequence, restore previous saved state.
5548 
5549    To get the contents of the sequence just made, you must call
5550    `get_insns' *before* calling here.
5551 
5552    If the compiler might have deferred popping arguments while
5553    generating this sequence, and this sequence will not be immediately
5554    inserted into the instruction stream, use do_pending_stack_adjust
5555    before calling get_insns.  That will ensure that the deferred
5556    pops are inserted into this sequence, and not into some random
5557    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5558    information about deferred popping of arguments.  */
5559 
5560 void
5561 end_sequence (void)
5562 {
5563   struct sequence_stack *tem = get_current_sequence ()->next;
5564 
5565   set_first_insn (tem->first);
5566   set_last_insn (tem->last);
5567   get_current_sequence ()->next = tem->next;
5568 
5569   memset (tem, 0, sizeof (*tem));
5570   tem->next = free_sequence_stack;
5571   free_sequence_stack = tem;
5572 }
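
/* Illustrative sketch only: the usual pairing of start_sequence and
   end_sequence builds insns on a side chain and then splices them into
   the main chain in one go:

     start_sequence ();
     emit_insn (gen_rtx_SET (tmp, const1_rtx));
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn (seq);

   Note that get_insns must be called before end_sequence, as described
   above.  */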
5573 
5574 /* Return 1 if currently emitting into a sequence.  */
5575 
5576 int
5577 in_sequence_p (void)
5578 {
5579   return get_current_sequence ()->next != 0;
5580 }
5581 
5582 /* Put the various virtual registers into REGNO_REG_RTX.  */
5583 
5584 static void
5585 init_virtual_regs (void)
5586 {
5587   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5588   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5589   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5590   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5591   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5592   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5593     = virtual_preferred_stack_boundary_rtx;
5594 }
5595 
5596 
5597 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5598 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5599 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5600 static int copy_insn_n_scratches;
5601 
5602 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5603    copied an ASM_OPERANDS.
5604    In that case, it is the original input-operand vector.  */
5605 static rtvec orig_asm_operands_vector;
5606 
5607 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5608    copied an ASM_OPERANDS.
5609    In that case, it is the copied input-operand vector.  */
5610 static rtvec copy_asm_operands_vector;
5611 
5612 /* Likewise for the constraints vector.  */
5613 static rtvec orig_asm_constraints_vector;
5614 static rtvec copy_asm_constraints_vector;
5615 
5616 /* Recursively create a new copy of an rtx for copy_insn.
5617    This function differs from copy_rtx in that it handles SCRATCHes and
5618    ASM_OPERANDs properly.
5619    Normally, this function is not used directly; use copy_insn as front end.
5620    However, you could first copy an insn pattern with copy_insn and then use
5621    this function afterwards to properly copy any REG_NOTEs containing
5622    SCRATCHes.  */
5623 
5624 rtx
5625 copy_insn_1 (rtx orig)
5626 {
5627   rtx copy;
5628   int i, j;
5629   RTX_CODE code;
5630   const char *format_ptr;
5631 
5632   if (orig == NULL)
5633     return NULL;
5634 
5635   code = GET_CODE (orig);
5636 
5637   switch (code)
5638     {
5639     case REG:
5640     case DEBUG_EXPR:
5641     CASE_CONST_ANY:
5642     case SYMBOL_REF:
5643     case CODE_LABEL:
5644     case PC:
5645     case RETURN:
5646     case SIMPLE_RETURN:
5647       return orig;
5648     case CLOBBER:
5649       /* Share clobbers of hard registers, but do not share pseudo reg
5650          clobbers or clobbers of hard registers that originated as pseudos.
5651          This is needed to allow safe register renaming.  */
5652       if (REG_P (XEXP (orig, 0))
5653 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5654 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5655 	return orig;
5656       break;
5657 
5658     case SCRATCH:
5659       for (i = 0; i < copy_insn_n_scratches; i++)
5660 	if (copy_insn_scratch_in[i] == orig)
5661 	  return copy_insn_scratch_out[i];
5662       break;
5663 
5664     case CONST:
5665       if (shared_const_p (orig))
5666 	return orig;
5667       break;
5668 
5669       /* A MEM with a constant address is not sharable.  The problem is that
5670 	 the constant address may need to be reloaded.  If the mem is shared,
5671 	 then reloading one copy of this mem will cause all copies to appear
5672 	 to have been reloaded.  */
5673 
5674     default:
5675       break;
5676     }
5677 
5678   /* Copy the various flags, fields, and other information.  We assume
5679      that all fields need copying, and then clear the fields that should
5680      not be copied.  That is the sensible default behavior, and forces
5681      us to explicitly document why we are *not* copying a flag.  */
5682   copy = shallow_copy_rtx (orig);
5683 
5684   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5685   if (INSN_P (orig))
5686     {
5687       RTX_FLAG (copy, jump) = 0;
5688       RTX_FLAG (copy, call) = 0;
5689       RTX_FLAG (copy, frame_related) = 0;
5690     }
5691 
5692   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5693 
5694   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5695     switch (*format_ptr++)
5696       {
5697       case 'e':
5698 	if (XEXP (orig, i) != NULL)
5699 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5700 	break;
5701 
5702       case 'E':
5703       case 'V':
5704 	if (XVEC (orig, i) == orig_asm_constraints_vector)
5705 	  XVEC (copy, i) = copy_asm_constraints_vector;
5706 	else if (XVEC (orig, i) == orig_asm_operands_vector)
5707 	  XVEC (copy, i) = copy_asm_operands_vector;
5708 	else if (XVEC (orig, i) != NULL)
5709 	  {
5710 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5711 	    for (j = 0; j < XVECLEN (copy, i); j++)
5712 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5713 	  }
5714 	break;
5715 
5716       case 't':
5717       case 'w':
5718       case 'i':
5719       case 'p':
5720       case 's':
5721       case 'S':
5722       case 'u':
5723       case '0':
5724 	/* These are left unchanged.  */
5725 	break;
5726 
5727       default:
5728 	gcc_unreachable ();
5729       }
5730 
5731   if (code == SCRATCH)
5732     {
5733       i = copy_insn_n_scratches++;
5734       gcc_assert (i < MAX_RECOG_OPERANDS);
5735       copy_insn_scratch_in[i] = orig;
5736       copy_insn_scratch_out[i] = copy;
5737     }
5738   else if (code == ASM_OPERANDS)
5739     {
5740       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5741       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5742       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5743       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5744     }
5745 
5746   return copy;
5747 }
5748 
5749 /* Create a new copy of an rtx.
5750    This function differs from copy_rtx in that it handles SCRATCHes and
5751    ASM_OPERANDs properly.
5752    INSN doesn't really have to be a full INSN; it could be just the
5753    pattern.  */
5754 rtx
5755 copy_insn (rtx insn)
5756 {
5757   copy_insn_n_scratches = 0;
5758   orig_asm_operands_vector = 0;
5759   orig_asm_constraints_vector = 0;
5760   copy_asm_operands_vector = 0;
5761   copy_asm_constraints_vector = 0;
5762   return copy_insn_1 (insn);
5763 }
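
/* Illustrative sketch only: copy_insn is normally applied to a pattern
   rather than to the insn itself, e.g. when duplicating an existing
   instruction elsewhere in the stream:

     rtx_insn *dup = emit_insn (copy_insn (PATTERN (insn)));

   which is essentially what emit_copy_of_insn_after does further below,
   minus the bookkeeping for notes and flags.  */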
5764 
5765 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5766    on the assumption that INSN itself remains in its original place.  */
5767 
5768 rtx_insn *
5769 copy_delay_slot_insn (rtx_insn *insn)
5770 {
5771   /* Copy INSN with its rtx_code, all its notes, location etc.  */
5772   insn = as_a <rtx_insn *> (copy_rtx (insn));
5773   INSN_UID (insn) = cur_insn_uid++;
5774   return insn;
5775 }
5776 
5777 /* Initialize data structures and variables in this file
5778    before generating rtl for each function.  */
5779 
5780 void
5781 init_emit (void)
5782 {
5783   set_first_insn (NULL);
5784   set_last_insn (NULL);
5785   if (param_min_nondebug_insn_uid)
5786     cur_insn_uid = param_min_nondebug_insn_uid;
5787   else
5788     cur_insn_uid = 1;
5789   cur_debug_insn_uid = 1;
5790   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5791   first_label_num = label_num;
5792   get_current_sequence ()->next = NULL;
5793 
5794   /* Init the tables that describe all the pseudo regs.  */
5795 
5796   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 1;
5797 
5798   crtl->emit.regno_pointer_align
5799     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5800 
5801   regno_reg_rtx
5802     = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5803 
5804   /* Put copies of all the hard registers into regno_reg_rtx.  */
5805   memcpy (regno_reg_rtx,
5806 	  initial_regno_reg_rtx,
5807 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5808 
5809   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5810   init_virtual_regs ();
5811 
5812   /* Indicate that the virtual registers and stack locations are
5813      all pointers.  */
5814   REG_POINTER (stack_pointer_rtx) = 1;
5815   REG_POINTER (frame_pointer_rtx) = 1;
5816   REG_POINTER (hard_frame_pointer_rtx) = 1;
5817   REG_POINTER (arg_pointer_rtx) = 1;
5818 
5819   REG_POINTER (virtual_incoming_args_rtx) = 1;
5820   REG_POINTER (virtual_stack_vars_rtx) = 1;
5821   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5822   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5823   REG_POINTER (virtual_cfa_rtx) = 1;
5824 
5825 #ifdef STACK_BOUNDARY
5826   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5827   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5828   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5829   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5830 
5831   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5832   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5833   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5834   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5835 
5836   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5837 #endif
5838 
5839 #ifdef INIT_EXPANDERS
5840   INIT_EXPANDERS;
5841 #endif
5842 }
5843 
5844 /* Return the value of element I of CONST_VECTOR X as a wide_int.  */
5845 
5846 wide_int
5847 const_vector_int_elt (const_rtx x, unsigned int i)
5848 {
5849   /* First handle elements that are directly encoded.  */
5850   machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5851   if (i < (unsigned int) XVECLEN (x, 0))
5852     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5853 
5854   /* Identify the pattern that contains element I and work out the index of
5855      the last encoded element for that pattern.  */
5856   unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5857   unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5858   unsigned int count = i / npatterns;
5859   unsigned int pattern = i % npatterns;
5860   unsigned int final_i = encoded_nelts - npatterns + pattern;
5861 
5862   /* If there are no steps, the final encoded value is the right one.  */
5863   if (!CONST_VECTOR_STEPPED_P (x))
5864     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5865 
5866   /* Otherwise work out the value from the last two encoded elements.  */
5867   rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5868   rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5869   wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5870 			   rtx_mode_t (v1, elt_mode));
5871   return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5872 }
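
/* Worked example (illustrative only): for a stepped encoding with one
   pattern and three encoded elements {1, 4, 7}, representing the series
   1 + 3*i, element 5 is computed from the last two encoded elements as
   7 + (5 - 2) * (7 - 4) = 16, which matches 1 + 3*5.  */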
5873 
5874 /* Return the value of element I of CONST_VECTOR X.  */
5875 
5876 rtx
5877 const_vector_elt (const_rtx x, unsigned int i)
5878 {
5879   /* First handle elements that are directly encoded.  */
5880   if (i < (unsigned int) XVECLEN (x, 0))
5881     return CONST_VECTOR_ENCODED_ELT (x, i);
5882 
5883   /* If there are no steps, the final encoded value is the right one.  */
5884   if (!CONST_VECTOR_STEPPED_P (x))
5885     {
5886       /* Identify the pattern that contains element I and work out the index of
5887 	 the last encoded element for that pattern.  */
5888       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5889       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5890       unsigned int pattern = i % npatterns;
5891       unsigned int final_i = encoded_nelts - npatterns + pattern;
5892       return CONST_VECTOR_ENCODED_ELT (x, final_i);
5893     }
5894 
5895   /* Otherwise work out the value from the last two encoded elements.  */
5896   return immed_wide_int_const (const_vector_int_elt (x, i),
5897 			       GET_MODE_INNER (GET_MODE (x)));
5898 }
5899 
5900 /* Return true if X is a valid element for a CONST_VECTOR of the given
5901   mode.  */
5902 
5903 bool
5904 valid_for_const_vector_p (machine_mode, rtx x)
5905 {
5906   return (CONST_SCALAR_INT_P (x)
5907 	  || CONST_POLY_INT_P (x)
5908 	  || CONST_DOUBLE_AS_FLOAT_P (x)
5909 	  || CONST_FIXED_P (x));
5910 }
5911 
5912 /* Generate a vector constant of mode MODE in which every element has
5913    value ELT.  */
5914 
5915 rtx
5916 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5917 {
5918   rtx_vector_builder builder (mode, 1, 1);
5919   builder.quick_push (elt);
5920   return builder.build ();
5921 }
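
/* Illustrative sketch only, assuming the target provides V4SImode:

     rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);

   yields the constant vector {1, 1, 1, 1}, encoded as a single
   pattern containing one element.  */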
5922 
5923 /* Return a vector rtx of mode MODE in which every element has value X.
5924    The result will be a constant if X is constant.  */
5925 
5926 rtx
5927 gen_vec_duplicate (machine_mode mode, rtx x)
5928 {
5929   if (valid_for_const_vector_p (mode, x))
5930     return gen_const_vec_duplicate (mode, x);
5931   return gen_rtx_VEC_DUPLICATE (mode, x);
5932 }
5933 
5934 /* A subroutine of const_vec_series_p that handles the case in which:
5935 
5936      (GET_CODE (X) == CONST_VECTOR
5937       && CONST_VECTOR_NPATTERNS (X) == 1
5938       && !CONST_VECTOR_DUPLICATE_P (X))
5939 
5940    is known to hold.  */
5941 
5942 bool
5943 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5944 {
5945   /* Stepped sequences are only defined for integers, to avoid specifying
5946      rounding behavior.  */
5947   if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5948     return false;
5949 
5950   /* A non-duplicated vector with two elements can always be seen as a
5951      series with a nonzero step.  Longer vectors must have a stepped
5952      encoding.  */
5953   if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
5954       && !CONST_VECTOR_STEPPED_P (x))
5955     return false;
5956 
5957   /* Calculate the step between the first and second elements.  */
5958   scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5959   rtx base = CONST_VECTOR_ELT (x, 0);
5960   rtx step = simplify_binary_operation (MINUS, inner,
5961 					CONST_VECTOR_ENCODED_ELT (x, 1), base);
5962   if (rtx_equal_p (step, CONST0_RTX (inner)))
5963     return false;
5964 
5965   /* If we have a stepped encoding, check that the step between the
5966      second and third elements is the same as STEP.  */
5967   if (CONST_VECTOR_STEPPED_P (x))
5968     {
5969       rtx diff = simplify_binary_operation (MINUS, inner,
5970 					    CONST_VECTOR_ENCODED_ELT (x, 2),
5971 					    CONST_VECTOR_ENCODED_ELT (x, 1));
5972       if (!rtx_equal_p (step, diff))
5973 	return false;
5974     }
5975 
5976   *base_out = base;
5977   *step_out = step;
5978   return true;
5979 }
5980 
5981 /* Generate a vector constant of mode MODE in which element I has
5982    the value BASE + I * STEP.  */
5983 
5984 rtx
5985 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
5986 {
5987   gcc_assert (valid_for_const_vector_p (mode, base)
5988 	      && valid_for_const_vector_p (mode, step));
5989 
5990   rtx_vector_builder builder (mode, 1, 3);
5991   builder.quick_push (base);
5992   for (int i = 1; i < 3; ++i)
5993     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
5994 					     builder[i - 1], step));
5995   return builder.build ();
5996 }
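
/* Illustrative sketch only, assuming the target provides V4SImode:

     rtx iota = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);

   yields the constant vector {0, 1, 2, 3}, encoded as a single stepped
   pattern whose three encoded elements are {0, 1, 2}.  */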
5997 
5998 /* Generate a vector of mode MODE in which element I has the value
5999    BASE + I * STEP.  The result will be a constant if BASE and STEP
6000    are both constants.  */
6001 
6002 rtx
6003 gen_vec_series (machine_mode mode, rtx base, rtx step)
6004 {
6005   if (step == const0_rtx)
6006     return gen_vec_duplicate (mode, base);
6007   if (valid_for_const_vector_p (mode, base)
6008       && valid_for_const_vector_p (mode, step))
6009     return gen_const_vec_series (mode, base, step);
6010   return gen_rtx_VEC_SERIES (mode, base, step);
6011 }
6012 
6013 /* Generate a new vector constant for mode MODE and constant value
6014    CONSTANT.  */
6015 
6016 static rtx
6017 gen_const_vector (machine_mode mode, int constant)
6018 {
6019   machine_mode inner = GET_MODE_INNER (mode);
6020 
6021   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6022 
6023   rtx el = const_tiny_rtx[constant][(int) inner];
6024   gcc_assert (el);
6025 
6026   return gen_const_vec_duplicate (mode, el);
6027 }
6028 
6029 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6030    all elements are zero, and the one vector when all elements are one.  */
6031 rtx
6032 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6033 {
6034   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6035 
6036   /* If the values are all the same, check to see if we can use one of the
6037      standard constant vectors.  */
6038   if (rtvec_all_equal_p (v))
6039     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6040 
6041   unsigned int nunits = GET_NUM_ELEM (v);
6042   rtx_vector_builder builder (mode, nunits, 1);
6043   for (unsigned int i = 0; i < nunits; ++i)
6044     builder.quick_push (RTVEC_ELT (v, i));
6045   return builder.build (v);
6046 }
6047 
6048 /* Initialise global register information required by all functions.  */
6049 
6050 void
6051 init_emit_regs (void)
6052 {
6053   int i;
6054   machine_mode mode;
6055   mem_attrs *attrs;
6056 
6057   /* Reset register attributes */
6058   reg_attrs_htab->empty ();
6059 
6060   /* We need reg_raw_mode, so initialize the modes now.  */
6061   init_reg_modes_target ();
6062 
6063   /* Assign register numbers to the globally defined register rtx.  */
6064   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6065   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6066   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6067   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6068   virtual_incoming_args_rtx =
6069     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6070   virtual_stack_vars_rtx =
6071     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6072   virtual_stack_dynamic_rtx =
6073     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6074   virtual_outgoing_args_rtx =
6075     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6076   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6077   virtual_preferred_stack_boundary_rtx =
6078     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6079 
6080   /* Initialize RTL for commonly used hard registers.  These are
6081      copied into regno_reg_rtx as we begin to compile each function.  */
6082   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6083     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6084 
6085 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6086   return_address_pointer_rtx
6087     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6088 #endif
6089 
6090   pic_offset_table_rtx = NULL_RTX;
6091   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6092     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6093 
6094   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6095     {
6096       mode = (machine_mode) i;
6097       attrs = ggc_cleared_alloc<mem_attrs> ();
6098       attrs->align = BITS_PER_UNIT;
6099       attrs->addrspace = ADDR_SPACE_GENERIC;
6100       if (mode != BLKmode && mode != VOIDmode)
6101 	{
6102 	  attrs->size_known_p = true;
6103 	  attrs->size = GET_MODE_SIZE (mode);
6104 	  if (STRICT_ALIGNMENT)
6105 	    attrs->align = GET_MODE_ALIGNMENT (mode);
6106 	}
6107       mode_mem_attrs[i] = attrs;
6108     }
6109 
6110   split_branch_probability = profile_probability::uninitialized ();
6111 }
6112 
6113 /* Initialize global machine_mode variables.  */
6114 
6115 void
6116 init_derived_machine_modes (void)
6117 {
6118   opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6119   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6120     {
6121       scalar_int_mode mode = mode_iter.require ();
6122 
6123       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6124 	  && !opt_byte_mode.exists ())
6125 	opt_byte_mode = mode;
6126 
6127       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6128 	  && !opt_word_mode.exists ())
6129 	opt_word_mode = mode;
6130     }
6131 
6132   byte_mode = opt_byte_mode.require ();
6133   word_mode = opt_word_mode.require ();
6134   ptr_mode = as_a <scalar_int_mode>
6135     (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6136 }
6137 
6138 /* Create some permanent unique rtl objects shared between all functions.  */
6139 
6140 void
6141 init_emit_once (void)
6142 {
6143   int i;
6144   machine_mode mode;
6145   scalar_float_mode double_mode;
6146   opt_scalar_mode smode_iter;
6147 
6148   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6149      CONST_FIXED, and memory attribute hash tables.  */
6150   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6151 
6152 #if TARGET_SUPPORTS_WIDE_INT
6153   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6154 #endif
6155   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6156 
6157   if (NUM_POLY_INT_COEFFS > 1)
6158     const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6159 
6160   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6161 
6162   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6163 
6164 #ifdef INIT_EXPANDERS
6165   /* This is to initialize {init|mark|free}_machine_status before the first
6166      call to push_function_context_to.  This is needed by the Chill front
6167      end which calls push_function_context_to before the first call to
6168      init_function_start.  */
6169   INIT_EXPANDERS;
6170 #endif
6171 
6172   /* Create the unique rtx's for certain rtx codes and operand values.  */
6173 
6174   /* Process stack-limiting command-line options.  */
6175   if (opt_fstack_limit_symbol_arg != NULL)
6176     stack_limit_rtx
6177       = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6178   if (opt_fstack_limit_register_no >= 0)
6179     stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6180 
6181   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6182      tries to use these variables.  */
6183   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6184     const_int_rtx[i + MAX_SAVED_CONST_INT] =
6185       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6186 
6187   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6188       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6189     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6190   else
6191     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6192 
6193   double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6194 
6195   real_from_integer (&dconst0, double_mode, 0, SIGNED);
6196   real_from_integer (&dconst1, double_mode, 1, SIGNED);
6197   real_from_integer (&dconst2, double_mode, 2, SIGNED);
6198 
6199   dconstm1 = dconst1;
6200   dconstm1.sign = 1;
6201 
6202   dconsthalf = dconst1;
6203   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6204 
6205   for (i = 0; i < 3; i++)
6206     {
6207       const REAL_VALUE_TYPE *const r =
6208 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6209 
6210       FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6211 	const_tiny_rtx[i][(int) mode] =
6212 	  const_double_from_real_value (*r, mode);
6213 
6214       FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6215 	const_tiny_rtx[i][(int) mode] =
6216 	  const_double_from_real_value (*r, mode);
6217 
6218       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6219 
6220       FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6221 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6222 
6223       for (mode = MIN_MODE_PARTIAL_INT;
6224 	   mode <= MAX_MODE_PARTIAL_INT;
6225 	   mode = (machine_mode)((int)(mode) + 1))
6226 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6227     }
6228 
6229   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6230 
6231   FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6232     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6233 
6234   /* For BImode, 1 and -1 are unsigned and signed interpretations
6235      of the same value.  */
6236   const_tiny_rtx[0][(int) BImode] = const0_rtx;
6237   const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6238   const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6239 
6240   for (mode = MIN_MODE_PARTIAL_INT;
6241        mode <= MAX_MODE_PARTIAL_INT;
6242        mode = (machine_mode)((int)(mode) + 1))
6243     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6244 
6245   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6246     {
6247       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6248       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6249     }
6250 
6251   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6252     {
6253       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6254       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6255     }
6256 
6257   /* As for BImode, "all 1" and "all -1" are unsigned and signed
6258      interpretations of the same value.  */
6259   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6260     {
6261       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6262       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6263       const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6264     }
6265 
6266   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6267     {
6268       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6269       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6270       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6271     }
6272 
6273   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6274     {
6275       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6276       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6277     }
6278 
6279   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6280     {
6281       scalar_mode smode = smode_iter.require ();
6282       FCONST0 (smode).data.high = 0;
6283       FCONST0 (smode).data.low = 0;
6284       FCONST0 (smode).mode = smode;
6285       const_tiny_rtx[0][(int) smode]
6286 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6287     }
6288 
6289   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6290     {
6291       scalar_mode smode = smode_iter.require ();
6292       FCONST0 (smode).data.high = 0;
6293       FCONST0 (smode).data.low = 0;
6294       FCONST0 (smode).mode = smode;
6295       const_tiny_rtx[0][(int) smode]
6296 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6297     }
6298 
6299   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6300     {
6301       scalar_mode smode = smode_iter.require ();
6302       FCONST0 (smode).data.high = 0;
6303       FCONST0 (smode).data.low = 0;
6304       FCONST0 (smode).mode = smode;
6305       const_tiny_rtx[0][(int) smode]
6306 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6307 
6308       /* We store the value 1.  */
6309       FCONST1 (smode).data.high = 0;
6310       FCONST1 (smode).data.low = 0;
6311       FCONST1 (smode).mode = smode;
6312       FCONST1 (smode).data
6313 	= double_int_one.lshift (GET_MODE_FBIT (smode),
6314 				 HOST_BITS_PER_DOUBLE_INT,
6315 				 SIGNED_FIXED_POINT_MODE_P (smode));
6316       const_tiny_rtx[1][(int) smode]
6317 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6318     }
6319 
6320   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6321     {
6322       scalar_mode smode = smode_iter.require ();
6323       FCONST0 (smode).data.high = 0;
6324       FCONST0 (smode).data.low = 0;
6325       FCONST0 (smode).mode = smode;
6326       const_tiny_rtx[0][(int) smode]
6327 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6328 
6329       /* We store the value 1.  */
6330       FCONST1 (smode).data.high = 0;
6331       FCONST1 (smode).data.low = 0;
6332       FCONST1 (smode).mode = smode;
6333       FCONST1 (smode).data
6334 	= double_int_one.lshift (GET_MODE_FBIT (smode),
6335 				 HOST_BITS_PER_DOUBLE_INT,
6336 				 SIGNED_FIXED_POINT_MODE_P (smode));
6337       const_tiny_rtx[1][(int) smode]
6338 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6339     }
6340 
6341   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6342     {
6343       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6344     }
6345 
6346   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6347     {
6348       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6349     }
6350 
6351   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6352     {
6353       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6354       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6355     }
6356 
6357   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6358     {
6359       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6360       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6361     }
6362 
6363   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6364     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6365       const_tiny_rtx[0][i] = const0_rtx;
6366 
6367   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6368   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6369   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6370   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6371 				   /*prev_insn=*/NULL,
6372 				   /*next_insn=*/NULL,
6373 				   /*bb=*/NULL,
6374 				   /*pattern=*/NULL_RTX,
6375 				   /*location=*/-1,
6376 				   CODE_FOR_nothing,
6377 				   /*reg_notes=*/NULL_RTX);
6378 }
6379 
6380 /* Produce an exact duplicate of insn INSN after AFTER.
6381    Take care to update any libcall regions if present.  */
6382 
6383 rtx_insn *
6384 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6385 {
6386   rtx_insn *new_rtx;
6387   rtx link;
6388 
6389   switch (GET_CODE (insn))
6390     {
6391     case INSN:
6392       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6393       break;
6394 
6395     case JUMP_INSN:
6396       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6397       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6398       break;
6399 
6400     case DEBUG_INSN:
6401       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6402       break;
6403 
6404     case CALL_INSN:
6405       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6406       if (CALL_INSN_FUNCTION_USAGE (insn))
6407 	CALL_INSN_FUNCTION_USAGE (new_rtx)
6408 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6409       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6410       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6411       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6412       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6413 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6414       break;
6415 
6416     default:
6417       gcc_unreachable ();
6418     }
6419 
6420   /* Update LABEL_NUSES.  */
6421   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6422 
6423   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6424 
6425   /* If the old insn is frame related, then so is the new one.  This is
6426      primarily needed for IA-64 unwind info which marks epilogue insns,
6427      which may be duplicated by the basic block reordering code.  */
6428   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6429 
6430   /* Locate the end of existing REG_NOTES in NEW_RTX.  */
6431   rtx *ptail = &REG_NOTES (new_rtx);
6432   while (*ptail != NULL_RTX)
6433     ptail = &XEXP (*ptail, 1);
6434 
6435   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6436      will make them.  REG_LABEL_TARGETs are created there too, but are
6437      supposed to be sticky, so we copy them.  */
6438   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6439     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6440       {
6441 	*ptail = duplicate_reg_note (link);
6442 	ptail = &XEXP (*ptail, 1);
6443       }
6444 
6445   INSN_CODE (new_rtx) = INSN_CODE (insn);
6446   return new_rtx;
6447 }
6448 
6449 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6450 rtx
6451 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6452 {
6453   if (hard_reg_clobbers[mode][regno])
6454     return hard_reg_clobbers[mode][regno];
6455   else
6456     return (hard_reg_clobbers[mode][regno] =
6457 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6458 }
6459 
6460 location_t prologue_location;
6461 location_t epilogue_location;
6462 
6463 /* Hold the current and the last location information, so that the
6464    data structures are built lazily only when some instructions in a
6465    given place are needed.  */
6466 static location_t curr_location;
6467 
6468 /* Allocate insn location datastructure.  */
6469 void
6470 insn_locations_init (void)
6471 {
6472   prologue_location = epilogue_location = 0;
6473   curr_location = UNKNOWN_LOCATION;
6474 }
6475 
6476 /* At the end of emit stage, clear current location.  */
6477 void
6478 insn_locations_finalize (void)
6479 {
6480   epilogue_location = curr_location;
6481   curr_location = UNKNOWN_LOCATION;
6482 }
6483 
6484 /* Set current location.  */
6485 void
6486 set_curr_insn_location (location_t location)
6487 {
6488   curr_location = location;
6489 }
6490 
6491 /* Get current location.  */
6492 location_t
6493 curr_insn_location (void)
6494 {
6495   return curr_location;
6496 }
6497 
6498 /* Set the location of the insn chain starting at INSN to LOC.  */
6499 void
6500 set_insn_locations (rtx_insn *insn, location_t loc)
6501 {
6502   while (insn)
6503     {
6504       if (INSN_P (insn))
6505 	INSN_LOCATION (insn) = loc;
6506       insn = NEXT_INSN (insn);
6507     }
6508 }
6509 
6510 /* Return lexical scope block insn belongs to.  */
6511 tree
6512 insn_scope (const rtx_insn *insn)
6513 {
6514   return LOCATION_BLOCK (INSN_LOCATION (insn));
6515 }
6516 
6517 /* Return line number of the statement that produced this insn.  */
6518 int
6519 insn_line (const rtx_insn *insn)
6520 {
6521   return LOCATION_LINE (INSN_LOCATION (insn));
6522 }
6523 
6524 /* Return source file of the statement that produced this insn.  */
6525 const char *
6526 insn_file (const rtx_insn *insn)
6527 {
6528   return LOCATION_FILE (INSN_LOCATION (insn));
6529 }
6530 
6531 /* Return expanded location of the statement that produced this insn.  */
6532 expanded_location
6533 insn_location (const rtx_insn *insn)
6534 {
6535   return expand_location (INSN_LOCATION (insn));
6536 }
6537 
6538 /* Return true if memory model MODEL requires a pre-operation (release-style)
6539    barrier or a post-operation (acquire-style) barrier.  While not universal,
6540    this function matches behavior of several targets.  */
6541 
6542 bool
6543 need_atomic_barrier_p (enum memmodel model, bool pre)
6544 {
6545   switch (model & MEMMODEL_BASE_MASK)
6546     {
6547     case MEMMODEL_RELAXED:
6548     case MEMMODEL_CONSUME:
6549       return false;
6550     case MEMMODEL_RELEASE:
6551       return pre;
6552     case MEMMODEL_ACQUIRE:
6553       return !pre;
6554     case MEMMODEL_ACQ_REL:
6555     case MEMMODEL_SEQ_CST:
6556       return true;
6557     default:
6558       gcc_unreachable ();
6559     }
6560 }
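
/* Illustrative sketch only: an expander for an atomic operation might
   bracket the bare operation with explicit fences based on this
   predicate, e.g.

     if (need_atomic_barrier_p (model, true))
       expand_mem_thread_fence (model);
     ...emit the bare atomic operation...
     if (need_atomic_barrier_p (model, false))
       expand_mem_thread_fence (model);

   so that MEMMODEL_RELEASE gets only the leading fence, MEMMODEL_ACQUIRE
   only the trailing one, and MEMMODEL_SEQ_CST both.  */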
6561 
6562 /* Return a constant shift amount for shifting a value of mode MODE
6563    by VALUE bits.  */
6564 
6565 rtx
6566 gen_int_shift_amount (machine_mode, poly_int64 value)
6567 {
6568   /* Use a 64-bit mode, to avoid any truncation.
6569 
6570      ??? Perhaps this should be automatically derived from the .md files
6571      instead, or perhaps have a target hook.  */
6572   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6573 				? DImode
6574 				: int_mode_for_size (64, 0).require ());
6575   return gen_int_mode (value, shift_mode);
6576 }
6577 
6578 /* Initialize fields of rtl_data related to stack alignment.  */
6579 
6580 void
6581 rtl_data::init_stack_alignment ()
6582 {
6583   stack_alignment_needed = STACK_BOUNDARY;
6584   max_used_stack_slot_alignment = STACK_BOUNDARY;
6585   stack_alignment_estimated = 0;
6586   preferred_stack_boundary = STACK_BOUNDARY;
6587 }
6588 
6589 
6590 #include "gt-emit-rtl.h"
6591