1 /* Emit RTL for the GCC expander.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 /* Middle-to-low level generation of rtx code and insns.
22 
23    This file contains support functions for creating rtl expressions
24    and manipulating them in the doubly-linked chain of insns.
25 
26    The patterns of the insns are created by machine-dependent
27    routines in insn-emit.c, which is generated automatically from
28    the machine description.  These routines make the individual rtx's
29    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30    which are automatically generated from rtl.def; what is machine
31    dependent is the kind of rtx's they make and what arguments they
32    use.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63 #include "rtx-vector-builder.h"
64 
65 struct target_rtl default_target_rtl;
66 #if SWITCHABLE_TARGET
67 struct target_rtl *this_target_rtl = &default_target_rtl;
68 #endif
69 
70 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71 
72 /* Commonly used modes.  */
73 
74 scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
75 scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
76 scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
77 
78 /* Data structures maintained for the currently processed function in RTL form.  */
79 
80 struct rtl_data x_rtl;
81 
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83    Allocated in parallel with regno_pointer_align.
84    FIXME: We could put it into the emit_status struct, but gengtype is not
85    able to deal with a length attribute nested in top-level structures.  */
86 
87 rtx * regno_reg_rtx;
88 
89 /* This is *not* reset after each function.  It gives each CODE_LABEL
90    in the entire compilation a unique label number.  */
91 
92 static GTY(()) int label_num = 1;
93 
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
96    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
97    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
98 
99 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
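/* Illustrative note (not from the original source): the CONST0_RTX,
   CONST1_RTX and CONST2_RTX macros in rtl.h are simple lookups into this
   table, e.g. CONST0_RTX (DFmode) yields the cached (const_double:DF 0.0)
   stored in const_tiny_rtx[0][(int) DFmode].  */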
100 
101 rtx const_true_rtx;
102 
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
107 REAL_VALUE_TYPE dconsthalf;
108 
109 /* Record fixed-point constant 0 and 1.  */
110 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112 
113 /* We make one copy of (const_int C) where C is in
114    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115    to save space during the compilation and simplify comparisons of
116    integers.  */
117 
118 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
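/* Illustrative example (assuming the usual MAX_SAVED_CONST_INT of 64):
   small constants are shared, so they can be compared by pointer, e.g.

     GEN_INT (0) == const0_rtx
     GEN_INT (7) == const_int_rtx[7 + MAX_SAVED_CONST_INT]

   Larger values go through the const_int_htab hash table below.  */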
119 
120 /* Standard pieces of rtx, to be substituted directly into things.  */
121 rtx pc_rtx;
122 rtx ret_rtx;
123 rtx simple_return_rtx;
124 rtx cc0_rtx;
125 
126 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
127    this pointer should normally never be dereferenced), but is required to be
128    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
129 rtx_insn *invalid_insn_rtx;
130 
131 /* A hash table storing CONST_INTs whose absolute value is greater
132    than MAX_SAVED_CONST_INT.  */
133 
134 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
135 {
136   typedef HOST_WIDE_INT compare_type;
137 
138   static hashval_t hash (rtx i);
139   static bool equal (rtx i, HOST_WIDE_INT h);
140 };
141 
142 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
143 
144 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
145 {
146   static hashval_t hash (rtx x);
147   static bool equal (rtx x, rtx y);
148 };
149 
150 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
151 
152 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
153 {
154   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
155 
156   static hashval_t hash (rtx x);
157   static bool equal (rtx x, const compare_type &y);
158 };
159 
160 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
161 
162 /* A hash table storing register attribute structures.  */
163 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
164 {
165   static hashval_t hash (reg_attrs *x);
166   static bool equal (reg_attrs *a, reg_attrs *b);
167 };
168 
169 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
170 
171 /* A hash table storing all CONST_DOUBLEs.  */
172 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
173 {
174   static hashval_t hash (rtx x);
175   static bool equal (rtx x, rtx y);
176 };
177 
178 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
179 
180 /* A hash table storing all CONST_FIXEDs.  */
181 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
182 {
183   static hashval_t hash (rtx x);
184   static bool equal (rtx x, rtx y);
185 };
186 
187 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
188 
189 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
190 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
191 #define first_label_num (crtl->emit.x_first_label_num)
192 
193 static void set_used_decls (tree);
194 static void mark_label_nuses (rtx);
195 #if TARGET_SUPPORTS_WIDE_INT
196 static rtx lookup_const_wide_int (rtx);
197 #endif
198 static rtx lookup_const_double (rtx);
199 static rtx lookup_const_fixed (rtx);
200 static rtx gen_const_vector (machine_mode, int);
201 static void copy_rtx_if_shared_1 (rtx *orig);
202 
203 /* Probability of the conditional branch currently being processed by try_split.  */
204 profile_probability split_branch_probability;
205 
206 /* Returns a hash code for X (which is really a CONST_INT).  */
207 
208 hashval_t
209 const_int_hasher::hash (rtx x)
210 {
211   return (hashval_t) INTVAL (x);
212 }
213 
214 /* Returns nonzero if the value represented by X (which is really a
215    CONST_INT) is the same as that given by Y (which is really a
216    HOST_WIDE_INT *).  */
217 
218 bool
219 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
220 {
221   return (INTVAL (x) == y);
222 }
223 
224 #if TARGET_SUPPORTS_WIDE_INT
225 /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
226 
227 hashval_t
228 const_wide_int_hasher::hash (rtx x)
229 {
230   int i;
231   unsigned HOST_WIDE_INT hash = 0;
232   const_rtx xr = x;
233 
234   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
235     hash += CONST_WIDE_INT_ELT (xr, i);
236 
237   return (hashval_t) hash;
238 }
239 
240 /* Returns nonzero if the value represented by X (which is really a
241    CONST_WIDE_INT) is the same as that given by Y (which is really a
242    CONST_WIDE_INT).  */
243 
244 bool
245 const_wide_int_hasher::equal (rtx x, rtx y)
246 {
247   int i;
248   const_rtx xr = x;
249   const_rtx yr = y;
250   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
251     return false;
252 
253   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
254     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
255       return false;
256 
257   return true;
258 }
259 #endif
260 
261 /* Returns a hash code for CONST_POLY_INT X.  */
262 
263 hashval_t
264 const_poly_int_hasher::hash (rtx x)
265 {
266   inchash::hash h;
267   h.add_int (GET_MODE (x));
268   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
269     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
270   return h.end ();
271 }
272 
273 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */
274 
275 bool
276 const_poly_int_hasher::equal (rtx x, const compare_type &y)
277 {
278   if (GET_MODE (x) != y.first)
279     return false;
280   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
281     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
282       return false;
283   return true;
284 }
285 
286 /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
287 hashval_t
288 const_double_hasher::hash (rtx x)
289 {
290   const_rtx const value = x;
291   hashval_t h;
292 
293   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
294     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
295   else
296     {
297       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
298       /* MODE is used in the comparison, so it should be in the hash.  */
299       h ^= GET_MODE (value);
300     }
301   return h;
302 }
303 
304 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
305    is the same as that represented by Y (also a CONST_DOUBLE).  */
306 bool
307 const_double_hasher::equal (rtx x, rtx y)
308 {
309   const_rtx const a = x, b = y;
310 
311   if (GET_MODE (a) != GET_MODE (b))
312     return 0;
313   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
314     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
315 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
316   else
317     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
318 			   CONST_DOUBLE_REAL_VALUE (b));
319 }
320 
321 /* Returns a hash code for X (which is really a CONST_FIXED).  */
322 
323 hashval_t
324 const_fixed_hasher::hash (rtx x)
325 {
326   const_rtx const value = x;
327   hashval_t h;
328 
329   h = fixed_hash (CONST_FIXED_VALUE (value));
330   /* MODE is used in the comparison, so it should be in the hash.  */
331   h ^= GET_MODE (value);
332   return h;
333 }
334 
335 /* Returns nonzero if the value represented by X is the same as that
336    represented by Y.  */
337 
338 bool
339 const_fixed_hasher::equal (rtx x, rtx y)
340 {
341   const_rtx const a = x, b = y;
342 
343   if (GET_MODE (a) != GET_MODE (b))
344     return 0;
345   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
346 }
347 
348 /* Return true if the given memory attributes are equal.  */
349 
350 bool
351 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
352 {
353   if (p == q)
354     return true;
355   if (!p || !q)
356     return false;
357   return (p->alias == q->alias
358 	  && p->offset_known_p == q->offset_known_p
359 	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
360 	  && p->size_known_p == q->size_known_p
361 	  && (!p->size_known_p || known_eq (p->size, q->size))
362 	  && p->align == q->align
363 	  && p->addrspace == q->addrspace
364 	  && (p->expr == q->expr
365 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
366 		  && operand_equal_p (p->expr, q->expr, 0))));
367 }
368 
369 /* Set MEM's memory attributes so that they are the same as ATTRS.  */
370 
371 static void
372 set_mem_attrs (rtx mem, mem_attrs *attrs)
373 {
374   /* If everything is the default, we can just clear the attributes.  */
375   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
376     {
377       MEM_ATTRS (mem) = 0;
378       return;
379     }
380 
381   if (!MEM_ATTRS (mem)
382       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
383     {
384       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
385       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
386     }
387 }
388 
389 /* Returns a hash code for X (which is really a reg_attrs *).  */
390 
391 hashval_t
392 reg_attr_hasher::hash (reg_attrs *x)
393 {
394   const reg_attrs *const p = x;
395 
396   inchash::hash h;
397   h.add_ptr (p->decl);
398   h.add_poly_hwi (p->offset);
399   return h.end ();
400 }
401 
402 /* Returns nonzero if the value represented by X  is the same as that given by
403    Y.  */
404 
405 bool
406 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
407 {
408   const reg_attrs *const p = x;
409   const reg_attrs *const q = y;
410 
411   return (p->decl == q->decl && known_eq (p->offset, q->offset));
412 }
413 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
414    into the hash table if one identical to it is not already in the table,
415    returning the shared copy.  */
416 
417 static reg_attrs *
418 get_reg_attrs (tree decl, poly_int64 offset)
419 {
420   reg_attrs attrs;
421 
422   /* If everything is the default, we can just return zero.  */
423   if (decl == 0 && known_eq (offset, 0))
424     return 0;
425 
426   attrs.decl = decl;
427   attrs.offset = offset;
428 
429   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
430   if (*slot == 0)
431     {
432       *slot = ggc_alloc<reg_attrs> ();
433       memcpy (*slot, &attrs, sizeof (reg_attrs));
434     }
435 
436   return *slot;
437 }
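/* Note (illustrative, not from the original source): because of the
   hash-consing above, two REGs referring to the same decl at the same
   offset share a single reg_attrs object, so REG_ATTRS values can be
   compared for equality by pointer.  */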
438 
439 
440 #if !HAVE_blockage
441 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
442    across this insn and to keep register equivalences from being seen across it.  */
443 
444 rtx
445 gen_blockage (void)
446 {
447   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
448   MEM_VOLATILE_P (x) = true;
449   return x;
450 }
451 #endif
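/* Illustrative usage (an assumption about typical callers, not taken from
   this file): a blockage is normally emitted as

     emit_insn (gen_blockage ());

   around code that must not be scheduled across, e.g. during prologue and
   epilogue expansion.  */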
452 
453 
454 /* Set the mode and register number of X to MODE and REGNO.  */
455 
456 void
457 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
458 {
459   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
460 			? hard_regno_nregs (regno, mode)
461 			: 1);
462   PUT_MODE_RAW (x, mode);
463   set_regno_raw (x, regno, nregs);
464 }
465 
466 /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
467    don't attempt to share with the various global pieces of rtl (such as
468    frame_pointer_rtx).  */
469 
470 rtx
471 gen_raw_REG (machine_mode mode, unsigned int regno)
472 {
473   rtx x = rtx_alloc (REG MEM_STAT_INFO);
474   set_mode_and_regno (x, mode, regno);
475   REG_ATTRS (x) = NULL;
476   ORIGINAL_REGNO (x) = regno;
477   return x;
478 }
479 
480 /* There are some RTL codes that require special attention; the generation
481    functions do the raw handling.  If you add to this list, modify
482    special_rtx in gengenrtl.c as well.  */
483 
484 rtx_expr_list *
485 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
486 {
487   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
488 						 expr_list));
489 }
490 
491 rtx_insn_list *
492 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
493 {
494   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
495 						 insn_list));
496 }
497 
498 rtx_insn *
499 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
500 	      basic_block bb, rtx pattern, int location, int code,
501 	      rtx reg_notes)
502 {
503   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
504 						 prev_insn, next_insn,
505 						 bb, pattern, location, code,
506 						 reg_notes));
507 }
508 
509 rtx
510 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
511 {
512   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
513     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
514 
515 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
516   if (const_true_rtx && arg == STORE_FLAG_VALUE)
517     return const_true_rtx;
518 #endif
519 
520   /* Look up the CONST_INT in the hash table.  */
521   rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
522 						   INSERT);
523   if (*slot == 0)
524     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
525 
526   return *slot;
527 }
528 
529 rtx
530 gen_int_mode (poly_int64 c, machine_mode mode)
531 {
532   c = trunc_int_for_mode (c, mode);
533   if (c.is_constant ())
534     return GEN_INT (c.coeffs[0]);
535   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
536   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
537 }
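/* Illustrative example: unlike GEN_INT, gen_int_mode canonicalizes the
   value for MODE, so gen_int_mode (0xff, QImode) returns (const_int -1)
   (0xff sign-extended from QImode's bits, assuming the usual 8-bit QImode),
   whereas GEN_INT (0xff) would produce a CONST_INT that is not canonical
   for QImode.  */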
538 
539 /* CONST_DOUBLEs might be created from pairs of integers, or from
540    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
541    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
542 
543 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
544    hash table.  If so, return its counterpart; otherwise add it
545    to the hash table and return it.  */
546 static rtx
547 lookup_const_double (rtx real)
548 {
549   rtx *slot = const_double_htab->find_slot (real, INSERT);
550   if (*slot == 0)
551     *slot = real;
552 
553   return *slot;
554 }
555 
556 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
557    VALUE in mode MODE.  */
558 rtx
559 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
560 {
561   rtx real = rtx_alloc (CONST_DOUBLE);
562   PUT_MODE (real, mode);
563 
564   real->u.rv = value;
565 
566   return lookup_const_double (real);
567 }
568 
569 /* Determine whether FIXED, a CONST_FIXED, already exists in the
570    hash table.  If so, return its counterpart; otherwise add it
571    to the hash table and return it.  */
572 
573 static rtx
574 lookup_const_fixed (rtx fixed)
575 {
576   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
577   if (*slot == 0)
578     *slot = fixed;
579 
580   return *slot;
581 }
582 
583 /* Return a CONST_FIXED rtx for a fixed-point value specified by
584    VALUE in mode MODE.  */
585 
586 rtx
587 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
588 {
589   rtx fixed = rtx_alloc (CONST_FIXED);
590   PUT_MODE (fixed, mode);
591 
592   fixed->u.fv = value;
593 
594   return lookup_const_fixed (fixed);
595 }
596 
597 #if TARGET_SUPPORTS_WIDE_INT == 0
598 /* Constructs double_int from rtx CST.  */
599 
600 double_int
601 rtx_to_double_int (const_rtx cst)
602 {
603   double_int r;
604 
605   if (CONST_INT_P (cst))
606       r = double_int::from_shwi (INTVAL (cst));
607   else if (CONST_DOUBLE_AS_INT_P (cst))
608     {
609       r.low = CONST_DOUBLE_LOW (cst);
610       r.high = CONST_DOUBLE_HIGH (cst);
611     }
612   else
613     gcc_unreachable ();
614 
615   return r;
616 }
617 #endif
618 
619 #if TARGET_SUPPORTS_WIDE_INT
620 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
621    If so, return its counterpart; otherwise add it to the hash table and
622    return it.  */
623 
624 static rtx
625 lookup_const_wide_int (rtx wint)
626 {
627   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
628   if (*slot == 0)
629     *slot = wint;
630 
631   return *slot;
632 }
633 #endif
634 
635 /* Return an rtx constant for V, given that the constant has mode MODE.
636    The returned rtx will be a CONST_INT if V fits, otherwise it will be
637    a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
638    (if TARGET_SUPPORTS_WIDE_INT).  */
639 
640 static rtx
641 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
642 {
643   unsigned int len = v.get_len ();
644   /* Not scalar_int_mode because we also allow pointer bound modes.  */
645   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
646 
647   /* Allow truncation but not extension since we do not know if the
648      number is signed or unsigned.  */
649   gcc_assert (prec <= v.get_precision ());
650 
651   if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
652     return gen_int_mode (v.elt (0), mode);
653 
654 #if TARGET_SUPPORTS_WIDE_INT
655   {
656     unsigned int i;
657     rtx value;
658     unsigned int blocks_needed
659       = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
660 
661     if (len > blocks_needed)
662       len = blocks_needed;
663 
664     value = const_wide_int_alloc (len);
665 
666     /* It is so tempting to just put the mode in here.  Must control
667        myself ... */
668     PUT_MODE (value, VOIDmode);
669     CWI_PUT_NUM_ELEM (value, len);
670 
671     for (i = 0; i < len; i++)
672       CONST_WIDE_INT_ELT (value, i) = v.elt (i);
673 
674     return lookup_const_wide_int (value);
675   }
676 #else
677   return immed_double_const (v.elt (0), v.elt (1), mode);
678 #endif
679 }
680 
681 #if TARGET_SUPPORTS_WIDE_INT == 0
682 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
683    of ints: I0 is the low-order word and I1 is the high-order word.
684    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
685    implied upper bits are copies of the high bit of i1.  The value
686    itself is neither signed nor unsigned.  Do not use this routine for
687    non-integer modes; convert to REAL_VALUE_TYPE and use
688    const_double_from_real_value.  */
689 
690 rtx
691 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
692 {
693   rtx value;
694   unsigned int i;
695 
696   /* There are the following cases (note that there are no modes with
697      HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
698 
699      1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
700 	gen_int_mode.
701      2) If the value of the integer fits into HOST_WIDE_INT anyway
702         (i.e., i1 consists only of copies of the sign bit, and the signs
703 	of i0 and i1 agree), then we return a CONST_INT for i0.
704      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
705   scalar_mode smode;
706   if (is_a <scalar_mode> (mode, &smode)
707       && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
708     return gen_int_mode (i0, mode);
709 
710   /* If this integer fits in one word, return a CONST_INT.  */
711   if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
712     return GEN_INT (i0);
713 
714   /* We use VOIDmode for integers.  */
715   value = rtx_alloc (CONST_DOUBLE);
716   PUT_MODE (value, VOIDmode);
717 
718   CONST_DOUBLE_LOW (value) = i0;
719   CONST_DOUBLE_HIGH (value) = i1;
720 
721   for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
722     XWINT (value, i) = 0;
723 
724   return lookup_const_double (value);
725 }
726 #endif
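/* Worked example (assuming a 64-bit HOST_WIDE_INT and a target without
   TARGET_SUPPORTS_WIDE_INT): immed_double_const (0, 1, TImode) falls
   through to case 3 above and returns a CONST_DOUBLE whose low word is 0
   and whose high word is 1, i.e. the 128-bit value 2^64.  */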
727 
728 /* Return an rtx representation of C in mode MODE.  */
729 
730 rtx
731 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
732 {
733   if (c.is_constant ())
734     return immed_wide_int_const_1 (c.coeffs[0], mode);
735 
736   /* Not scalar_int_mode because we also allow pointer bound modes.  */
737   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
738 
739   /* Allow truncation but not extension since we do not know if the
740      number is signed or unsigned.  */
741   gcc_assert (prec <= c.coeffs[0].get_precision ());
742   poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
743 
744   /* See whether we already have an rtx for this constant.  */
745   inchash::hash h;
746   h.add_int (mode);
747   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
748     h.add_wide_int (newc.coeffs[i]);
749   const_poly_int_hasher::compare_type typed_value (mode, newc);
750   rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
751 							h.end (), INSERT);
752   rtx x = *slot;
753   if (x)
754     return x;
755 
756   /* Create a new rtx.  There's a choice to be made here between installing
757      the actual mode of the rtx or leaving it as VOIDmode (for consistency
758      with CONST_INT).  In practice the handling of the codes is different
759      enough that we get no benefit from using VOIDmode, and various places
760      assume that VOIDmode implies CONST_INT.  Using the real mode seems like
761      the right long-term direction anyway.  */
762   typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
763   size_t extra_size = twi::extra_size (prec);
764   x = rtx_alloc_v (CONST_POLY_INT,
765 		   sizeof (struct const_poly_int_def) + extra_size);
766   PUT_MODE (x, mode);
767   CONST_POLY_INT_COEFFS (x).set_precision (prec);
768   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
769     CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
770 
771   *slot = x;
772   return x;
773 }
774 
775 rtx
776 gen_rtx_REG (machine_mode mode, unsigned int regno)
777 {
778   /* In case the MD file explicitly references the frame pointer, have
779      all such references point to the same frame pointer.  This is
780      used during frame pointer elimination to distinguish the explicit
781      references to these registers from pseudos that happened to be
782      assigned to them.
783 
784      If we have eliminated the frame pointer or arg pointer, we will
785      be using it as a normal register, for example as a spill
786      register.  In such cases, we might be accessing it in a mode that
787      is not Pmode and therefore cannot use the pre-allocated rtx.
788 
789      Also don't do this when we are making new REGs in reload, since
790      we don't want to get confused with the real pointers.  */
791 
792   if (mode == Pmode && !reload_in_progress && !lra_in_progress)
793     {
794       if (regno == FRAME_POINTER_REGNUM
795 	  && (!reload_completed || frame_pointer_needed))
796 	return frame_pointer_rtx;
797 
798       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
799 	  && regno == HARD_FRAME_POINTER_REGNUM
800 	  && (!reload_completed || frame_pointer_needed))
801 	return hard_frame_pointer_rtx;
802 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
803       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
804 	  && regno == ARG_POINTER_REGNUM)
805 	return arg_pointer_rtx;
806 #endif
807 #ifdef RETURN_ADDRESS_POINTER_REGNUM
808       if (regno == RETURN_ADDRESS_POINTER_REGNUM)
809 	return return_address_pointer_rtx;
810 #endif
811       if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
812 	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
813 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
814 	return pic_offset_table_rtx;
815       if (regno == STACK_POINTER_REGNUM)
816 	return stack_pointer_rtx;
817     }
818 
819 #if 0
820   /* If the per-function register table has been set up, try to re-use
821      an existing entry in that table to avoid useless generation of RTL.
822 
823      This code is disabled for now until we can fix the various backends
824      which depend on having non-shared hard registers in some cases.   Long
825      term we want to re-enable this code as it can significantly cut down
826      on the amount of useless RTL that gets generated.
827 
828      We'll also need to fix some code that runs after reload that wants to
829      set ORIGINAL_REGNO.  */
830 
831   if (cfun
832       && cfun->emit
833       && regno_reg_rtx
834       && regno < FIRST_PSEUDO_REGISTER
835       && reg_raw_mode[regno] == mode)
836     return regno_reg_rtx[regno];
837 #endif
838 
839   return gen_raw_REG (mode, regno);
840 }
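/* Illustrative example: outside of reload/LRA, gen_rtx_REG (Pmode,
   STACK_POINTER_REGNUM) returns the shared stack_pointer_rtx rather than
   allocating a fresh REG, per the checks above.  */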
841 
842 rtx
843 gen_rtx_MEM (machine_mode mode, rtx addr)
844 {
845   rtx rt = gen_rtx_raw_MEM (mode, addr);
846 
847   /* This field is not cleared by the mere allocation of the rtx, so
848      we clear it here.  */
849   MEM_ATTRS (rt) = 0;
850 
851   return rt;
852 }
853 
854 /* Generate a memory referring to non-trapping constant memory.  */
855 
856 rtx
857 gen_const_mem (machine_mode mode, rtx addr)
858 {
859   rtx mem = gen_rtx_MEM (mode, addr);
860   MEM_READONLY_P (mem) = 1;
861   MEM_NOTRAP_P (mem) = 1;
862   return mem;
863 }
864 
865 /* Generate a MEM referring to fixed portions of the frame, e.g., register
866    save areas.  */
867 
868 rtx
869 gen_frame_mem (machine_mode mode, rtx addr)
870 {
871   rtx mem = gen_rtx_MEM (mode, addr);
872   MEM_NOTRAP_P (mem) = 1;
873   set_mem_alias_set (mem, get_frame_alias_set ());
874   return mem;
875 }
876 
877 /* Generate a MEM referring to a temporary use of the stack, not part
878     of the fixed stack frame.  For example, something which is pushed
879     by a target splitter.  */
880 rtx
881 gen_tmp_stack_mem (machine_mode mode, rtx addr)
882 {
883   rtx mem = gen_rtx_MEM (mode, addr);
884   MEM_NOTRAP_P (mem) = 1;
885   if (!cfun->calls_alloca)
886     set_mem_alias_set (mem, get_frame_alias_set ());
887   return mem;
888 }
889 
890 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
891    this construct would be valid, and false otherwise.  */
892 
893 bool
894 validate_subreg (machine_mode omode, machine_mode imode,
895 		 const_rtx reg, poly_uint64 offset)
896 {
897   poly_uint64 isize = GET_MODE_SIZE (imode);
898   poly_uint64 osize = GET_MODE_SIZE (omode);
899 
900   /* The sizes must be ordered, so that we know whether the subreg
901      is partial, paradoxical or complete.  */
902   if (!ordered_p (isize, osize))
903     return false;
904 
905   /* All subregs must be aligned.  */
906   if (!multiple_p (offset, osize))
907     return false;
908 
909   /* The subreg offset cannot be outside the inner object.  */
910   if (maybe_ge (offset, isize))
911     return false;
912 
913   poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
914 
915   /* ??? This should not be here.  Temporarily continue to allow word_mode
916      subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
917      Generally, backends are doing something sketchy but it'll take time to
918      fix them all.  */
919   if (omode == word_mode)
920     ;
921   /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
922      is the culprit here, and not the backends.  */
923   else if (known_ge (osize, regsize) && known_ge (isize, osize))
924     ;
925   /* Allow component subregs of complex and vector.  Though given the below
926      extraction rules, it's not always clear what that means.  */
927   else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
928 	   && GET_MODE_INNER (imode) == omode)
929     ;
930   /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
931      i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
932      represent this.  It's questionable if this ought to be represented at
933      all -- why can't this all be hidden in post-reload splitters that make
934      arbitrary mode changes to the registers themselves?  */
935   else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
936     ;
937   /* Subregs involving floating point modes are not allowed to
938      change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
939      (subreg:SI (reg:DF) 0) isn't.  */
940   else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
941     {
942       if (! (known_eq (isize, osize)
943 	     /* LRA can use subreg to store a floating point value in
944 		an integer mode.  Although the floating point and the
945 		integer modes need the same number of hard registers,
946 		the size of the floating point mode can be less than that
947 		of the integer mode.  LRA also uses subregs when a register
948 		must be used in different modes within one insn.  */
949 	     || lra_in_progress))
950 	return false;
951     }
952 
953   /* Paradoxical subregs must have offset zero.  */
954   if (maybe_gt (osize, isize))
955     return known_eq (offset, 0U);
956 
957   /* This is a normal subreg.  Verify that the offset is representable.  */
958 
959   /* For hard registers, we already have most of these rules collected in
960      subreg_offset_representable_p.  */
961   if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
962     {
963       unsigned int regno = REGNO (reg);
964 
965       if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
966 	  && GET_MODE_INNER (imode) == omode)
967 	;
968       else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
969 	return false;
970 
971       return subreg_offset_representable_p (regno, imode, offset, omode);
972     }
973 
974   /* The outer size must be ordered wrt the register size, otherwise
975      we wouldn't know at compile time how many registers the outer
976      mode occupies.  */
977   if (!ordered_p (osize, regsize))
978     return false;
979 
980   /* For pseudo registers, we want most of the same checks.  Namely:
981 
982      Assume that the pseudo register will be allocated to hard registers
983      that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
984      the remainder must correspond to the lowpart of the containing hard
985      register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
986      otherwise it is at the lowest offset.
987 
988      Given that we've already checked the mode and offset alignment,
989      we only have to check subblock subregs here.  */
990   if (maybe_lt (osize, regsize)
991       && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
992     {
993       /* It is invalid for the target to pick a register size for a mode
994 	 that isn't ordered wrt the size of that mode.  */
995       poly_uint64 block_size = ordered_min (isize, regsize);
996       unsigned int start_reg;
997       poly_uint64 offset_within_reg;
998       if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
999 	  || (BYTES_BIG_ENDIAN
1000 	      ? maybe_ne (offset_within_reg, block_size - osize)
1001 	      : maybe_ne (offset_within_reg, 0U)))
1002 	return false;
1003     }
1004   return true;
1005 }
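/* Illustrative examples (assuming a 32-bit little-endian target with
   SImode word_mode and a REGMODE_NATURAL_SIZE of 4):

     (subreg:SI (reg:DI P) 0) and (subreg:SI (reg:DI P) 4) are valid;
     (subreg:SI (reg:DI P) 2) fails the alignment check;
     (subreg:SI (reg:DF P) 0) is accepted only via the word_mode
     exception noted above;
     (subreg:DI (reg:SI P) 0) is paradoxical and must have offset zero.  */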
1006 
1007 rtx
1008 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1009 {
1010   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1011   return gen_rtx_raw_SUBREG (mode, reg, offset);
1012 }
1013 
1014 /* Generate a SUBREG representing the least-significant part of REG if MODE
1015    is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */
1016 
1017 rtx
1018 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1019 {
1020   machine_mode inmode;
1021 
1022   inmode = GET_MODE (reg);
1023   if (inmode == VOIDmode)
1024     inmode = mode;
1025   return gen_rtx_SUBREG (mode, reg,
1026 			 subreg_lowpart_offset (mode, inmode));
1027 }
1028 
1029 rtx
1030 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1031 		      enum var_init_status status)
1032 {
1033   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1034   PAT_VAR_LOCATION_STATUS (x) = status;
1035   return x;
1036 }
1037 
1038 
1039 /* Create an rtvec and store within it the RTXen passed in the arguments.  */
1040 
1041 rtvec
1042 gen_rtvec (int n, ...)
1043 {
1044   int i;
1045   rtvec rt_val;
1046   va_list p;
1047 
1048   va_start (p, n);
1049 
1050   /* Don't allocate an empty rtvec...  */
1051   if (n == 0)
1052     {
1053       va_end (p);
1054       return NULL_RTVEC;
1055     }
1056 
1057   rt_val = rtvec_alloc (n);
1058 
1059   for (i = 0; i < n; i++)
1060     rt_val->elem[i] = va_arg (p, rtx);
1061 
1062   va_end (p);
1063   return rt_val;
1064 }
1065 
1066 rtvec
1067 gen_rtvec_v (int n, rtx *argp)
1068 {
1069   int i;
1070   rtvec rt_val;
1071 
1072   /* Don't allocate an empty rtvec...  */
1073   if (n == 0)
1074     return NULL_RTVEC;
1075 
1076   rt_val = rtvec_alloc (n);
1077 
1078   for (i = 0; i < n; i++)
1079     rt_val->elem[i] = *argp++;
1080 
1081   return rt_val;
1082 }
1083 
1084 rtvec
1085 gen_rtvec_v (int n, rtx_insn **argp)
1086 {
1087   int i;
1088   rtvec rt_val;
1089 
1090   /* Don't allocate an empty rtvec...  */
1091   if (n == 0)
1092     return NULL_RTVEC;
1093 
1094   rt_val = rtvec_alloc (n);
1095 
1096   for (i = 0; i < n; i++)
1097     rt_val->elem[i] = *argp++;
1098 
1099   return rt_val;
1100 }
1101 
1102 
1103 /* Return the number of bytes between the start of an OUTER_MODE
1104    in-memory value and the start of an INNER_MODE in-memory value,
1105    given that the former is a lowpart of the latter.  It may be a
1106    paradoxical lowpart, in which case the offset will be negative
1107    on big-endian targets.  */
1108 
1109 poly_int64
1110 byte_lowpart_offset (machine_mode outer_mode,
1111 		     machine_mode inner_mode)
1112 {
1113   if (paradoxical_subreg_p (outer_mode, inner_mode))
1114     return -subreg_lowpart_offset (inner_mode, outer_mode);
1115   else
1116     return subreg_lowpart_offset (outer_mode, inner_mode);
1117 }
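/* Worked example (assuming 4-byte SImode and 8-byte DImode): on a
   big-endian target byte_lowpart_offset (SImode, DImode) is 4 and the
   paradoxical byte_lowpart_offset (DImode, SImode) is -4; on a
   little-endian target both offsets are 0.  */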
1118 
1119 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1120    from address X.  For paradoxical big-endian subregs this is a
1121    negative value, otherwise it's the same as OFFSET.  */
1122 
1123 poly_int64
1124 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1125 		      poly_uint64 offset)
1126 {
1127   if (paradoxical_subreg_p (outer_mode, inner_mode))
1128     {
1129       gcc_assert (known_eq (offset, 0U));
1130       return -subreg_lowpart_offset (inner_mode, outer_mode);
1131     }
1132   return offset;
1133 }
1134 
1135 /* As above, but return the offset that existing subreg X would have
1136    if SUBREG_REG (X) were stored in memory.  The only significant thing
1137    about the current SUBREG_REG is its mode.  */
1138 
1139 poly_int64
1140 subreg_memory_offset (const_rtx x)
1141 {
1142   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1143 			       SUBREG_BYTE (x));
1144 }
1145 
1146 /* Generate a REG rtx for a new pseudo register of mode MODE.
1147    This pseudo is assigned the next sequential register number.  */
1148 
1149 rtx
1150 gen_reg_rtx (machine_mode mode)
1151 {
1152   rtx val;
1153   unsigned int align = GET_MODE_ALIGNMENT (mode);
1154 
1155   gcc_assert (can_create_pseudo_p ());
1156 
1157   /* If a virtual register with bigger mode alignment is generated,
1158      increase stack alignment estimation because it might be spilled
1159      to stack later.  */
1160   if (SUPPORTS_STACK_ALIGNMENT
1161       && crtl->stack_alignment_estimated < align
1162       && !crtl->stack_realign_processed)
1163     {
1164       unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1165       if (crtl->stack_alignment_estimated < min_align)
1166 	crtl->stack_alignment_estimated = min_align;
1167     }
1168 
1169   if (generating_concat_p
1170       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1171 	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1172     {
1173       /* For complex modes, don't make a single pseudo.
1174 	 Instead, make a CONCAT of two pseudos.
1175 	 This allows noncontiguous allocation of the real and imaginary parts,
1176 	 which makes much better code.  Besides, allocating DCmode
1177 	 pseudos overstrains reload on some machines like the 386.  */
1178       rtx realpart, imagpart;
1179       machine_mode partmode = GET_MODE_INNER (mode);
1180 
1181       realpart = gen_reg_rtx (partmode);
1182       imagpart = gen_reg_rtx (partmode);
1183       return gen_rtx_CONCAT (mode, realpart, imagpart);
1184     }
1185 
1186   /* Do not call gen_reg_rtx with uninitialized crtl.  */
1187   gcc_assert (crtl->emit.regno_pointer_align_length);
1188 
1189   crtl->emit.ensure_regno_capacity ();
1190   gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1191 
1192   val = gen_raw_REG (mode, reg_rtx_no);
1193   regno_reg_rtx[reg_rtx_no++] = val;
1194   return val;
1195 }
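/* Illustrative usage: expanders obtain fresh pseudos with e.g.
   gen_reg_rtx (SImode); for a complex mode such as DCmode the result is
   a CONCAT of two pseudos, as explained above.  */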
1196 
1197 /* Make sure regno_pointer_align and regno_reg_rtx are large enough
1198    to have elements in the range 0 <= idx <= reg_rtx_no.  */
1199 
1200 void
1201 emit_status::ensure_regno_capacity ()
1202 {
1203   int old_size = regno_pointer_align_length;
1204 
1205   if (reg_rtx_no < old_size)
1206     return;
1207 
1208   int new_size = old_size * 2;
1209   while (reg_rtx_no >= new_size)
1210     new_size *= 2;
1211 
1212   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1213   memset (tmp + old_size, 0, new_size - old_size);
1214   regno_pointer_align = (unsigned char *) tmp;
1215 
1216   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1217   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1218   regno_reg_rtx = new1;
1219 
1220   crtl->emit.regno_pointer_align_length = new_size;
1221 }
1222 
1223 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
1224 
1225 bool
1226 reg_is_parm_p (rtx reg)
1227 {
1228   tree decl;
1229 
1230   gcc_assert (REG_P (reg));
1231   decl = REG_EXPR (reg);
1232   return (decl && TREE_CODE (decl) == PARM_DECL);
1233 }
1234 
1235 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1236    to the REG_OFFSET.  */
1237 
1238 static void
1239 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1240 {
1241   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1242 				       REG_OFFSET (reg) + offset);
1243 }
1244 
1245 /* Generate a register with same attributes as REG, but with OFFSET
1246    added to the REG_OFFSET.  */
1247 
1248 rtx
1249 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1250 		    poly_int64 offset)
1251 {
1252   rtx new_rtx = gen_rtx_REG (mode, regno);
1253 
1254   update_reg_offset (new_rtx, reg, offset);
1255   return new_rtx;
1256 }
1257 
1258 /* Generate a new pseudo-register with the same attributes as REG, but
1259    with OFFSET added to the REG_OFFSET.  */
1260 
1261 rtx
1262 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1263 {
1264   rtx new_rtx = gen_reg_rtx (mode);
1265 
1266   update_reg_offset (new_rtx, reg, offset);
1267   return new_rtx;
1268 }
1269 
1270 /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
1271    new register is a (possibly paradoxical) lowpart of the old one.  */
1272 
1273 void
1274 adjust_reg_mode (rtx reg, machine_mode mode)
1275 {
1276   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1277   PUT_MODE (reg, mode);
1278 }
1279 
1280 /* Copy REG's attributes from X, if X has any attributes.  If REG and X
1281    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
1282 
1283 void
1284 set_reg_attrs_from_value (rtx reg, rtx x)
1285 {
1286   poly_int64 offset;
1287   bool can_be_reg_pointer = true;
1288 
1289   /* Don't call mark_reg_pointer for incompatible pointer sign
1290      extension.  */
1291   while (GET_CODE (x) == SIGN_EXTEND
1292 	 || GET_CODE (x) == ZERO_EXTEND
1293 	 || GET_CODE (x) == TRUNCATE
1294 	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1295     {
1296 #if defined(POINTERS_EXTEND_UNSIGNED)
1297       if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1298 	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1299 	   || (paradoxical_subreg_p (x)
1300 	       && ! (SUBREG_PROMOTED_VAR_P (x)
1301 		     && SUBREG_CHECK_PROMOTED_SIGN (x,
1302 						    POINTERS_EXTEND_UNSIGNED))))
1303 	  && !targetm.have_ptr_extend ())
1304 	can_be_reg_pointer = false;
1305 #endif
1306       x = XEXP (x, 0);
1307     }
1308 
1309   /* Hard registers can be reused for multiple purposes within the same
1310      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1311      on them is wrong.  */
1312   if (HARD_REGISTER_P (reg))
1313     return;
1314 
1315   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1316   if (MEM_P (x))
1317     {
1318       if (MEM_OFFSET_KNOWN_P (x))
1319 	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1320 					 MEM_OFFSET (x) + offset);
1321       if (can_be_reg_pointer && MEM_POINTER (x))
1322 	mark_reg_pointer (reg, 0);
1323     }
1324   else if (REG_P (x))
1325     {
1326       if (REG_ATTRS (x))
1327 	update_reg_offset (reg, x, offset);
1328       if (can_be_reg_pointer && REG_POINTER (x))
1329 	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1330     }
1331 }
1332 
1333 /* Generate a REG rtx for a new pseudo register, copying the mode
1334    and attributes from X.  */
1335 
1336 rtx
1337 gen_reg_rtx_and_attrs (rtx x)
1338 {
1339   rtx reg = gen_reg_rtx (GET_MODE (x));
1340   set_reg_attrs_from_value (reg, x);
1341   return reg;
1342 }
1343 
1344 /* Set the register attributes for registers contained in PARM_RTX.
1345    Use needed values from memory attributes of MEM.  */
1346 
1347 void
1348 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1349 {
1350   if (REG_P (parm_rtx))
1351     set_reg_attrs_from_value (parm_rtx, mem);
1352   else if (GET_CODE (parm_rtx) == PARALLEL)
1353     {
1354       /* Check for a NULL entry in the first slot, used to indicate that the
1355 	 parameter goes both on the stack and in registers.  */
1356       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1357       for (; i < XVECLEN (parm_rtx, 0); i++)
1358 	{
1359 	  rtx x = XVECEXP (parm_rtx, 0, i);
1360 	  if (REG_P (XEXP (x, 0)))
1361 	    REG_ATTRS (XEXP (x, 0))
1362 	      = get_reg_attrs (MEM_EXPR (mem),
1363 			       INTVAL (XEXP (x, 1)));
1364 	}
1365     }
1366 }
1367 
1368 /* Set the REG_ATTRS for registers in value X, given that X represents
1369    decl T.  */
1370 
1371 void
1372 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1373 {
1374   if (!t)
1375     return;
1376   tree tdecl = t;
1377   if (GET_CODE (x) == SUBREG)
1378     {
1379       gcc_assert (subreg_lowpart_p (x));
1380       x = SUBREG_REG (x);
1381     }
1382   if (REG_P (x))
1383     REG_ATTRS (x)
1384       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1385 					       DECL_P (tdecl)
1386 					       ? DECL_MODE (tdecl)
1387 					       : TYPE_MODE (TREE_TYPE (tdecl))));
1388   if (GET_CODE (x) == CONCAT)
1389     {
1390       if (REG_P (XEXP (x, 0)))
1391         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1392       if (REG_P (XEXP (x, 1)))
1393 	REG_ATTRS (XEXP (x, 1))
1394 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1395     }
1396   if (GET_CODE (x) == PARALLEL)
1397     {
1398       int i, start;
1399 
1400       /* Check for a NULL entry, used to indicate that the parameter goes
1401 	 both on the stack and in registers.  */
1402       if (XEXP (XVECEXP (x, 0, 0), 0))
1403 	start = 0;
1404       else
1405 	start = 1;
1406 
1407       for (i = start; i < XVECLEN (x, 0); i++)
1408 	{
1409 	  rtx y = XVECEXP (x, 0, i);
1410 	  if (REG_P (XEXP (y, 0)))
1411 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1412 	}
1413     }
1414 }
1415 
1416 /* Assign the RTX X to declaration T.  */
1417 
1418 void
1419 set_decl_rtl (tree t, rtx x)
1420 {
1421   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1422   if (x)
1423     set_reg_attrs_for_decl_rtl (t, x);
1424 }
1425 
1426 /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
1427    if the ABI requires the parameter to be passed by reference.  */
1428 
1429 void
1430 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1431 {
1432   DECL_INCOMING_RTL (t) = x;
1433   if (x && !by_reference_p)
1434     set_reg_attrs_for_decl_rtl (t, x);
1435 }
1436 
1437 /* Identify REG (which may be a CONCAT) as a user register.  */
1438 
1439 void
1440 mark_user_reg (rtx reg)
1441 {
1442   if (GET_CODE (reg) == CONCAT)
1443     {
1444       REG_USERVAR_P (XEXP (reg, 0)) = 1;
1445       REG_USERVAR_P (XEXP (reg, 1)) = 1;
1446     }
1447   else
1448     {
1449       gcc_assert (REG_P (reg));
1450       REG_USERVAR_P (reg) = 1;
1451     }
1452 }
1453 
1454 /* Identify REG as a probable pointer register and show its alignment
1455    as ALIGN, if nonzero.  */
1456 
1457 void
1458 mark_reg_pointer (rtx reg, int align)
1459 {
1460   if (! REG_POINTER (reg))
1461     {
1462       REG_POINTER (reg) = 1;
1463 
1464       if (align)
1465 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1466     }
1467   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1468     /* We can no longer be sure just how aligned this pointer is.  */
1469     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1470 }
1471 
1472 /* Return 1 plus largest pseudo reg number used in the current function.  */
1473 
1474 int
1475 max_reg_num (void)
1476 {
1477   return reg_rtx_no;
1478 }
1479 
1480 /* Return 1 + the largest label number used so far in the current function.  */
1481 
1482 int
1483 max_label_num (void)
1484 {
1485   return label_num;
1486 }
1487 
1488 /* Return first label number used in this function (if any were used).  */
1489 
1490 int
1491 get_first_label_num (void)
1492 {
1493   return first_label_num;
1494 }
1495 
1496 /* If the rtx for label was created during the expansion of a nested
1497    function, then first_label_num won't include this label number.
1498    Fix this now so that array indices work later.  */
1499 
1500 void
1501 maybe_set_first_label_num (rtx_code_label *x)
1502 {
1503   if (CODE_LABEL_NUMBER (x) < first_label_num)
1504     first_label_num = CODE_LABEL_NUMBER (x);
1505 }
1506 
1507 /* For use by the RTL function loader, when mingling with normal
1508    functions.
1509    Ensure that label_num is greater than the label num of X, to avoid
1510    duplicate labels in the generated assembler.  */
1511 
1512 void
1513 maybe_set_max_label_num (rtx_code_label *x)
1514 {
1515   if (CODE_LABEL_NUMBER (x) >= label_num)
1516     label_num = CODE_LABEL_NUMBER (x) + 1;
1517 }
1518 
1519 
1520 /* Return a value representing some low-order bits of X, where the number
1521    of low-order bits is given by MODE.  Note that no conversion is done
1522    between floating-point and fixed-point values; rather, the bit
1523    representation is returned.
1524 
1525    This function handles the cases in common between gen_lowpart, below,
1526    and two variants in cse.c and combine.c.  These are the cases that can
1527    be safely handled at all points in the compilation.
1528 
1529    If this is not a case we can handle, return 0.  */
1530 
1531 rtx
1532 gen_lowpart_common (machine_mode mode, rtx x)
1533 {
1534   poly_uint64 msize = GET_MODE_SIZE (mode);
1535   machine_mode innermode;
1536 
1537   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1538      so we have to make one up.  Yuk.  */
1539   innermode = GET_MODE (x);
1540   if (CONST_INT_P (x)
1541       && known_le (msize * BITS_PER_UNIT,
1542 		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1543     innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1544   else if (innermode == VOIDmode)
1545     innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1546 
1547   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1548 
1549   if (innermode == mode)
1550     return x;
1551 
1552   /* The size of the outer and inner modes must be ordered.  */
1553   poly_uint64 xsize = GET_MODE_SIZE (innermode);
1554   if (!ordered_p (msize, xsize))
1555     return 0;
1556 
1557   if (SCALAR_FLOAT_MODE_P (mode))
1558     {
1559       /* Don't allow paradoxical FLOAT_MODE subregs.  */
1560       if (maybe_gt (msize, xsize))
1561 	return 0;
1562     }
1563   else
1564     {
1565       /* MODE must occupy no more of the underlying registers than X.  */
1566       poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1567       unsigned int mregs, xregs;
1568       if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1569 	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1570 	  || mregs > xregs)
1571 	return 0;
1572     }
1573 
1574   scalar_int_mode int_mode, int_innermode, from_mode;
1575   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1576       && is_a <scalar_int_mode> (mode, &int_mode)
1577       && is_a <scalar_int_mode> (innermode, &int_innermode)
1578       && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1579     {
1580       /* If we are getting the low-order part of something that has been
1581 	 sign- or zero-extended, we can either just use the object being
1582 	 extended or make a narrower extension.  If we want an even smaller
1583 	 piece than the size of the object being extended, call ourselves
1584 	 recursively.
1585 
1586 	 This case is used mostly by combine and cse.  */
1587 
1588       if (from_mode == int_mode)
1589 	return XEXP (x, 0);
1590       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1591 	return gen_lowpart_common (int_mode, XEXP (x, 0));
1592       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1593 	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1594     }
1595   else if (GET_CODE (x) == SUBREG || REG_P (x)
1596 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1597 	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1598 	   || CONST_POLY_INT_P (x))
1599     return lowpart_subreg (mode, x, innermode);
1600 
1601   /* Otherwise, we can't do this.  */
1602   return 0;
1603 }
1604 
1605 rtx
1606 gen_highpart (machine_mode mode, rtx x)
1607 {
1608   poly_uint64 msize = GET_MODE_SIZE (mode);
1609   rtx result;
1610 
1611   /* This case loses if X is a subreg.  To catch bugs early,
1612      complain if an invalid MODE is used even in other cases.  */
1613   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1614 	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1615 
1616   result = simplify_gen_subreg (mode, x, GET_MODE (x),
1617 				subreg_highpart_offset (mode, GET_MODE (x)));
1618   gcc_assert (result);
1619 
1620   /* simplify_gen_subreg is not guaranteed to return a valid operand for
1621      the target if we have a MEM.  gen_highpart must return a valid operand,
1622      emitting code if necessary to do so.  */
1623   if (MEM_P (result))
1624     {
1625       result = validize_mem (result);
1626       gcc_assert (result);
1627     }
1628 
1629   return result;
1630 }
1631 
1632 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1633    can be a VOIDmode constant.  */
1634 rtx
1635 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1636 {
1637   if (GET_MODE (exp) != VOIDmode)
1638     {
1639       gcc_assert (GET_MODE (exp) == innermode);
1640       return gen_highpart (outermode, exp);
1641     }
1642   return simplify_gen_subreg (outermode, exp, innermode,
1643 			      subreg_highpart_offset (outermode, innermode));
1644 }
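
/* Illustrative sketch (assumed usage, hypothetical names): a caller can split
   a double-word value into its word-sized halves with gen_lowpart and
   gen_highpart.  This assumes a target whose word_mode is SImode.  */

static void
example_split_double_word (rtx di_src, rtx *lo, rtx *hi)
{
  gcc_assert (GET_MODE (di_src) == DImode);
  *lo = gen_lowpart (SImode, di_src);	/* Least significant half.  */
  *hi = gen_highpart (SImode, di_src);	/* Most significant half.  */
}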
1645 
1646 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1647    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1648 
1649 poly_uint64
1650 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1651 {
1652   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1653   if (maybe_gt (outer_bytes, inner_bytes))
1654     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
1655     return 0;
1656 
1657   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1658     return inner_bytes - outer_bytes;
1659   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1660     return 0;
1661   else
1662     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1663 }
1664 
1665 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1666    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1667 
1668 poly_uint64
1669 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1670 {
1671   gcc_assert (known_ge (inner_bytes, outer_bytes));
1672 
1673   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1674     return 0;
1675   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1676     return inner_bytes - outer_bytes;
1677   else
1678     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1679 					(inner_bytes - outer_bytes)
1680 					* BITS_PER_UNIT);
1681 }
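
/* Worked example (assumed pure-endian targets, hypothetical helper): for an
   SImode part of a DImode value, OUTER_BYTES is 4 and INNER_BYTES is 8.  On a
   little-endian target the lowpart offset is 0 and the highpart offset is 4;
   on a big-endian target the two are swapped.  */

static void
example_check_di_si_offsets (void)
{
  poly_uint64 lo = subreg_size_lowpart_offset (4, 8);
  poly_uint64 hi = subreg_size_highpart_offset (4, 8);
  if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    gcc_checking_assert (known_eq (lo, 0U) && known_eq (hi, 4U));
  else if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    gcc_checking_assert (known_eq (lo, 4U) && known_eq (hi, 0U));
}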
1682 
1683 /* Return 1 iff X, assumed to be a SUBREG,
1684    refers to the least significant part of its containing reg.
1685    If X is not a SUBREG, always return 1 (it is its own low part!).  */
1686 
1687 int
1688 subreg_lowpart_p (const_rtx x)
1689 {
1690   if (GET_CODE (x) != SUBREG)
1691     return 1;
1692   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1693     return 0;
1694 
1695   return known_eq (subreg_lowpart_offset (GET_MODE (x),
1696 					  GET_MODE (SUBREG_REG (x))),
1697 		   SUBREG_BYTE (x));
1698 }
1699 
1700 /* Return subword OFFSET of operand OP.
1701    The word number, OFFSET, is interpreted as the word number starting
1702    at the low-order address.  OFFSET 0 is the low-order word if not
1703    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1704 
1705    If we cannot extract the required word, we return zero.  Otherwise,
1706    an rtx corresponding to the requested word will be returned.
1707 
1708    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1709    reload has completed, a valid address will always be returned.  After
1710    reload, if a valid address cannot be returned, we return zero.
1711 
1712    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1713    it is the responsibility of the caller.
1714 
1715    MODE is the mode of OP in case it is a CONST_INT.
1716 
1717    ??? This is still rather broken for some cases.  The problem for the
1718    moment is that all callers of this thing provide no 'goal mode' to
1719    tell us to work with.  This exists because all callers were written
1720    in a word-based SUBREG world.
1721    Nowadays most uses of this function can be replaced by calls to
1722    simplify_subreg.
1723  */
1724 
1725 rtx
1726 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1727 		 machine_mode mode)
1728 {
1729   if (mode == VOIDmode)
1730     mode = GET_MODE (op);
1731 
1732   gcc_assert (mode != VOIDmode);
1733 
1734   /* If OP is narrower than a word, fail.  */
1735   if (mode != BLKmode
1736       && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1737     return 0;
1738 
1739   /* If we want a word outside OP, return zero.  */
1740   if (mode != BLKmode
1741       && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1742     return const0_rtx;
1743 
1744   /* Form a new MEM at the requested address.  */
1745   if (MEM_P (op))
1746     {
1747       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1748 
1749       if (! validate_address)
1750 	return new_rtx;
1751 
1752       else if (reload_completed)
1753 	{
1754 	  if (! strict_memory_address_addr_space_p (word_mode,
1755 						    XEXP (new_rtx, 0),
1756 						    MEM_ADDR_SPACE (op)))
1757 	    return 0;
1758 	}
1759       else
1760 	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1761     }
1762 
1763   /* Rest can be handled by simplify_subreg.  */
1764   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1765 }
1766 
1767 /* Similar to `operand_subword', but never return 0.  If we can't
1768    extract the required subword, put OP into a register and try again.
1769    The second attempt must succeed.  We always validate the address in
1770    this case.
1771 
1772    MODE is the mode of OP, in case it is CONST_INT.  */
1773 
1774 rtx
1775 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1776 {
1777   rtx result = operand_subword (op, offset, 1, mode);
1778 
1779   if (result)
1780     return result;
1781 
1782   if (mode != BLKmode && mode != VOIDmode)
1783     {
1784       /* If this is a register that cannot be accessed by words, copy it
1785 	 to a pseudo register.  */
1786       if (REG_P (op))
1787 	op = copy_to_reg (op);
1788       else
1789 	op = force_reg (mode, op);
1790     }
1791 
1792   result = operand_subword (op, offset, 1, mode);
1793   gcc_assert (result);
1794 
1795   return result;
1796 }
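
/* Illustrative sketch of the usual word-at-a-time idiom (hypothetical names;
   assumes DEST and SRC are DImode and occupy two words): the source words
   come from operand_subword_force, which always yields something usable,
   copying to a pseudo if necessary; the destination words come from
   operand_subword and are checked instead.  */

static void
example_copy_double_word_piecewise (rtx dest, rtx src)
{
  for (int i = 0; i < 2; i++)
    {
      rtx dst_word = operand_subword (dest, i, 1, DImode);
      rtx src_word = operand_subword_force (src, i, DImode);
      gcc_assert (dst_word != NULL_RTX);
      emit_move_insn (dst_word, src_word);
    }
}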
1797 
1798 mem_attrs::mem_attrs ()
1799   : expr (NULL_TREE),
1800     offset (0),
1801     size (0),
1802     alias (0),
1803     align (0),
1804     addrspace (ADDR_SPACE_GENERIC),
1805     offset_known_p (false),
1806     size_known_p (false)
1807 {}
1808 
1809 /* Return 1 if the two MEM_EXPRs, EXPR1 and EXPR2, can be considered
1810    equal, and 0 otherwise.  */
1811 
1812 int
1813 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1814 {
1815   if (expr1 == expr2)
1816     return 1;
1817 
1818   if (! expr1 || ! expr2)
1819     return 0;
1820 
1821   if (TREE_CODE (expr1) != TREE_CODE (expr2))
1822     return 0;
1823 
1824   return operand_equal_p (expr1, expr2, 0);
1825 }
1826 
1827 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1828    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1829    -1 if not known.  */
1830 
1831 int
1832 get_mem_align_offset (rtx mem, unsigned int align)
1833 {
1834   tree expr;
1835   poly_uint64 offset;
1836 
1837   /* This function can't use
1838      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1839 	 || (MAX (MEM_ALIGN (mem),
1840 	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
1841 	     < align))
1842        return -1;
1843      else
1844        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1845      for two reasons:
1846      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1847        for <variable>.  get_inner_reference doesn't handle it and
1848        even if it did, the alignment in that case needs to be determined
1849        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1850      - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1851        isn't sufficiently aligned, the object it is in might be.  */
1852   gcc_assert (MEM_P (mem));
1853   expr = MEM_EXPR (mem);
1854   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1855     return -1;
1856 
1857   offset = MEM_OFFSET (mem);
1858   if (DECL_P (expr))
1859     {
1860       if (DECL_ALIGN (expr) < align)
1861 	return -1;
1862     }
1863   else if (INDIRECT_REF_P (expr))
1864     {
1865       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1866 	return -1;
1867     }
1868   else if (TREE_CODE (expr) == COMPONENT_REF)
1869     {
1870       while (1)
1871 	{
1872 	  tree inner = TREE_OPERAND (expr, 0);
1873 	  tree field = TREE_OPERAND (expr, 1);
1874 	  tree byte_offset = component_ref_field_offset (expr);
1875 	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1876 
1877 	  poly_uint64 suboffset;
1878 	  if (!byte_offset
1879 	      || !poly_int_tree_p (byte_offset, &suboffset)
1880 	      || !tree_fits_uhwi_p (bit_offset))
1881 	    return -1;
1882 
1883 	  offset += suboffset;
1884 	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1885 
1886 	  if (inner == NULL_TREE)
1887 	    {
1888 	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1889 		  < (unsigned int) align)
1890 		return -1;
1891 	      break;
1892 	    }
1893 	  else if (DECL_P (inner))
1894 	    {
1895 	      if (DECL_ALIGN (inner) < align)
1896 		return -1;
1897 	      break;
1898 	    }
1899 	  else if (TREE_CODE (inner) != COMPONENT_REF)
1900 	    return -1;
1901 	  expr = inner;
1902 	}
1903     }
1904   else
1905     return -1;
1906 
1907   HOST_WIDE_INT misalign;
1908   if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1909     return -1;
1910   return misalign;
1911 }
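
/* Worked example (assumed values): if MEM_EXPR is a decl whose DECL_ALIGN is
   at least 32 bits and MEM_OFFSET is 7, get_mem_align_offset (mem, 32)
   returns 3, since the address minus 3 bytes is 32-bit aligned
   (7 mod 4 == 3).  When the alignment cannot be proven, -1 is returned.  */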
1912 
1913 /* Given REF (a MEM) and T, either the type of X or the expression
1914    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1915    if we are making a new object of this type.  BITPOS is nonzero if
1916    there is an offset outstanding on T that will be applied later.  */
1917 
1918 void
1919 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1920 				 poly_int64 bitpos)
1921 {
1922   poly_int64 apply_bitpos = 0;
1923   tree type;
1924   struct mem_attrs attrs, *defattrs, *refattrs;
1925   addr_space_t as;
1926 
1927   /* It can happen that type_for_mode was given a mode for which there
1928      is no language-level type.  In which case it returns NULL, which
1929      we can see here.  */
1930   if (t == NULL_TREE)
1931     return;
1932 
1933   type = TYPE_P (t) ? t : TREE_TYPE (t);
1934   if (type == error_mark_node)
1935     return;
1936 
1937   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1938      wrong answer, as it assumes that DECL_RTL already has the right alias
1939      info.  Callers should not set DECL_RTL until after the call to
1940      set_mem_attributes.  */
1941   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1942 
1943   /* Get the alias set from the expression or type (perhaps using a
1944      front-end routine) and use it.  */
1945   attrs.alias = get_alias_set (t);
1946 
1947   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1948   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1949 
1950   /* Default values from pre-existing memory attributes if present.  */
1951   refattrs = MEM_ATTRS (ref);
1952   if (refattrs)
1953     {
1954       /* ??? Can this ever happen?  Calling this routine on a MEM that
1955 	 already carries memory attributes should probably be invalid.  */
1956       attrs.expr = refattrs->expr;
1957       attrs.offset_known_p = refattrs->offset_known_p;
1958       attrs.offset = refattrs->offset;
1959       attrs.size_known_p = refattrs->size_known_p;
1960       attrs.size = refattrs->size;
1961       attrs.align = refattrs->align;
1962     }
1963 
1964   /* Otherwise, default values from the mode of the MEM reference.  */
1965   else
1966     {
1967       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1968       gcc_assert (!defattrs->expr);
1969       gcc_assert (!defattrs->offset_known_p);
1970 
1971       /* Respect mode size.  */
1972       attrs.size_known_p = defattrs->size_known_p;
1973       attrs.size = defattrs->size;
1974       /* ??? Is this really necessary?  We probably should always get
1975 	 the size from the type below.  */
1976 
1977       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1978          if T is an object, always compute the object alignment below.  */
1979       if (TYPE_P (t))
1980 	attrs.align = defattrs->align;
1981       else
1982 	attrs.align = BITS_PER_UNIT;
1983       /* ??? If T is a type, respecting mode alignment may *also* be wrong
1984 	 e.g. if the type carries an alignment attribute.  Should we be
1985 	 able to simply always use TYPE_ALIGN?  */
1986     }
1987 
1988   /* We can set the alignment from the type if we are making an object or if
1989      this is an INDIRECT_REF.  */
1990   if (objectp || TREE_CODE (t) == INDIRECT_REF)
1991     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1992 
1993   /* If the size is known, we can set that.  */
1994   tree new_size = TYPE_SIZE_UNIT (type);
1995 
1996   /* The address-space is that of the type.  */
1997   as = TYPE_ADDR_SPACE (type);
1998 
1999   /* If T is not a type, we may be able to deduce some more information about
2000      the expression.  */
2001   if (! TYPE_P (t))
2002     {
2003       tree base;
2004 
2005       if (TREE_THIS_VOLATILE (t))
2006 	MEM_VOLATILE_P (ref) = 1;
2007 
2008       /* Now remove any conversions: they don't change what the underlying
2009 	 object is.  Likewise for SAVE_EXPR.  */
2010       while (CONVERT_EXPR_P (t)
2011 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
2012 	     || TREE_CODE (t) == SAVE_EXPR)
2013 	t = TREE_OPERAND (t, 0);
2014 
2015       /* Note whether this expression can trap.  */
2016       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2017 
2018       base = get_base_address (t);
2019       if (base)
2020 	{
2021 	  if (DECL_P (base)
2022 	      && TREE_READONLY (base)
2023 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2024 	      && !TREE_THIS_VOLATILE (base))
2025 	    MEM_READONLY_P (ref) = 1;
2026 
2027 	  /* Mark static const strings readonly as well.  */
2028 	  if (TREE_CODE (base) == STRING_CST
2029 	      && TREE_READONLY (base)
2030 	      && TREE_STATIC (base))
2031 	    MEM_READONLY_P (ref) = 1;
2032 
2033 	  /* Address-space information is on the base object.  */
2034 	  if (TREE_CODE (base) == MEM_REF
2035 	      || TREE_CODE (base) == TARGET_MEM_REF)
2036 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2037 								      0))));
2038 	  else
2039 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2040 	}
2041 
2042       /* If this expression uses its parent's alias set, mark it such
2043 	 that we won't change it.  */
2044       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2045 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
2046 
2047       /* If this is a decl, set the attributes of the MEM from it.  */
2048       if (DECL_P (t))
2049 	{
2050 	  attrs.expr = t;
2051 	  attrs.offset_known_p = true;
2052 	  attrs.offset = 0;
2053 	  apply_bitpos = bitpos;
2054 	  new_size = DECL_SIZE_UNIT (t);
2055 	}
2056 
2057       /* ???  If we end up with a constant here do record a MEM_EXPR.  */
2058       else if (CONSTANT_CLASS_P (t))
2059 	;
2060 
2061       /* If this is a field reference, record it.  */
2062       else if (TREE_CODE (t) == COMPONENT_REF)
2063 	{
2064 	  attrs.expr = t;
2065 	  attrs.offset_known_p = true;
2066 	  attrs.offset = 0;
2067 	  apply_bitpos = bitpos;
2068 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2069 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2070 	}
2071 
2072       /* If this is an array reference, look for an outer field reference.  */
2073       else if (TREE_CODE (t) == ARRAY_REF)
2074 	{
2075 	  tree off_tree = size_zero_node;
2076 	  /* We can't modify t, because we use it at the end of the
2077 	     function.  */
2078 	  tree t2 = t;
2079 
2080 	  do
2081 	    {
2082 	      tree index = TREE_OPERAND (t2, 1);
2083 	      tree low_bound = array_ref_low_bound (t2);
2084 	      tree unit_size = array_ref_element_size (t2);
2085 
2086 	      /* We assume all arrays have sizes that are a multiple of a byte.
2087 		 First subtract the lower bound, if any, in the type of the
2088 		 index, then convert to sizetype and multiply by the size of
2089 		 the array element.  */
2090 	      if (! integer_zerop (low_bound))
2091 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2092 				     index, low_bound);
2093 
2094 	      off_tree = size_binop (PLUS_EXPR,
2095 				     size_binop (MULT_EXPR,
2096 						 fold_convert (sizetype,
2097 							       index),
2098 						 unit_size),
2099 				     off_tree);
2100 	      t2 = TREE_OPERAND (t2, 0);
2101 	    }
2102 	  while (TREE_CODE (t2) == ARRAY_REF);
2103 
2104 	  if (DECL_P (t2)
2105 	      || (TREE_CODE (t2) == COMPONENT_REF
2106 		  /* For trailing arrays t2 doesn't have a size that
2107 		     covers all valid accesses.  */
2108 		  && ! array_at_struct_end_p (t)))
2109 	    {
2110 	      attrs.expr = t2;
2111 	      attrs.offset_known_p = false;
2112 	      if (poly_int_tree_p (off_tree, &attrs.offset))
2113 		{
2114 		  attrs.offset_known_p = true;
2115 		  apply_bitpos = bitpos;
2116 		}
2117 	    }
2118 	  /* Else do not record a MEM_EXPR.  */
2119 	}
2120 
2121       /* If this is an indirect reference, record it.  */
2122       else if (TREE_CODE (t) == MEM_REF
2123 	       || TREE_CODE (t) == TARGET_MEM_REF)
2124 	{
2125 	  attrs.expr = t;
2126 	  attrs.offset_known_p = true;
2127 	  attrs.offset = 0;
2128 	  apply_bitpos = bitpos;
2129 	}
2130 
2131       /* Compute the alignment.  */
2132       unsigned int obj_align;
2133       unsigned HOST_WIDE_INT obj_bitpos;
2134       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2135       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2136       if (diff_align != 0)
2137 	obj_align = MIN (obj_align, diff_align);
2138       attrs.align = MAX (attrs.align, obj_align);
2139     }
2140 
2141   poly_uint64 const_size;
2142   if (poly_int_tree_p (new_size, &const_size))
2143     {
2144       attrs.size_known_p = true;
2145       attrs.size = const_size;
2146     }
2147 
2148   /* If we modified OFFSET based on T, then subtract the outstanding
2149      bit position offset.  Similarly, increase the size of the accessed
2150      object to contain the negative offset.  */
2151   if (maybe_ne (apply_bitpos, 0))
2152     {
2153       gcc_assert (attrs.offset_known_p);
2154       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2155       attrs.offset -= bytepos;
2156       if (attrs.size_known_p)
2157 	attrs.size += bytepos;
2158     }
2159 
2160   /* Now set the attributes we computed above.  */
2161   attrs.addrspace = as;
2162   set_mem_attrs (ref, &attrs);
2163 }
2164 
2165 void
2166 set_mem_attributes (rtx ref, tree t, int objectp)
2167 {
2168   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2169 }
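
/* Illustrative sketch (hypothetical names): the usual way a freshly created
   MEM for a tree expression EXP picks up its attributes.  */

static rtx
example_make_mem_for_expr (tree exp, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
  /* Record MEM_EXPR, alias set, alignment, size etc. from EXP, treating the
     MEM as a new object of EXP's type (OBJECTP = 1).  */
  set_mem_attributes (mem, exp, 1);
  return mem;
}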
2170 
2171 /* Set the alias set of MEM to SET.  */
2172 
2173 void
2174 set_mem_alias_set (rtx mem, alias_set_type set)
2175 {
2176   /* If the new and old alias sets don't conflict, something is wrong.  */
2177   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2178   mem_attrs attrs (*get_mem_attrs (mem));
2179   attrs.alias = set;
2180   set_mem_attrs (mem, &attrs);
2181 }
2182 
2183 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
2184 
2185 void
2186 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2187 {
2188   mem_attrs attrs (*get_mem_attrs (mem));
2189   attrs.addrspace = addrspace;
2190   set_mem_attrs (mem, &attrs);
2191 }
2192 
2193 /* Set the alignment of MEM to ALIGN bits.  */
2194 
2195 void
2196 set_mem_align (rtx mem, unsigned int align)
2197 {
2198   mem_attrs attrs (*get_mem_attrs (mem));
2199   attrs.align = align;
2200   set_mem_attrs (mem, &attrs);
2201 }
2202 
2203 /* Set the expr for MEM to EXPR.  */
2204 
2205 void
2206 set_mem_expr (rtx mem, tree expr)
2207 {
2208   mem_attrs attrs (*get_mem_attrs (mem));
2209   attrs.expr = expr;
2210   set_mem_attrs (mem, &attrs);
2211 }
2212 
2213 /* Set the offset of MEM to OFFSET.  */
2214 
2215 void
2216 set_mem_offset (rtx mem, poly_int64 offset)
2217 {
2218   mem_attrs attrs (*get_mem_attrs (mem));
2219   attrs.offset_known_p = true;
2220   attrs.offset = offset;
2221   set_mem_attrs (mem, &attrs);
2222 }
2223 
2224 /* Clear the offset of MEM.  */
2225 
2226 void
2227 clear_mem_offset (rtx mem)
2228 {
2229   mem_attrs attrs (*get_mem_attrs (mem));
2230   attrs.offset_known_p = false;
2231   set_mem_attrs (mem, &attrs);
2232 }
2233 
2234 /* Set the size of MEM to SIZE.  */
2235 
2236 void
2237 set_mem_size (rtx mem, poly_int64 size)
2238 {
2239   mem_attrs attrs (*get_mem_attrs (mem));
2240   attrs.size_known_p = true;
2241   attrs.size = size;
2242   set_mem_attrs (mem, &attrs);
2243 }
2244 
2245 /* Clear the size of MEM.  */
2246 
2247 void
2248 clear_mem_size (rtx mem)
2249 {
2250   mem_attrs attrs (*get_mem_attrs (mem));
2251   attrs.size_known_p = false;
2252   set_mem_attrs (mem, &attrs);
2253 }
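
/* Illustrative sketch (hypothetical values): adjusting individual attributes
   of an existing MEM with the accessors above.  */

static void
example_tweak_mem_attrs (rtx mem)
{
  set_mem_align (mem, 64);	/* Promise 64-bit alignment.  */
  set_mem_size (mem, 16);	/* The access covers 16 bytes.  */
  clear_mem_offset (mem);	/* Offset within the object is unknown.  */
}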
2254 
2255 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2256    and its address changed to ADDR.  (VOIDmode means don't change the mode.
2257    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
2258    returned memory location is required to be valid.  INPLACE is true if any
2259    changes can be made directly to MEMREF or false if MEMREF must be treated
2260    as immutable.
2261 
2262    The memory attributes are not changed.  */
2263 
2264 static rtx
2265 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2266 		  bool inplace)
2267 {
2268   addr_space_t as;
2269   rtx new_rtx;
2270 
2271   gcc_assert (MEM_P (memref));
2272   as = MEM_ADDR_SPACE (memref);
2273   if (mode == VOIDmode)
2274     mode = GET_MODE (memref);
2275   if (addr == 0)
2276     addr = XEXP (memref, 0);
2277   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2278       && (!validate || memory_address_addr_space_p (mode, addr, as)))
2279     return memref;
2280 
2281   /* Don't validate address for LRA.  LRA can make the address valid
2282      by itself in the most efficient way.  */
2283   if (validate && !lra_in_progress)
2284     {
2285       if (reload_in_progress || reload_completed)
2286 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2287       else
2288 	addr = memory_address_addr_space (mode, addr, as);
2289     }
2290 
2291   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2292     return memref;
2293 
2294   if (inplace)
2295     {
2296       XEXP (memref, 0) = addr;
2297       return memref;
2298     }
2299 
2300   new_rtx = gen_rtx_MEM (mode, addr);
2301   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2302   return new_rtx;
2303 }
2304 
2305 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2306    way we are changing MEMREF, so we only preserve the alias set.  */
2307 
2308 rtx
2309 change_address (rtx memref, machine_mode mode, rtx addr)
2310 {
2311   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2312   machine_mode mmode = GET_MODE (new_rtx);
2313   struct mem_attrs *defattrs;
2314 
2315   mem_attrs attrs (*get_mem_attrs (memref));
2316   defattrs = mode_mem_attrs[(int) mmode];
2317   attrs.expr = NULL_TREE;
2318   attrs.offset_known_p = false;
2319   attrs.size_known_p = defattrs->size_known_p;
2320   attrs.size = defattrs->size;
2321   attrs.align = defattrs->align;
2322 
2323   /* If there are no changes, just return the original memory reference.  */
2324   if (new_rtx == memref)
2325     {
2326       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2327 	return new_rtx;
2328 
2329       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2330       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2331     }
2332 
2333   set_mem_attrs (new_rtx, &attrs);
2334   return new_rtx;
2335 }
2336 
2337 /* Return a memory reference like MEMREF, but with its mode changed
2338    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2339    nonzero, the memory address is forced to be valid.
2340    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2341    and the caller is responsible for adjusting MEMREF base register.
2342    If ADJUST_OBJECT is zero, the underlying object associated with the
2343    memory reference is left unchanged and the caller is responsible for
2344    dealing with it.  Otherwise, if the new memory reference is outside
2345    the underlying object, even partially, then the object is dropped.
2346    SIZE, if nonzero, is the size of an access in cases where MODE
2347    has no inherent size.  */
2348 
2349 rtx
2350 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2351 		  int validate, int adjust_address, int adjust_object,
2352 		  poly_int64 size)
2353 {
2354   rtx addr = XEXP (memref, 0);
2355   rtx new_rtx;
2356   scalar_int_mode address_mode;
2357   struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2358   unsigned HOST_WIDE_INT max_align;
2359 #ifdef POINTERS_EXTEND_UNSIGNED
2360   scalar_int_mode pointer_mode
2361     = targetm.addr_space.pointer_mode (attrs.addrspace);
2362 #endif
2363 
2364   /* VOIDmode means no mode change for change_address_1.  */
2365   if (mode == VOIDmode)
2366     mode = GET_MODE (memref);
2367 
2368   /* Take the size of non-BLKmode accesses from the mode.  */
2369   defattrs = mode_mem_attrs[(int) mode];
2370   if (defattrs->size_known_p)
2371     size = defattrs->size;
2372 
2373   /* If there are no changes, just return the original memory reference.  */
2374   if (mode == GET_MODE (memref)
2375       && known_eq (offset, 0)
2376       && (known_eq (size, 0)
2377 	  || (attrs.size_known_p && known_eq (attrs.size, size)))
2378       && (!validate || memory_address_addr_space_p (mode, addr,
2379 						    attrs.addrspace)))
2380     return memref;
2381 
2382   /* ??? Prefer to create garbage instead of creating shared rtl.
2383      This may happen even if offset is nonzero -- consider
2384      (plus (plus reg reg) const_int) -- so do this always.  */
2385   addr = copy_rtx (addr);
2386 
2387   /* Convert a possibly large offset to a signed value within the
2388      range of the target address space.  */
2389   address_mode = get_address_mode (memref);
2390   offset = trunc_int_for_mode (offset, address_mode);
2391 
2392   if (adjust_address)
2393     {
2394       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2395 	 object, we can merge it into the LO_SUM.  */
2396       if (GET_MODE (memref) != BLKmode
2397 	  && GET_CODE (addr) == LO_SUM
2398 	  && known_in_range_p (offset,
2399 			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2400 				   / BITS_PER_UNIT)))
2401 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2402 			       plus_constant (address_mode,
2403 					      XEXP (addr, 1), offset));
2404 #ifdef POINTERS_EXTEND_UNSIGNED
2405       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2406 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
2407 	 the fact that pointers are not allowed to overflow.  */
2408       else if (POINTERS_EXTEND_UNSIGNED > 0
2409 	       && GET_CODE (addr) == ZERO_EXTEND
2410 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2411 	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2412 	addr = gen_rtx_ZERO_EXTEND (address_mode,
2413 				    plus_constant (pointer_mode,
2414 						   XEXP (addr, 0), offset));
2415 #endif
2416       else
2417 	addr = plus_constant (address_mode, addr, offset);
2418     }
2419 
2420   new_rtx = change_address_1 (memref, mode, addr, validate, false);
2421 
2422   /* If the address is a REG, change_address_1 rightfully returns memref,
2423      but this would destroy memref's MEM_ATTRS.  */
2424   if (new_rtx == memref && maybe_ne (offset, 0))
2425     new_rtx = copy_rtx (new_rtx);
2426 
2427   /* Conservatively drop the object if we don't know where we start from.  */
2428   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2429     {
2430       attrs.expr = NULL_TREE;
2431       attrs.alias = 0;
2432     }
2433 
2434   /* Compute the new values of the memory attributes due to this adjustment.
2435      We add the offsets and update the alignment.  */
2436   if (attrs.offset_known_p)
2437     {
2438       attrs.offset += offset;
2439 
2440       /* Drop the object if the new left end is not within its bounds.  */
2441       if (adjust_object && maybe_lt (attrs.offset, 0))
2442 	{
2443 	  attrs.expr = NULL_TREE;
2444 	  attrs.alias = 0;
2445 	}
2446     }
2447 
2448   /* Compute the new alignment by taking the MIN of the alignment and the
2449      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2450      is zero.  */
2451   if (maybe_ne (offset, 0))
2452     {
2453       max_align = known_alignment (offset) * BITS_PER_UNIT;
2454       attrs.align = MIN (attrs.align, max_align);
2455     }
2456 
2457   if (maybe_ne (size, 0))
2458     {
2459       /* Drop the object if the new right end is not within its bounds.  */
2460       if (adjust_object && maybe_gt (offset + size, attrs.size))
2461 	{
2462 	  attrs.expr = NULL_TREE;
2463 	  attrs.alias = 0;
2464 	}
2465       attrs.size_known_p = true;
2466       attrs.size = size;
2467     }
2468   else if (attrs.size_known_p)
2469     {
2470       gcc_assert (!adjust_object);
2471       attrs.size -= offset;
2472       /* ??? The store_by_pieces machinery generates negative sizes,
2473 	 so don't assert for that here.  */
2474     }
2475 
2476   set_mem_attrs (new_rtx, &attrs);
2477 
2478   return new_rtx;
2479 }
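
/* Illustrative sketch (hypothetical helper): accessing the SImode word at
   byte offset 4 of a larger MEM through the adjust_address convenience
   macro, which calls adjust_address_1 with VALIDATE and ADJUST_ADDRESS set
   and ADJUST_OBJECT clear.  */

static rtx
example_second_word (rtx mem)
{
  /* The address is offset and validated, and the MEM_ATTRS offset, size
     and alignment are updated to match.  */
  return adjust_address (mem, SImode, 4);
}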
2480 
2481 /* Return a memory reference like MEMREF, but with its mode changed
2482    to MODE and its address changed to ADDR, which is assumed to be
2483    MEMREF offset by OFFSET bytes.  If VALIDATE is
2484    nonzero, the memory address is forced to be valid.  */
2485 
2486 rtx
2487 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2488 			     poly_int64 offset, int validate)
2489 {
2490   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2491   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2492 }
2493 
2494 /* Return a memory reference like MEMREF, but whose address is changed by
2495    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2496    known to be in OFFSET (possibly 1).  */
2497 
2498 rtx
2499 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2500 {
2501   rtx new_rtx, addr = XEXP (memref, 0);
2502   machine_mode address_mode;
2503   struct mem_attrs *defattrs;
2504 
2505   mem_attrs attrs (*get_mem_attrs (memref));
2506   address_mode = get_address_mode (memref);
2507   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2508 
2509   /* At this point we don't know _why_ the address is invalid.  It
2510      could have secondary memory references, multiplies or anything.
2511 
2512      However, if we did go and rearrange things, we can wind up not
2513      being able to recognize the magic around pic_offset_table_rtx.
2514      This stuff is fragile, and is yet another example of why it is
2515      bad to expose PIC machinery too early.  */
2516   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2517 				     attrs.addrspace)
2518       && GET_CODE (addr) == PLUS
2519       && XEXP (addr, 0) == pic_offset_table_rtx)
2520     {
2521       addr = force_reg (GET_MODE (addr), addr);
2522       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2523     }
2524 
2525   update_temp_slot_address (XEXP (memref, 0), new_rtx);
2526   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2527 
2528   /* If there are no changes, just return the original memory reference.  */
2529   if (new_rtx == memref)
2530     return new_rtx;
2531 
2532   /* Update the alignment to reflect the offset.  Reset the offset, which
2533      we don't know.  */
2534   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2535   attrs.offset_known_p = false;
2536   attrs.size_known_p = defattrs->size_known_p;
2537   attrs.size = defattrs->size;
2538   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2539   set_mem_attrs (new_rtx, &attrs);
2540   return new_rtx;
2541 }
2542 
2543 /* Return a memory reference like MEMREF, but with its address changed to
2544    ADDR.  The caller is asserting that the actual piece of memory pointed
2545    to is the same, just the form of the address is being changed, such as
2546    by putting something into a register.  INPLACE is true if any changes
2547    can be made directly to MEMREF or false if MEMREF must be treated as
2548    immutable.  */
2549 
2550 rtx
2551 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2552 {
2553   /* change_address_1 copies the memory attribute structure without change
2554      and that's exactly what we want here.  */
2555   update_temp_slot_address (XEXP (memref, 0), addr);
2556   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2557 }
2558 
2559 /* Likewise, but the reference is not required to be valid.  */
2560 
2561 rtx
2562 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2563 {
2564   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2565 }
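
/* Illustrative sketch (hypothetical helper): forcing a MEM's address into a
   register without changing which bytes are referenced, so the memory
   attributes carry over unchanged.  */

static rtx
example_force_address_to_reg (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return replace_equiv_address (mem, addr);
}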
2566 
2567 /* Return a memory reference like MEMREF, but with its mode widened to
2568    MODE and offset by OFFSET.  This would be used by targets that e.g.
2569    cannot issue QImode memory operations and have to use SImode memory
2570    operations plus masking logic.  */
2571 
2572 rtx
2573 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2574 {
2575   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2576   poly_uint64 size = GET_MODE_SIZE (mode);
2577 
2578   /* If there are no changes, just return the original memory reference.  */
2579   if (new_rtx == memref)
2580     return new_rtx;
2581 
2582   mem_attrs attrs (*get_mem_attrs (new_rtx));
2583 
2584   /* If we don't know what offset we were at within the expression, then
2585      we can't know if we've overstepped the bounds.  */
2586   if (! attrs.offset_known_p)
2587     attrs.expr = NULL_TREE;
2588 
2589   while (attrs.expr)
2590     {
2591       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2592 	{
2593 	  tree field = TREE_OPERAND (attrs.expr, 1);
2594 	  tree offset = component_ref_field_offset (attrs.expr);
2595 
2596 	  if (! DECL_SIZE_UNIT (field))
2597 	    {
2598 	      attrs.expr = NULL_TREE;
2599 	      break;
2600 	    }
2601 
2602 	  /* Is the field at least as large as the access?  If so, ok,
2603 	     otherwise strip back to the containing structure.  */
2604 	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2605 	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2606 	      && known_ge (attrs.offset, 0))
2607 	    break;
2608 
2609 	  poly_uint64 suboffset;
2610 	  if (!poly_int_tree_p (offset, &suboffset))
2611 	    {
2612 	      attrs.expr = NULL_TREE;
2613 	      break;
2614 	    }
2615 
2616 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2617 	  attrs.offset += suboffset;
2618 	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2619 			   / BITS_PER_UNIT);
2620 	}
2621       /* Similarly for the decl.  */
2622       else if (DECL_P (attrs.expr)
2623 	       && DECL_SIZE_UNIT (attrs.expr)
2624 	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2625 	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2626 			   size)
2627 	       && known_ge (attrs.offset, 0))
2628 	break;
2629       else
2630 	{
2631 	  /* The widened memory access overflows the expression, which means
2632 	     that it could alias another expression.  Zap it.  */
2633 	  attrs.expr = NULL_TREE;
2634 	  break;
2635 	}
2636     }
2637 
2638   if (! attrs.expr)
2639     attrs.offset_known_p = false;
2640 
2641   /* The widened memory may alias other stuff, so zap the alias set.  */
2642   /* ??? Maybe use get_alias_set on any remaining expression.  */
2643   attrs.alias = 0;
2644   attrs.size_known_p = true;
2645   attrs.size = size;
2646   set_mem_attrs (new_rtx, &attrs);
2647   return new_rtx;
2648 }
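
/* Illustrative sketch (hypothetical helper): a target that cannot perform
   QImode loads might widen the access to SImode and mask afterwards; only
   the widening step is shown.  */

static rtx
example_widen_byte_access (rtx mem)
{
  gcc_assert (GET_MODE (mem) == QImode);
  /* Same starting address, SImode access; the alias set is cleared because
     the wider access may overlap neighbouring objects.  */
  return widen_memory_access (mem, SImode, 0);
}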
2649 
2650 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
2651 static GTY(()) tree spill_slot_decl;
2652 
2653 tree
2654 get_spill_slot_decl (bool force_build_p)
2655 {
2656   tree d = spill_slot_decl;
2657   rtx rd;
2658 
2659   if (d || !force_build_p)
2660     return d;
2661 
2662   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2663 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2664   DECL_ARTIFICIAL (d) = 1;
2665   DECL_IGNORED_P (d) = 1;
2666   TREE_USED (d) = 1;
2667   spill_slot_decl = d;
2668 
2669   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2670   MEM_NOTRAP_P (rd) = 1;
2671   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2672   attrs.alias = new_alias_set ();
2673   attrs.expr = d;
2674   set_mem_attrs (rd, &attrs);
2675   SET_DECL_RTL (d, rd);
2676 
2677   return d;
2678 }
2679 
2680 /* Given MEM, a result from assign_stack_local, fill in the memory
2681    attributes as appropriate for a register allocator spill slot.
2682    These slots are not aliasable by other memory.  We arrange for
2683    them all to use a single MEM_EXPR, so that the aliasing code can
2684    work properly in the case of shared spill slots.  */
2685 
2686 void
2687 set_mem_attrs_for_spill (rtx mem)
2688 {
2689   rtx addr;
2690 
2691   mem_attrs attrs (*get_mem_attrs (mem));
2692   attrs.expr = get_spill_slot_decl (true);
2693   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2694   attrs.addrspace = ADDR_SPACE_GENERIC;
2695 
2696   /* We expect the incoming memory to be of the form:
2697 	(mem:MODE (plus (reg sfp) (const_int offset)))
2698      with perhaps the plus missing for offset = 0.  */
2699   addr = XEXP (mem, 0);
2700   attrs.offset_known_p = true;
2701   strip_offset (addr, &attrs.offset);
2702 
2703   set_mem_attrs (mem, &attrs);
2704   MEM_NOTRAP_P (mem) = 1;
2705 }
2706 
2707 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2708 
2709 rtx_code_label *
2710 gen_label_rtx (void)
2711 {
2712   return as_a <rtx_code_label *> (
2713 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2714 				NULL, label_num++, NULL));
2715 }
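
/* Illustrative sketch (hypothetical helper): the common pairing of
   gen_label_rtx with emit_label when expanding a forward branch target.  */

static void
example_emit_branch_target (void)
{
  rtx_code_label *label = gen_label_rtx ();
  /* ... conditional jumps to LABEL would be emitted here ...  */
  emit_label (label);	/* Bind LABEL to the current insertion point.  */
}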
2716 
2717 /* For procedure integration.  */
2718 
2719 /* Install new pointers to the first and last insns in the chain.
2720    Also, set cur_insn_uid to one higher than the last in use.
2721    Used for an inline-procedure after copying the insn chain.  */
2722 
2723 void
2724 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2725 {
2726   rtx_insn *insn;
2727 
2728   set_first_insn (first);
2729   set_last_insn (last);
2730   cur_insn_uid = 0;
2731 
2732   if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2733     {
2734       int debug_count = 0;
2735 
2736       cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2737       cur_debug_insn_uid = 0;
2738 
2739       for (insn = first; insn; insn = NEXT_INSN (insn))
2740 	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2741 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2742 	else
2743 	  {
2744 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2745 	    if (DEBUG_INSN_P (insn))
2746 	      debug_count++;
2747 	  }
2748 
2749       if (debug_count)
2750 	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2751       else
2752 	cur_debug_insn_uid++;
2753     }
2754   else
2755     for (insn = first; insn; insn = NEXT_INSN (insn))
2756       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2757 
2758   cur_insn_uid++;
2759 }
2760 
2761 /* Go through all the RTL insn bodies and copy any invalid shared
2762    structure.  This routine should only be called once.  */
2763 
2764 static void
2765 unshare_all_rtl_1 (rtx_insn *insn)
2766 {
2767   /* Unshare just about everything else.  */
2768   unshare_all_rtl_in_chain (insn);
2769 
2770   /* Make sure the addresses of stack slots found outside the insn chain
2771      (such as, in DECL_RTL of a variable) are not shared
2772      with the insn chain.
2773 
2774      This special care is necessary when the stack slot MEM does not
2775      actually appear in the insn chain.  If it does appear, its address
2776      is unshared from all else at that point.  */
2777   unsigned int i;
2778   rtx temp;
2779   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2780     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2781 }
2782 
2783 /* Go through all the RTL insn bodies and copy any invalid shared
2784    structure, again.  This is a fairly expensive thing to do so it
2785    should be done sparingly.  */
2786 
2787 void
2788 unshare_all_rtl_again (rtx_insn *insn)
2789 {
2790   rtx_insn *p;
2791   tree decl;
2792 
2793   for (p = insn; p; p = NEXT_INSN (p))
2794     if (INSN_P (p))
2795       {
2796 	reset_used_flags (PATTERN (p));
2797 	reset_used_flags (REG_NOTES (p));
2798 	if (CALL_P (p))
2799 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2800       }
2801 
2802   /* Make sure that virtual stack slots are not shared.  */
2803   set_used_decls (DECL_INITIAL (cfun->decl));
2804 
2805   /* Make sure that virtual parameters are not shared.  */
2806   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2807     set_used_flags (DECL_RTL (decl));
2808 
2809   rtx temp;
2810   unsigned int i;
2811   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2812     reset_used_flags (temp);
2813 
2814   unshare_all_rtl_1 (insn);
2815 }
2816 
2817 unsigned int
2818 unshare_all_rtl (void)
2819 {
2820   unshare_all_rtl_1 (get_insns ());
2821 
2822   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2823     {
2824       if (DECL_RTL_SET_P (decl))
2825 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2826       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2827     }
2828 
2829   return 0;
2830 }
2831 
2832 
2833 /* Check that ORIG is not marked when it should not be, and mark ORIG as in
2834    use.  Recursively do the same for its subexpressions.  */
2835 
2836 static void
2837 verify_rtx_sharing (rtx orig, rtx insn)
2838 {
2839   rtx x = orig;
2840   int i;
2841   enum rtx_code code;
2842   const char *format_ptr;
2843 
2844   if (x == 0)
2845     return;
2846 
2847   code = GET_CODE (x);
2848 
2849   /* These types may be freely shared.  */
2850 
2851   switch (code)
2852     {
2853     case REG:
2854     case DEBUG_EXPR:
2855     case VALUE:
2856     CASE_CONST_ANY:
2857     case SYMBOL_REF:
2858     case LABEL_REF:
2859     case CODE_LABEL:
2860     case PC:
2861     case CC0:
2862     case RETURN:
2863     case SIMPLE_RETURN:
2864     case SCRATCH:
2865       /* SCRATCHes must be shared because they represent distinct values.  */
2866       return;
2867     case CLOBBER:
2868       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2869          clobbers or clobbers of hard registers that originated as pseudos.
2870          This is needed to allow safe register renaming.  */
2871       if (REG_P (XEXP (x, 0))
2872 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2873 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2874 	return;
2875       break;
2876 
2877     case CONST:
2878       if (shared_const_p (orig))
2879 	return;
2880       break;
2881 
2882     case MEM:
2883       /* A MEM is allowed to be shared if its address is constant.  */
2884       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2885 	  || reload_completed || reload_in_progress)
2886 	return;
2887 
2888       break;
2889 
2890     default:
2891       break;
2892     }
2893 
2894   /* This rtx may not be shared.  If it has already been seen,
2895      replace it with a copy of itself.  */
2896   if (flag_checking && RTX_FLAG (x, used))
2897     {
2898       error ("invalid rtl sharing found in the insn");
2899       debug_rtx (insn);
2900       error ("shared rtx");
2901       debug_rtx (x);
2902       internal_error ("internal consistency failure");
2903     }
2904   gcc_assert (!RTX_FLAG (x, used));
2905 
2906   RTX_FLAG (x, used) = 1;
2907 
2908   /* Now scan the subexpressions recursively.  */
2909 
2910   format_ptr = GET_RTX_FORMAT (code);
2911 
2912   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2913     {
2914       switch (*format_ptr++)
2915 	{
2916 	case 'e':
2917 	  verify_rtx_sharing (XEXP (x, i), insn);
2918 	  break;
2919 
2920 	case 'E':
2921 	  if (XVEC (x, i) != NULL)
2922 	    {
2923 	      int j;
2924 	      int len = XVECLEN (x, i);
2925 
2926 	      for (j = 0; j < len; j++)
2927 		{
2928 		  /* We allow sharing of ASM_OPERANDS inside a single
2929 		     instruction.  */
2930 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2931 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2932 			  == ASM_OPERANDS))
2933 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2934 		  else
2935 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2936 		}
2937 	    }
2938 	  break;
2939 	}
2940     }
2941   return;
2942 }
2943 
2944 /* Reset used-flags for INSN.  */
2945 
2946 static void
2947 reset_insn_used_flags (rtx insn)
2948 {
2949   gcc_assert (INSN_P (insn));
2950   reset_used_flags (PATTERN (insn));
2951   reset_used_flags (REG_NOTES (insn));
2952   if (CALL_P (insn))
2953     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2954 }
2955 
2956 /* Go through all the RTL insn bodies and clear all the USED bits.  */
2957 
2958 static void
2959 reset_all_used_flags (void)
2960 {
2961   rtx_insn *p;
2962 
2963   for (p = get_insns (); p; p = NEXT_INSN (p))
2964     if (INSN_P (p))
2965       {
2966 	rtx pat = PATTERN (p);
2967 	if (GET_CODE (pat) != SEQUENCE)
2968 	  reset_insn_used_flags (p);
2969 	else
2970 	  {
2971 	    gcc_assert (REG_NOTES (p) == NULL);
2972 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
2973 	      {
2974 		rtx insn = XVECEXP (pat, 0, i);
2975 		if (INSN_P (insn))
2976 		  reset_insn_used_flags (insn);
2977 	      }
2978 	  }
2979       }
2980 }
2981 
2982 /* Verify sharing in INSN.  */
2983 
2984 static void
2985 verify_insn_sharing (rtx insn)
2986 {
2987   gcc_assert (INSN_P (insn));
2988   verify_rtx_sharing (PATTERN (insn), insn);
2989   verify_rtx_sharing (REG_NOTES (insn), insn);
2990   if (CALL_P (insn))
2991     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2992 }
2993 
2994 /* Go through all the RTL insn bodies and check that there is no unexpected
2995    sharing in between the subexpressions.  */
2996 
2997 DEBUG_FUNCTION void
2998 verify_rtl_sharing (void)
2999 {
3000   rtx_insn *p;
3001 
3002   timevar_push (TV_VERIFY_RTL_SHARING);
3003 
3004   reset_all_used_flags ();
3005 
3006   for (p = get_insns (); p; p = NEXT_INSN (p))
3007     if (INSN_P (p))
3008       {
3009 	rtx pat = PATTERN (p);
3010 	if (GET_CODE (pat) != SEQUENCE)
3011 	  verify_insn_sharing (p);
3012 	else
3013 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
3014 	      {
3015 		rtx insn = XVECEXP (pat, 0, i);
3016 		if (INSN_P (insn))
3017 		  verify_insn_sharing (insn);
3018 	      }
3019       }
3020 
3021   reset_all_used_flags ();
3022 
3023   timevar_pop (TV_VERIFY_RTL_SHARING);
3024 }
3025 
3026 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3027    Assumes the mark bits are cleared at entry.  */
3028 
3029 void
3030 unshare_all_rtl_in_chain (rtx_insn *insn)
3031 {
3032   for (; insn; insn = NEXT_INSN (insn))
3033     if (INSN_P (insn))
3034       {
3035 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3036 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3037 	if (CALL_P (insn))
3038 	  CALL_INSN_FUNCTION_USAGE (insn)
3039 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3040       }
3041 }
3042 
3043 /* Go through all virtual stack slots of a function and mark them as
3044    shared.  We never replace the DECL_RTLs themselves with a copy,
3045    but expressions mentioned in a DECL_RTL cannot be shared with
3046    expressions in the instruction stream.
3047 
3048    Note that reload may convert pseudo registers into memories in-place.
3049    Pseudo registers are always shared, but MEMs never are.  Thus if we
3050    reset the used flags on MEMs in the instruction stream, we must set
3051    them again on MEMs that appear in DECL_RTLs.  */
3052 
3053 static void
3054 set_used_decls (tree blk)
3055 {
3056   tree t;
3057 
3058   /* Mark decls.  */
3059   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3060     if (DECL_RTL_SET_P (t))
3061       set_used_flags (DECL_RTL (t));
3062 
3063   /* Now process sub-blocks.  */
3064   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3065     set_used_decls (t);
3066 }
3067 
3068 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3069    Recursively does the same for subexpressions.  Uses
3070    copy_rtx_if_shared_1 to reduce stack space.  */
3071 
3072 rtx
3073 copy_rtx_if_shared (rtx orig)
3074 {
3075   copy_rtx_if_shared_1 (&orig);
3076   return orig;
3077 }
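
/* Illustrative sketch (hypothetical helper): a typical per-insn unsharing
   idiom built on the routines above: clear the used flags first, then let
   copy_rtx_if_shared duplicate anything that is reached twice.  */

static void
example_unshare_one_insn (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}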
3078 
3079 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3080    use.  Recursively does the same for subexpressions.  */
3081 
3082 static void
3083 copy_rtx_if_shared_1 (rtx *orig1)
3084 {
3085   rtx x;
3086   int i;
3087   enum rtx_code code;
3088   rtx *last_ptr;
3089   const char *format_ptr;
3090   int copied = 0;
3091   int length;
3092 
3093   /* Repeat is used to turn tail-recursion into iteration.  */
3094 repeat:
3095   x = *orig1;
3096 
3097   if (x == 0)
3098     return;
3099 
3100   code = GET_CODE (x);
3101 
3102   /* These types may be freely shared.  */
3103 
3104   switch (code)
3105     {
3106     case REG:
3107     case DEBUG_EXPR:
3108     case VALUE:
3109     CASE_CONST_ANY:
3110     case SYMBOL_REF:
3111     case LABEL_REF:
3112     case CODE_LABEL:
3113     case PC:
3114     case CC0:
3115     case RETURN:
3116     case SIMPLE_RETURN:
3117     case SCRATCH:
3118       /* SCRATCHes must be shared because they represent distinct values.  */
3119       return;
3120     case CLOBBER:
3121       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3122          clobbers or clobbers of hard registers that originated as pseudos.
3123          This is needed to allow safe register renaming.  */
3124       if (REG_P (XEXP (x, 0))
3125 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3126 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3127 	return;
3128       break;
3129 
3130     case CONST:
3131       if (shared_const_p (x))
3132 	return;
3133       break;
3134 
3135     case DEBUG_INSN:
3136     case INSN:
3137     case JUMP_INSN:
3138     case CALL_INSN:
3139     case NOTE:
3140     case BARRIER:
3141       /* The chain of insns is not being copied.  */
3142       return;
3143 
3144     default:
3145       break;
3146     }
3147 
3148   /* This rtx may not be shared.  If it has already been seen,
3149      replace it with a copy of itself.  */
3150 
3151   if (RTX_FLAG (x, used))
3152     {
3153       x = shallow_copy_rtx (x);
3154       copied = 1;
3155     }
3156   RTX_FLAG (x, used) = 1;
3157 
3158   /* Now scan the subexpressions recursively.
3159      We can store any replaced subexpressions directly into X
3160      since we know X is not shared!  Any vectors in X
3161      must be copied if X was copied.  */
3162 
3163   format_ptr = GET_RTX_FORMAT (code);
3164   length = GET_RTX_LENGTH (code);
3165   last_ptr = NULL;
3166 
3167   for (i = 0; i < length; i++)
3168     {
3169       switch (*format_ptr++)
3170 	{
3171 	case 'e':
3172           if (last_ptr)
3173             copy_rtx_if_shared_1 (last_ptr);
3174 	  last_ptr = &XEXP (x, i);
3175 	  break;
3176 
3177 	case 'E':
3178 	  if (XVEC (x, i) != NULL)
3179 	    {
3180 	      int j;
3181 	      int len = XVECLEN (x, i);
3182 
3183               /* Copy the vector iff I copied the rtx and the length
3184 		 is nonzero.  */
3185 	      if (copied && len > 0)
3186 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3187 
3188               /* Call recursively on all inside the vector.  */
3189 	      for (j = 0; j < len; j++)
3190                 {
3191 		  if (last_ptr)
3192 		    copy_rtx_if_shared_1 (last_ptr);
3193                   last_ptr = &XVECEXP (x, i, j);
3194                 }
3195 	    }
3196 	  break;
3197 	}
3198     }
3199   *orig1 = x;
3200   if (last_ptr)
3201     {
3202       orig1 = last_ptr;
3203       goto repeat;
3204     }
3205   return;
3206 }
3207 
3208 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
3209 
3210 static void
3211 mark_used_flags (rtx x, int flag)
3212 {
3213   int i, j;
3214   enum rtx_code code;
3215   const char *format_ptr;
3216   int length;
3217 
3218   /* Repeat is used to turn tail-recursion into iteration.  */
3219 repeat:
3220   if (x == 0)
3221     return;
3222 
3223   code = GET_CODE (x);
3224 
3225   /* These types may be freely shared so we needn't do any resetting
3226      for them.  */
3227 
3228   switch (code)
3229     {
3230     case REG:
3231     case DEBUG_EXPR:
3232     case VALUE:
3233     CASE_CONST_ANY:
3234     case SYMBOL_REF:
3235     case CODE_LABEL:
3236     case PC:
3237     case CC0:
3238     case RETURN:
3239     case SIMPLE_RETURN:
3240       return;
3241 
3242     case DEBUG_INSN:
3243     case INSN:
3244     case JUMP_INSN:
3245     case CALL_INSN:
3246     case NOTE:
3247     case LABEL_REF:
3248     case BARRIER:
3249       /* The chain of insns is not being copied.  */
3250       return;
3251 
3252     default:
3253       break;
3254     }
3255 
3256   RTX_FLAG (x, used) = flag;
3257 
3258   format_ptr = GET_RTX_FORMAT (code);
3259   length = GET_RTX_LENGTH (code);
3260 
3261   for (i = 0; i < length; i++)
3262     {
3263       switch (*format_ptr++)
3264 	{
3265 	case 'e':
3266           if (i == length-1)
3267             {
3268               x = XEXP (x, i);
3269 	      goto repeat;
3270             }
3271 	  mark_used_flags (XEXP (x, i), flag);
3272 	  break;
3273 
3274 	case 'E':
3275 	  for (j = 0; j < XVECLEN (x, i); j++)
3276 	    mark_used_flags (XVECEXP (x, i, j), flag);
3277 	  break;
3278 	}
3279     }
3280 }
3281 
3282 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3283    to look for shared sub-parts.  */
3284 
3285 void
3286 reset_used_flags (rtx x)
3287 {
3288   mark_used_flags (x, 0);
3289 }
3290 
3291 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3292    to look for shared sub-parts.  */
3293 
3294 void
3295 set_used_flags (rtx x)
3296 {
3297   mark_used_flags (x, 1);
3298 }
3299 
3300 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3301    Return X or the rtx for the pseudo reg the value of X was copied into.
3302    OTHER must be valid as a SET_DEST.  */
3303 
3304 rtx
3305 make_safe_from (rtx x, rtx other)
3306 {
3307   while (1)
3308     switch (GET_CODE (other))
3309       {
3310       case SUBREG:
3311 	other = SUBREG_REG (other);
3312 	break;
3313       case STRICT_LOW_PART:
3314       case SIGN_EXTEND:
3315       case ZERO_EXTEND:
3316 	other = XEXP (other, 0);
3317 	break;
3318       default:
3319 	goto done;
3320       }
3321  done:
3322   if ((MEM_P (other)
3323        && ! CONSTANT_P (x)
3324        && !REG_P (x)
3325        && GET_CODE (x) != SUBREG)
3326       || (REG_P (other)
3327 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
3328 	      || reg_mentioned_p (other, x))))
3329     {
3330       rtx temp = gen_reg_rtx (GET_MODE (x));
3331       emit_move_insn (temp, x);
3332       return temp;
3333     }
3334   return x;
3335 }
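
/* Illustrative sketch only (not a caller taken from this file): before
   emitting a store into DEST that might overlap X, a caller can do

	x = make_safe_from (x, dest);
	emit_move_insn (dest, y);
	... X still holds its pre-store value here ...

   If the store could have clobbered X, make_safe_from has already copied
   X into a fresh pseudo.  */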
3336 
3337 /* Emission of insns (adding them to the doubly-linked list).  */
3338 
3339 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
3340 
3341 rtx_insn *
3342 get_last_insn_anywhere (void)
3343 {
3344   struct sequence_stack *seq;
3345   for (seq = get_current_sequence (); seq; seq = seq->next)
3346     if (seq->last != 0)
3347       return seq->last;
3348   return 0;
3349 }
3350 
3351 /* Return the first nonnote insn emitted in current sequence or current
3352    function.  This routine looks inside SEQUENCEs.  */
3353 
3354 rtx_insn *
3355 get_first_nonnote_insn (void)
3356 {
3357   rtx_insn *insn = get_insns ();
3358 
3359   if (insn)
3360     {
3361       if (NOTE_P (insn))
3362 	for (insn = next_insn (insn);
3363 	     insn && NOTE_P (insn);
3364 	     insn = next_insn (insn))
3365 	  continue;
3366       else
3367 	{
3368 	  if (NONJUMP_INSN_P (insn)
3369 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3370 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3371 	}
3372     }
3373 
3374   return insn;
3375 }
3376 
3377 /* Return the last nonnote insn emitted in current sequence or current
3378    function.  This routine looks inside SEQUENCEs.  */
3379 
3380 rtx_insn *
3381 get_last_nonnote_insn (void)
3382 {
3383   rtx_insn *insn = get_last_insn ();
3384 
3385   if (insn)
3386     {
3387       if (NOTE_P (insn))
3388 	for (insn = previous_insn (insn);
3389 	     insn && NOTE_P (insn);
3390 	     insn = previous_insn (insn))
3391 	  continue;
3392       else
3393 	{
3394 	  if (NONJUMP_INSN_P (insn))
3395 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3396 	      insn = seq->insn (seq->len () - 1);
3397 	}
3398     }
3399 
3400   return insn;
3401 }
3402 
3403 /* Return the number of actual (non-debug) insns emitted in this
3404    function.  */
3405 
3406 int
3407 get_max_insn_count (void)
3408 {
3409   int n = cur_insn_uid;
3410 
3411   /* The table size must be stable across -g, to avoid codegen
3412      differences due to debug insns, and not be affected by
3413      -fmin-insn-uid, to avoid excessive table size and to simplify
3414      debugging of -fcompare-debug failures.  */
3415   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3416     n -= cur_debug_insn_uid;
3417   else
3418     n -= MIN_NONDEBUG_INSN_UID;
3419 
3420   return n;
3421 }
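
/* Worked example (hedged; assume MIN_NONDEBUG_INSN_UID is 20): with
   cur_insn_uid == 57 and few enough debug insns that cur_debug_insn_uid
   never grew past 20, the result is 57 - 20 = 37, no matter how many
   debug insns -g created; that is the stability property the comment
   above asks for.  */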
3422 
3423 
3424 /* Return the next insn.  If it is a SEQUENCE, return the first insn
3425    of the sequence.  */
3426 
3427 rtx_insn *
3428 next_insn (rtx_insn *insn)
3429 {
3430   if (insn)
3431     {
3432       insn = NEXT_INSN (insn);
3433       if (insn && NONJUMP_INSN_P (insn)
3434 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3435 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3436     }
3437 
3438   return insn;
3439 }
3440 
3441 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3442    of the sequence.  */
3443 
3444 rtx_insn *
3445 previous_insn (rtx_insn *insn)
3446 {
3447   if (insn)
3448     {
3449       insn = PREV_INSN (insn);
3450       if (insn && NONJUMP_INSN_P (insn))
3451 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3452 	  insn = seq->insn (seq->len () - 1);
3453     }
3454 
3455   return insn;
3456 }
3457 
3458 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3459    look inside SEQUENCEs.  */
3460 
3461 rtx_insn *
3462 next_nonnote_insn (rtx_insn *insn)
3463 {
3464   while (insn)
3465     {
3466       insn = NEXT_INSN (insn);
3467       if (insn == 0 || !NOTE_P (insn))
3468 	break;
3469     }
3470 
3471   return insn;
3472 }
3473 
3474 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3475    routine does not look inside SEQUENCEs.  */
3476 
3477 rtx_insn *
3478 next_nondebug_insn (rtx_insn *insn)
3479 {
3480   while (insn)
3481     {
3482       insn = NEXT_INSN (insn);
3483       if (insn == 0 || !DEBUG_INSN_P (insn))
3484 	break;
3485     }
3486 
3487   return insn;
3488 }
3489 
3490 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3491    not look inside SEQUENCEs.  */
3492 
3493 rtx_insn *
3494 prev_nonnote_insn (rtx_insn *insn)
3495 {
3496   while (insn)
3497     {
3498       insn = PREV_INSN (insn);
3499       if (insn == 0 || !NOTE_P (insn))
3500 	break;
3501     }
3502 
3503   return insn;
3504 }
3505 
3506 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3507    This routine does not look inside SEQUENCEs.  */
3508 
3509 rtx_insn *
3510 prev_nondebug_insn (rtx_insn *insn)
3511 {
3512   while (insn)
3513     {
3514       insn = PREV_INSN (insn);
3515       if (insn == 0 || !DEBUG_INSN_P (insn))
3516 	break;
3517     }
3518 
3519   return insn;
3520 }
3521 
3522 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3523    This routine does not look inside SEQUENCEs.  */
3524 
3525 rtx_insn *
3526 next_nonnote_nondebug_insn (rtx_insn *insn)
3527 {
3528   while (insn)
3529     {
3530       insn = NEXT_INSN (insn);
3531       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3532 	break;
3533     }
3534 
3535   return insn;
3536 }
3537 
3538 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3539    but stop the search before we enter another basic block.  This
3540    routine does not look inside SEQUENCEs.  */
3541 
3542 rtx_insn *
3543 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3544 {
3545   while (insn)
3546     {
3547       insn = NEXT_INSN (insn);
3548       if (insn == 0)
3549 	break;
3550       if (DEBUG_INSN_P (insn))
3551 	continue;
3552       if (!NOTE_P (insn))
3553 	break;
3554       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3555 	return NULL;
3556     }
3557 
3558   return insn;
3559 }
3560 
3561 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3562    This routine does not look inside SEQUENCEs.  */
3563 
3564 rtx_insn *
3565 prev_nonnote_nondebug_insn (rtx_insn *insn)
3566 {
3567   while (insn)
3568     {
3569       insn = PREV_INSN (insn);
3570       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3571 	break;
3572     }
3573 
3574   return insn;
3575 }
3576 
3577 /* Return the previous insn before INSN that is not a NOTE nor
3578    DEBUG_INSN, but stop the search before we enter another basic
3579    block.  This routine does not look inside SEQUENCEs.  */
3580 
3581 rtx_insn *
3582 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3583 {
3584   while (insn)
3585     {
3586       insn = PREV_INSN (insn);
3587       if (insn == 0)
3588 	break;
3589       if (DEBUG_INSN_P (insn))
3590 	continue;
3591       if (!NOTE_P (insn))
3592 	break;
3593       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3594 	return NULL;
3595     }
3596 
3597   return insn;
3598 }
3599 
3600 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3601    or 0, if there is none.  This routine does not look inside
3602    SEQUENCEs.  */
3603 
3604 rtx_insn *
3605 next_real_insn (rtx uncast_insn)
3606 {
3607   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3608 
3609   while (insn)
3610     {
3611       insn = NEXT_INSN (insn);
3612       if (insn == 0 || INSN_P (insn))
3613 	break;
3614     }
3615 
3616   return insn;
3617 }
3618 
3619 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3620    or 0, if there is none.  This routine does not look inside
3621    SEQUENCEs.  */
3622 
3623 rtx_insn *
3624 prev_real_insn (rtx_insn *insn)
3625 {
3626   while (insn)
3627     {
3628       insn = PREV_INSN (insn);
3629       if (insn == 0 || INSN_P (insn))
3630 	break;
3631     }
3632 
3633   return insn;
3634 }
3635 
3636 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3637    or 0, if there is none.  This routine does not look inside
3638    SEQUENCEs.  */
3639 
3640 rtx_insn *
3641 next_real_nondebug_insn (rtx uncast_insn)
3642 {
3643   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3644 
3645   while (insn)
3646     {
3647       insn = NEXT_INSN (insn);
3648       if (insn == 0 || NONDEBUG_INSN_P (insn))
3649 	break;
3650     }
3651 
3652   return insn;
3653 }
3654 
3655 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3656    or 0, if there is none.  This routine does not look inside
3657    SEQUENCEs.  */
3658 
3659 rtx_insn *
3660 prev_real_nondebug_insn (rtx_insn *insn)
3661 {
3662   while (insn)
3663     {
3664       insn = PREV_INSN (insn);
3665       if (insn == 0 || NONDEBUG_INSN_P (insn))
3666 	break;
3667     }
3668 
3669   return insn;
3670 }
3671 
3672 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3673    This routine does not look inside SEQUENCEs.  */
3674 
3675 rtx_call_insn *
3676 last_call_insn (void)
3677 {
3678   rtx_insn *insn;
3679 
3680   for (insn = get_last_insn ();
3681        insn && !CALL_P (insn);
3682        insn = PREV_INSN (insn))
3683     ;
3684 
3685   return safe_as_a <rtx_call_insn *> (insn);
3686 }
3687 
3688 /* Find the next insn after INSN that really does something.  This routine
3689    does not look inside SEQUENCEs.  After reload this also skips over
3690    standalone USE and CLOBBER insns.  */
3691 
3692 int
3693 active_insn_p (const rtx_insn *insn)
3694 {
3695   return (CALL_P (insn) || JUMP_P (insn)
3696 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
3697 	  || (NONJUMP_INSN_P (insn)
3698 	      && (! reload_completed
3699 		  || (GET_CODE (PATTERN (insn)) != USE
3700 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3701 }
3702 
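/* Return the next insn after INSN that satisfies active_insn_p, or 0 if
   there is none.  */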
3703 rtx_insn *
3704 next_active_insn (rtx_insn *insn)
3705 {
3706   while (insn)
3707     {
3708       insn = NEXT_INSN (insn);
3709       if (insn == 0 || active_insn_p (insn))
3710 	break;
3711     }
3712 
3713   return insn;
3714 }
3715 
3716 /* Find the last insn before INSN that really does something.  This routine
3717    does not look inside SEQUENCEs.  After reload this also skips over
3718    standalone USE and CLOBBER insns.  */
3719 
3720 rtx_insn *
3721 prev_active_insn (rtx_insn *insn)
3722 {
3723   while (insn)
3724     {
3725       insn = PREV_INSN (insn);
3726       if (insn == 0 || active_insn_p (insn))
3727 	break;
3728     }
3729 
3730   return insn;
3731 }
3732 
3733 /* Return the next insn that uses CC0 after INSN, which is assumed to
3734    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3735    applied to the result of this function should yield INSN).
3736 
3737    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3738    is present, it contains the insn that uses CC0.
3739 
3740    Return 0 if we can't find the insn.  */
3741 
3742 rtx_insn *
3743 next_cc0_user (rtx_insn *insn)
3744 {
3745   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3746 
3747   if (note)
3748     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3749 
3750   insn = next_nonnote_insn (insn);
3751   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3752     insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3753 
3754   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3755     return insn;
3756 
3757   return 0;
3758 }
3759 
3760 /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3761    note, it is the previous insn.  */
3762 
3763 rtx_insn *
3764 prev_cc0_setter (rtx_insn *insn)
3765 {
3766   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3767 
3768   if (note)
3769     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3770 
3771   insn = prev_nonnote_insn (insn);
3772   gcc_assert (sets_cc0_p (PATTERN (insn)));
3773 
3774   return insn;
3775 }
3776 
3777 /* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG.  */
3778 
3779 static int
3780 find_auto_inc (const_rtx x, const_rtx reg)
3781 {
3782   subrtx_iterator::array_type array;
3783   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3784     {
3785       const_rtx x = *iter;
3786       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3787 	  && rtx_equal_p (reg, XEXP (x, 0)))
3788 	return true;
3789     }
3790   return false;
3791 }
3792 
3793 /* Increment the label uses for all labels present in rtx.  */
3794 
3795 static void
3796 mark_label_nuses (rtx x)
3797 {
3798   enum rtx_code code;
3799   int i, j;
3800   const char *fmt;
3801 
3802   code = GET_CODE (x);
3803   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3804     LABEL_NUSES (label_ref_label (x))++;
3805 
3806   fmt = GET_RTX_FORMAT (code);
3807   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3808     {
3809       if (fmt[i] == 'e')
3810 	mark_label_nuses (XEXP (x, i));
3811       else if (fmt[i] == 'E')
3812 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3813 	  mark_label_nuses (XVECEXP (x, i, j));
3814     }
3815 }
3816 
3817 
3818 /* Try splitting insns that can be split for better scheduling.
3819    PAT is the pattern which might split.
3820    TRIAL is the insn providing PAT.
3821    LAST is nonzero if we should return the last insn of the sequence produced.
3822 
3823    If this routine succeeds in splitting, it returns the first or last
3824    replacement insn depending on the value of LAST.  Otherwise, it
3825    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3826 
3827 rtx_insn *
3828 try_split (rtx pat, rtx_insn *trial, int last)
3829 {
3830   rtx_insn *before, *after;
3831   rtx note;
3832   rtx_insn *seq, *tem;
3833   profile_probability probability;
3834   rtx_insn *insn_last, *insn;
3835   int njumps = 0;
3836   rtx_insn *call_insn = NULL;
3837 
3838   /* We're not good at redistributing frame information.  */
3839   if (RTX_FRAME_RELATED_P (trial))
3840     return trial;
3841 
3842   if (any_condjump_p (trial)
3843       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3844     split_branch_probability
3845       = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3846   else
3847     split_branch_probability = profile_probability::uninitialized ();
3848 
3849   probability = split_branch_probability;
3850 
3851   seq = split_insns (pat, trial);
3852 
3853   split_branch_probability = profile_probability::uninitialized ();
3854 
3855   if (!seq)
3856     return trial;
3857 
3858   /* Avoid infinite loop if any insn of the result matches
3859      the original pattern.  */
3860   insn_last = seq;
3861   while (1)
3862     {
3863       if (INSN_P (insn_last)
3864 	  && rtx_equal_p (PATTERN (insn_last), pat))
3865 	return trial;
3866       if (!NEXT_INSN (insn_last))
3867 	break;
3868       insn_last = NEXT_INSN (insn_last);
3869     }
3870 
3871   /* We will be adding the new sequence to the function.  The splitters
3872      may have introduced invalid RTL sharing, so unshare the sequence now.  */
3873   unshare_all_rtl_in_chain (seq);
3874 
3875   /* Mark labels and copy flags.  */
3876   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3877     {
3878       if (JUMP_P (insn))
3879 	{
3880 	  if (JUMP_P (trial))
3881 	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3882 	  mark_jump_label (PATTERN (insn), insn, 0);
3883 	  njumps++;
3884 	  if (probability.initialized_p ()
3885 	      && any_condjump_p (insn)
3886 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3887 	    {
3888 	      /* We can preserve the REG_BR_PROB notes only if exactly
3889 		 one jump is created, otherwise the machine description
3890 		 is responsible for this step using
3891 		 split_branch_probability variable.  */
3892 	      gcc_assert (njumps == 1);
3893 	      add_reg_br_prob_note (insn, probability);
3894 	    }
3895 	}
3896     }
3897 
3898   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3899      in SEQ and copy any additional information across.  */
3900   if (CALL_P (trial))
3901     {
3902       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3903 	if (CALL_P (insn))
3904 	  {
3905 	    gcc_assert (call_insn == NULL_RTX);
3906 	    call_insn = insn;
3907 
3908 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3909 	       target may have explicitly specified.  */
3910 	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3911 	    while (*p)
3912 	      p = &XEXP (*p, 1);
3913 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3914 
3915 	    /* If the old call was a sibling call, the new one must
3916 	       be too.  */
3917 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3918 	  }
3919     }
3920 
3921   /* Copy notes, particularly those related to the CFG.  */
3922   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3923     {
3924       switch (REG_NOTE_KIND (note))
3925 	{
3926 	case REG_EH_REGION:
3927 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3928 	  break;
3929 
3930 	case REG_NORETURN:
3931 	case REG_SETJMP:
3932 	case REG_TM:
3933 	case REG_CALL_NOCF_CHECK:
3934 	case REG_CALL_ARG_LOCATION:
3935 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3936 	    {
3937 	      if (CALL_P (insn))
3938 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3939 	    }
3940 	  break;
3941 
3942 	case REG_NON_LOCAL_GOTO:
3943 	case REG_LABEL_TARGET:
3944 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3945 	    {
3946 	      if (JUMP_P (insn))
3947 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3948 	    }
3949 	  break;
3950 
3951 	case REG_INC:
3952 	  if (!AUTO_INC_DEC)
3953 	    break;
3954 
3955 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3956 	    {
3957 	      rtx reg = XEXP (note, 0);
3958 	      if (!FIND_REG_INC_NOTE (insn, reg)
3959 		  && find_auto_inc (PATTERN (insn), reg))
3960 		add_reg_note (insn, REG_INC, reg);
3961 	    }
3962 	  break;
3963 
3964 	case REG_ARGS_SIZE:
3965 	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3966 	  break;
3967 
3968 	case REG_CALL_DECL:
3969 	  gcc_assert (call_insn != NULL_RTX);
3970 	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3971 	  break;
3972 
3973 	default:
3974 	  break;
3975 	}
3976     }
3977 
3978   /* If there are LABELS inside the split insns increment the
3979      usage count so we don't delete the label.  */
3980   if (INSN_P (trial))
3981     {
3982       insn = insn_last;
3983       while (insn != NULL_RTX)
3984 	{
3985 	  /* JUMP_P insns have already been "marked" above.  */
3986 	  if (NONJUMP_INSN_P (insn))
3987 	    mark_label_nuses (PATTERN (insn));
3988 
3989 	  insn = PREV_INSN (insn);
3990 	}
3991     }
3992 
3993   before = PREV_INSN (trial);
3994   after = NEXT_INSN (trial);
3995 
3996   tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3997 
3998   delete_insn (trial);
3999 
4000   /* Recursively call try_split for each new insn created; by the
4001      time control returns here that insn will be fully split, so
4002      set LAST and continue from the insn after the one returned.
4003      We can't use next_active_insn here since AFTER may be a note.
4004      Ignore deleted insns, which can occur if not optimizing.  */
4005   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4006     if (! tem->deleted () && INSN_P (tem))
4007       tem = try_split (PATTERN (tem), tem, 1);
4008 
4009   /* Return either the first or the last insn, depending on which was
4010      requested.  */
4011   return last
4012     ? (after ? PREV_INSN (after) : get_last_insn ())
4013     : NEXT_INSN (before);
4014 }
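
/* Hedged usage sketch (the real drivers are the splitting passes
   elsewhere, e.g. in recog.c, and are not reproduced here): a caller
   typically hands try_split an insn's own pattern and continues from
   whatever comes back:

	rtx_insn *first = try_split (PATTERN (insn), insn, 0);
	if (first != insn)
	  ... INSN was replaced; FIRST is the first insn of the new sequence ...

   Passing LAST == 1 instead returns the final insn of the replacement
   sequence.  */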
4015 
4016 /* Make and return an INSN rtx, initializing all its slots.
4017    Store PATTERN in the pattern slots.  */
4018 
4019 rtx_insn *
4020 make_insn_raw (rtx pattern)
4021 {
4022   rtx_insn *insn;
4023 
4024   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
4025 
4026   INSN_UID (insn) = cur_insn_uid++;
4027   PATTERN (insn) = pattern;
4028   INSN_CODE (insn) = -1;
4029   REG_NOTES (insn) = NULL;
4030   INSN_LOCATION (insn) = curr_insn_location ();
4031   BLOCK_FOR_INSN (insn) = NULL;
4032 
4033 #ifdef ENABLE_RTL_CHECKING
4034   if (insn
4035       && INSN_P (insn)
4036       && (returnjump_p (insn)
4037 	  || (GET_CODE (insn) == SET
4038 	      && SET_DEST (insn) == pc_rtx)))
4039     {
4040       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4041       debug_rtx (insn);
4042     }
4043 #endif
4044 
4045   return insn;
4046 }
4047 
4048 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
4049 
4050 static rtx_insn *
4051 make_debug_insn_raw (rtx pattern)
4052 {
4053   rtx_debug_insn *insn;
4054 
4055   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4056   INSN_UID (insn) = cur_debug_insn_uid++;
4057   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4058     INSN_UID (insn) = cur_insn_uid++;
4059 
4060   PATTERN (insn) = pattern;
4061   INSN_CODE (insn) = -1;
4062   REG_NOTES (insn) = NULL;
4063   INSN_LOCATION (insn) = curr_insn_location ();
4064   BLOCK_FOR_INSN (insn) = NULL;
4065 
4066   return insn;
4067 }
4068 
4069 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
4070 
4071 static rtx_insn *
4072 make_jump_insn_raw (rtx pattern)
4073 {
4074   rtx_jump_insn *insn;
4075 
4076   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4077   INSN_UID (insn) = cur_insn_uid++;
4078 
4079   PATTERN (insn) = pattern;
4080   INSN_CODE (insn) = -1;
4081   REG_NOTES (insn) = NULL;
4082   JUMP_LABEL (insn) = NULL;
4083   INSN_LOCATION (insn) = curr_insn_location ();
4084   BLOCK_FOR_INSN (insn) = NULL;
4085 
4086   return insn;
4087 }
4088 
4089 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
4090 
4091 static rtx_insn *
4092 make_call_insn_raw (rtx pattern)
4093 {
4094   rtx_call_insn *insn;
4095 
4096   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4097   INSN_UID (insn) = cur_insn_uid++;
4098 
4099   PATTERN (insn) = pattern;
4100   INSN_CODE (insn) = -1;
4101   REG_NOTES (insn) = NULL;
4102   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4103   INSN_LOCATION (insn) = curr_insn_location ();
4104   BLOCK_FOR_INSN (insn) = NULL;
4105 
4106   return insn;
4107 }
4108 
4109 /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
4110 
4111 static rtx_note *
4112 make_note_raw (enum insn_note subtype)
4113 {
4114   /* Some notes are never created this way at all.  These notes are
4115      only created by patching out insns.  */
4116   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4117 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4118 
4119   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4120   INSN_UID (note) = cur_insn_uid++;
4121   NOTE_KIND (note) = subtype;
4122   BLOCK_FOR_INSN (note) = NULL;
4123   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4124   return note;
4125 }
4126 
4127 /* Link INSN into the doubly-linked list between PREV and NEXT.
4128    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4129    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
4130 
4131 static inline void
4132 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4133 {
4134   SET_PREV_INSN (insn) = prev;
4135   SET_NEXT_INSN (insn) = next;
4136   if (prev != NULL)
4137     {
4138       SET_NEXT_INSN (prev) = insn;
4139       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4140 	{
4141 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4142 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4143 	}
4144     }
4145   if (next != NULL)
4146     {
4147       SET_PREV_INSN (next) = insn;
4148       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4149 	{
4150 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4151 	  SET_PREV_INSN (sequence->insn (0)) = insn;
4152 	}
4153     }
4154 
4155   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4156     {
4157       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4158       SET_PREV_INSN (sequence->insn (0)) = prev;
4159       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4160     }
4161 }
4162 
4163 /* Add INSN to the end of the doubly-linked list.
4164    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
4165 
4166 void
4167 add_insn (rtx_insn *insn)
4168 {
4169   rtx_insn *prev = get_last_insn ();
4170   link_insn_into_chain (insn, prev, NULL);
4171   if (get_insns () == NULL)
4172     set_first_insn (insn);
4173   set_last_insn (insn);
4174 }
4175 
4176 /* Add INSN into the doubly-linked list after insn AFTER.  */
4177 
4178 static void
4179 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4180 {
4181   rtx_insn *next = NEXT_INSN (after);
4182 
4183   gcc_assert (!optimize || !after->deleted ());
4184 
4185   link_insn_into_chain (insn, after, next);
4186 
4187   if (next == NULL)
4188     {
4189       struct sequence_stack *seq;
4190 
4191       for (seq = get_current_sequence (); seq; seq = seq->next)
4192 	if (after == seq->last)
4193 	  {
4194 	    seq->last = insn;
4195 	    break;
4196 	  }
4197     }
4198 }
4199 
4200 /* Add INSN into the doubly-linked list before insn BEFORE.  */
4201 
4202 static void
4203 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4204 {
4205   rtx_insn *prev = PREV_INSN (before);
4206 
4207   gcc_assert (!optimize || !before->deleted ());
4208 
4209   link_insn_into_chain (insn, prev, before);
4210 
4211   if (prev == NULL)
4212     {
4213       struct sequence_stack *seq;
4214 
4215       for (seq = get_current_sequence (); seq; seq = seq->next)
4216 	if (before == seq->first)
4217 	  {
4218 	    seq->first = insn;
4219 	    break;
4220 	  }
4221 
4222       gcc_assert (seq);
4223     }
4224 }
4225 
4226 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4227    If BB is NULL, an attempt is made to infer the bb from AFTER.
4228 
4229    This and the next function should be the only functions called
4230    to insert an insn once delay slots have been filled since only
4231    they know how to update a SEQUENCE. */
4232 
4233 void
4234 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4235 {
4236   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4237   rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4238   add_insn_after_nobb (insn, after);
4239   if (!BARRIER_P (after)
4240       && !BARRIER_P (insn)
4241       && (bb = BLOCK_FOR_INSN (after)))
4242     {
4243       set_block_for_insn (insn, bb);
4244       if (INSN_P (insn))
4245 	df_insn_rescan (insn);
4246       /* Should not happen, as the first insn in the BB is always
4247 	 either a NOTE or a LABEL.  */
4248       if (BB_END (bb) == after
4249 	  /* Avoid clobbering of structure when creating new BB.  */
4250 	  && !BARRIER_P (insn)
4251 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
4252 	BB_END (bb) = insn;
4253     }
4254 }
4255 
4256 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4257    If BB is NULL, an attempt is made to infer the bb from before.
4258 
4259    This and the previous function should be the only functions called
4260    to insert an insn once delay slots have been filled since only
4261    they know how to update a SEQUENCE. */
4262 
4263 void
4264 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4265 {
4266   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4267   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4268   add_insn_before_nobb (insn, before);
4269 
4270   if (!bb
4271       && !BARRIER_P (before)
4272       && !BARRIER_P (insn))
4273     bb = BLOCK_FOR_INSN (before);
4274 
4275   if (bb)
4276     {
4277       set_block_for_insn (insn, bb);
4278       if (INSN_P (insn))
4279 	df_insn_rescan (insn);
4280       /* Should not happen, as the first insn in the BB is always either a
4281 	 NOTE or a LABEL.  */
4282       gcc_assert (BB_HEAD (bb) != insn
4283 		  /* Avoid clobbering of structure when creating new BB.  */
4284 		  || BARRIER_P (insn)
4285 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
4286     }
4287 }
4288 
4289 /* Replace INSN with a deleted instruction note.  */
4290 
4291 void
4292 set_insn_deleted (rtx insn)
4293 {
4294   if (INSN_P (insn))
4295     df_insn_delete (as_a <rtx_insn *> (insn));
4296   PUT_CODE (insn, NOTE);
4297   NOTE_KIND (insn) = NOTE_INSN_DELETED;
4298 }
4299 
4300 
4301 /* Unlink INSN from the insn chain.
4302 
4303    This function knows how to handle sequences.
4304 
4305    This function does not invalidate data flow information associated with
4306    INSN (i.e. does not call df_insn_delete).  That makes this function
4307    usable for merely disconnecting an insn from the chain, so that it can
4308    be re-emitted elsewhere later.
4309 
4310    To later insert INSN elsewhere in the insn chain via add_insn and
4311    similar functions, PREV_INSN and NEXT_INSN must be nullified by
4312    the caller.  Nullifying them here breaks many insn chain walks.
4313 
4314    To really delete an insn and related DF information, use delete_insn.  */
4315 
4316 void
4317 remove_insn (rtx uncast_insn)
4318 {
4319   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4320   rtx_insn *next = NEXT_INSN (insn);
4321   rtx_insn *prev = PREV_INSN (insn);
4322   basic_block bb;
4323 
4324   if (prev)
4325     {
4326       SET_NEXT_INSN (prev) = next;
4327       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4328 	{
4329 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4330 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4331 	}
4332     }
4333   else
4334     {
4335       struct sequence_stack *seq;
4336 
4337       for (seq = get_current_sequence (); seq; seq = seq->next)
4338 	if (insn == seq->first)
4339 	  {
4340 	    seq->first = next;
4341 	    break;
4342 	  }
4343 
4344       gcc_assert (seq);
4345     }
4346 
4347   if (next)
4348     {
4349       SET_PREV_INSN (next) = prev;
4350       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4351 	{
4352 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4353 	  SET_PREV_INSN (sequence->insn (0)) = prev;
4354 	}
4355     }
4356   else
4357     {
4358       struct sequence_stack *seq;
4359 
4360       for (seq = get_current_sequence (); seq; seq = seq->next)
4361 	if (insn == seq->last)
4362 	  {
4363 	    seq->last = prev;
4364 	    break;
4365 	  }
4366 
4367       gcc_assert (seq);
4368     }
4369 
4370   /* Fix up basic block boundaries, if necessary.  */
4371   if (!BARRIER_P (insn)
4372       && (bb = BLOCK_FOR_INSN (insn)))
4373     {
4374       if (BB_HEAD (bb) == insn)
4375 	{
4376 	  /* Never ever delete the basic block note without deleting whole
4377 	     basic block.  */
4378 	  gcc_assert (!NOTE_P (insn));
4379 	  BB_HEAD (bb) = next;
4380 	}
4381       if (BB_END (bb) == insn)
4382 	BB_END (bb) = prev;
4383     }
4384 }
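
/* Hedged sketch of the detach-and-reinsert pattern described above
   (illustrative only; PLACE is a hypothetical insn after which INSN
   should reappear):

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, place, NULL);

   The two SET_*_INSN calls are the caller's responsibility, as noted in
   the comment before remove_insn.  */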
4385 
4386 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4387 
4388 void
4389 add_function_usage_to (rtx call_insn, rtx call_fusage)
4390 {
4391   gcc_assert (call_insn && CALL_P (call_insn));
4392 
4393   /* Put the register usage information on the CALL.  If there is already
4394      some usage information, put ours at the end.  */
4395   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4396     {
4397       rtx link;
4398 
4399       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4400 	   link = XEXP (link, 1))
4401 	;
4402 
4403       XEXP (link, 1) = call_fusage;
4404     }
4405   else
4406     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4407 }
4408 
4409 /* Delete all insns made since FROM.
4410    FROM becomes the new last instruction.  */
4411 
4412 void
4413 delete_insns_since (rtx_insn *from)
4414 {
4415   if (from == 0)
4416     set_first_insn (0);
4417   else
4418     SET_NEXT_INSN (from) = 0;
4419   set_last_insn (from);
4420 }
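
/* A common idiom built on delete_insns_since (hedged sketch; expansion
   code uses this pattern when a code-generation attempt may fail):

	rtx_insn *last = get_last_insn ();
	... emit a tentative sequence of insns ...
	if (the attempt did not work out)
	  delete_insns_since (last);

   Everything emitted after LAST is discarded and LAST becomes the last
   insn again.  */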
4421 
4422 /* This function is deprecated, please use sequences instead.
4423 
4424    Move a consecutive bunch of insns to a different place in the chain.
4425    The insns to be moved are those between FROM and TO.
4426    They are moved to a new position after the insn AFTER.
4427    AFTER must not be FROM or TO or any insn in between.
4428 
4429    This function does not know about SEQUENCEs and hence should not be
4430    called after delay-slot filling has been done.  */
4431 
4432 void
4433 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4434 {
4435   if (flag_checking)
4436     {
4437       for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4438 	gcc_assert (after != x);
4439       gcc_assert (after != to);
4440     }
4441 
4442   /* Splice this bunch out of where it is now.  */
4443   if (PREV_INSN (from))
4444     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4445   if (NEXT_INSN (to))
4446     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4447   if (get_last_insn () == to)
4448     set_last_insn (PREV_INSN (from));
4449   if (get_insns () == from)
4450     set_first_insn (NEXT_INSN (to));
4451 
4452   /* Make the new neighbors point to it and it to them.  */
4453   if (NEXT_INSN (after))
4454     SET_PREV_INSN (NEXT_INSN (after)) = to;
4455 
4456   SET_NEXT_INSN (to) = NEXT_INSN (after);
4457   SET_PREV_INSN (from) = after;
4458   SET_NEXT_INSN (after) = from;
4459   if (after == get_last_insn ())
4460     set_last_insn (to);
4461 }
4462 
4463 /* Same as function above, but take care to update BB boundaries.  */
4464 void
4465 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4466 {
4467   rtx_insn *prev = PREV_INSN (from);
4468   basic_block bb, bb2;
4469 
4470   reorder_insns_nobb (from, to, after);
4471 
4472   if (!BARRIER_P (after)
4473       && (bb = BLOCK_FOR_INSN (after)))
4474     {
4475       rtx_insn *x;
4476       df_set_bb_dirty (bb);
4477 
4478       if (!BARRIER_P (from)
4479 	  && (bb2 = BLOCK_FOR_INSN (from)))
4480 	{
4481 	  if (BB_END (bb2) == to)
4482 	    BB_END (bb2) = prev;
4483 	  df_set_bb_dirty (bb2);
4484 	}
4485 
4486       if (BB_END (bb) == after)
4487 	BB_END (bb) = to;
4488 
4489       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4490 	if (!BARRIER_P (x))
4491 	  df_insn_change_bb (x, bb);
4492     }
4493 }
4494 
4495 
4496 /* Emit insn(s) of given code and pattern
4497    at a specified place within the doubly-linked list.
4498 
4499    All of the emit_foo global entry points accept an object
4500    X which is either an insn list or a PATTERN of a single
4501    instruction.
4502 
4503    There are thus a few canonical ways to generate code and
4504    emit it at a specific place in the instruction stream.  For
4505    example, consider the instruction named SPOT and the fact that
4506    we would like to emit some instructions before SPOT.  We might
4507    do it like this:
4508 
4509 	start_sequence ();
4510 	... emit the new instructions ...
4511 	insns_head = get_insns ();
4512 	end_sequence ();
4513 
4514 	emit_insn_before (insns_head, SPOT);
4515 
4516    It used to be common to generate SEQUENCE rtl instead, but that
4517    is a relic of the past which no longer occurs.  The reason is that
4518    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4519    generated would almost certainly die right after it was created.  */
4520 
4521 static rtx_insn *
4522 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4523                            rtx_insn *(*make_raw) (rtx))
4524 {
4525   rtx_insn *insn;
4526 
4527   gcc_assert (before);
4528 
4529   if (x == NULL_RTX)
4530     return safe_as_a <rtx_insn *> (last);
4531 
4532   switch (GET_CODE (x))
4533     {
4534     case DEBUG_INSN:
4535     case INSN:
4536     case JUMP_INSN:
4537     case CALL_INSN:
4538     case CODE_LABEL:
4539     case BARRIER:
4540     case NOTE:
4541       insn = as_a <rtx_insn *> (x);
4542       while (insn)
4543 	{
4544 	  rtx_insn *next = NEXT_INSN (insn);
4545 	  add_insn_before (insn, before, bb);
4546 	  last = insn;
4547 	  insn = next;
4548 	}
4549       break;
4550 
4551 #ifdef ENABLE_RTL_CHECKING
4552     case SEQUENCE:
4553       gcc_unreachable ();
4554       break;
4555 #endif
4556 
4557     default:
4558       last = (*make_raw) (x);
4559       add_insn_before (last, before, bb);
4560       break;
4561     }
4562 
4563   return safe_as_a <rtx_insn *> (last);
4564 }
4565 
4566 /* Make X be output before the instruction BEFORE.  */
4567 
4568 rtx_insn *
4569 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4570 {
4571   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4572 }
4573 
4574 /* Make an instruction with body X and code JUMP_INSN
4575    and output it before the instruction BEFORE.  */
4576 
4577 rtx_jump_insn *
4578 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4579 {
4580   return as_a <rtx_jump_insn *> (
4581 		emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4582 					   make_jump_insn_raw));
4583 }
4584 
4585 /* Make an instruction with body X and code CALL_INSN
4586    and output it before the instruction BEFORE.  */
4587 
4588 rtx_insn *
4589 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4590 {
4591   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4592 				    make_call_insn_raw);
4593 }
4594 
4595 /* Make an instruction with body X and code DEBUG_INSN
4596    and output it before the instruction BEFORE.  */
4597 
4598 rtx_insn *
4599 emit_debug_insn_before_noloc (rtx x, rtx before)
4600 {
4601   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4602 				    make_debug_insn_raw);
4603 }
4604 
4605 /* Make an insn of code BARRIER
4606    and output it before the insn BEFORE.  */
4607 
4608 rtx_barrier *
4609 emit_barrier_before (rtx before)
4610 {
4611   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4612 
4613   INSN_UID (insn) = cur_insn_uid++;
4614 
4615   add_insn_before (insn, before, NULL);
4616   return insn;
4617 }
4618 
4619 /* Emit the label LABEL before the insn BEFORE.  */
4620 
4621 rtx_code_label *
4622 emit_label_before (rtx label, rtx_insn *before)
4623 {
4624   gcc_checking_assert (INSN_UID (label) == 0);
4625   INSN_UID (label) = cur_insn_uid++;
4626   add_insn_before (label, before, NULL);
4627   return as_a <rtx_code_label *> (label);
4628 }
4629 
4630 /* Helper for emit_insn_after, handles lists of instructions
4631    efficiently.  */
4632 
4633 static rtx_insn *
4634 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4635 {
4636   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4637   rtx_insn *last;
4638   rtx_insn *after_after;
4639   if (!bb && !BARRIER_P (after))
4640     bb = BLOCK_FOR_INSN (after);
4641 
4642   if (bb)
4643     {
4644       df_set_bb_dirty (bb);
4645       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4646 	if (!BARRIER_P (last))
4647 	  {
4648 	    set_block_for_insn (last, bb);
4649 	    df_insn_rescan (last);
4650 	  }
4651       if (!BARRIER_P (last))
4652 	{
4653 	  set_block_for_insn (last, bb);
4654 	  df_insn_rescan (last);
4655 	}
4656       if (BB_END (bb) == after)
4657 	BB_END (bb) = last;
4658     }
4659   else
4660     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4661       continue;
4662 
4663   after_after = NEXT_INSN (after);
4664 
4665   SET_NEXT_INSN (after) = first;
4666   SET_PREV_INSN (first) = after;
4667   SET_NEXT_INSN (last) = after_after;
4668   if (after_after)
4669     SET_PREV_INSN (after_after) = last;
4670 
4671   if (after == get_last_insn ())
4672     set_last_insn (last);
4673 
4674   return last;
4675 }
4676 
4677 static rtx_insn *
4678 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4679 			  rtx_insn *(*make_raw)(rtx))
4680 {
4681   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4682   rtx_insn *last = after;
4683 
4684   gcc_assert (after);
4685 
4686   if (x == NULL_RTX)
4687     return last;
4688 
4689   switch (GET_CODE (x))
4690     {
4691     case DEBUG_INSN:
4692     case INSN:
4693     case JUMP_INSN:
4694     case CALL_INSN:
4695     case CODE_LABEL:
4696     case BARRIER:
4697     case NOTE:
4698       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4699       break;
4700 
4701 #ifdef ENABLE_RTL_CHECKING
4702     case SEQUENCE:
4703       gcc_unreachable ();
4704       break;
4705 #endif
4706 
4707     default:
4708       last = (*make_raw) (x);
4709       add_insn_after (last, after, bb);
4710       break;
4711     }
4712 
4713   return last;
4714 }
4715 
4716 /* Make X be output after the insn AFTER and set the BB of insn.  If
4717    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4718 
4719 rtx_insn *
4720 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4721 {
4722   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4723 }
4724 
4725 
4726 /* Make an insn of code JUMP_INSN with body X
4727    and output it after the insn AFTER.  */
4728 
4729 rtx_jump_insn *
4730 emit_jump_insn_after_noloc (rtx x, rtx after)
4731 {
4732   return as_a <rtx_jump_insn *> (
4733 		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4734 }
4735 
4736 /* Make an instruction with body X and code CALL_INSN
4737    and output it after the instruction AFTER.  */
4738 
4739 rtx_insn *
4740 emit_call_insn_after_noloc (rtx x, rtx after)
4741 {
4742   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4743 }
4744 
4745 /* Make an instruction with body X and code DEBUG_INSN
4746    and output it after the instruction AFTER.  */
4747 
4748 rtx_insn *
4749 emit_debug_insn_after_noloc (rtx x, rtx after)
4750 {
4751   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4752 }
4753 
4754 /* Make an insn of code BARRIER
4755    and output it after the insn AFTER.  */
4756 
4757 rtx_barrier *
4758 emit_barrier_after (rtx after)
4759 {
4760   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4761 
4762   INSN_UID (insn) = cur_insn_uid++;
4763 
4764   add_insn_after (insn, after, NULL);
4765   return insn;
4766 }
4767 
4768 /* Emit the label LABEL after the insn AFTER.  */
4769 
4770 rtx_insn *
4771 emit_label_after (rtx label, rtx_insn *after)
4772 {
4773   gcc_checking_assert (INSN_UID (label) == 0);
4774   INSN_UID (label) = cur_insn_uid++;
4775   add_insn_after (label, after, NULL);
4776   return as_a <rtx_insn *> (label);
4777 }
4778 
4779 /* Notes require a bit of special handling: Some notes need to have their
4780    BLOCK_FOR_INSN set, others should never have it set, and some should
4781    have it set or clear depending on the context.   */
4782 
4783 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4784    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
4785    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4786 
4787 static bool
4788 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4789 {
4790   switch (subtype)
4791     {
4792       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
4793       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4794 	return true;
4795 
4796       /* Notes for var tracking and EH region markers can appear between or
4797 	 inside basic blocks.  If the caller is emitting on the basic block
4798 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
4799       case NOTE_INSN_VAR_LOCATION:
4800       case NOTE_INSN_EH_REGION_BEG:
4801       case NOTE_INSN_EH_REGION_END:
4802 	return on_bb_boundary_p;
4803 
4804       /* Otherwise, BLOCK_FOR_INSN must be set.  */
4805       default:
4806 	return false;
4807     }
4808 }
4809 
4810 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4811 
4812 rtx_note *
4813 emit_note_after (enum insn_note subtype, rtx_insn *after)
4814 {
4815   rtx_note *note = make_note_raw (subtype);
4816   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4817   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4818 
4819   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4820     add_insn_after_nobb (note, after);
4821   else
4822     add_insn_after (note, after, bb);
4823   return note;
4824 }
4825 
4826 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4827 
4828 rtx_note *
4829 emit_note_before (enum insn_note subtype, rtx_insn *before)
4830 {
4831   rtx_note *note = make_note_raw (subtype);
4832   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4833   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4834 
4835   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4836     add_insn_before_nobb (note, before);
4837   else
4838     add_insn_before (note, before, bb);
4839   return note;
4840 }
4841 
4842 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4843    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
4844 
4845 static rtx_insn *
4846 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4847 			   rtx_insn *(*make_raw) (rtx))
4848 {
4849   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4850   rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4851 
4852   if (pattern == NULL_RTX || !loc)
4853     return last;
4854 
4855   after = NEXT_INSN (after);
4856   while (1)
4857     {
4858       if (active_insn_p (after)
4859 	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
4860 	  && !INSN_LOCATION (after))
4861 	INSN_LOCATION (after) = loc;
4862       if (after == last)
4863 	break;
4864       after = NEXT_INSN (after);
4865     }
4866   return last;
4867 }
4868 
4869 /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
4870    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
4871    any DEBUG_INSNs.  */
4872 
4873 static rtx_insn *
4874 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4875 		    rtx_insn *(*make_raw) (rtx))
4876 {
4877   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4878   rtx_insn *prev = after;
4879 
4880   if (skip_debug_insns)
4881     while (DEBUG_INSN_P (prev))
4882       prev = PREV_INSN (prev);
4883 
4884   if (INSN_P (prev))
4885     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4886 				      make_raw);
4887   else
4888     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4889 }
4890 
4891 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4892 rtx_insn *
4893 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4894 {
4895   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4896 }
4897 
4898 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4899 rtx_insn *
4900 emit_insn_after (rtx pattern, rtx after)
4901 {
4902   return emit_pattern_after (pattern, after, true, make_insn_raw);
4903 }
4904 
4905 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4906 rtx_jump_insn *
4907 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4908 {
4909   return as_a <rtx_jump_insn *> (
4910 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4911 }
4912 
4913 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4914 rtx_jump_insn *
4915 emit_jump_insn_after (rtx pattern, rtx after)
4916 {
4917   return as_a <rtx_jump_insn *> (
4918 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4919 }
4920 
4921 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4922 rtx_insn *
4923 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4924 {
4925   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4926 }
4927 
4928 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4929 rtx_insn *
4930 emit_call_insn_after (rtx pattern, rtx after)
4931 {
4932   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4933 }
4934 
4935 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4936 rtx_insn *
4937 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4938 {
4939   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4940 }
4941 
4942 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4943 rtx_insn *
4944 emit_debug_insn_after (rtx pattern, rtx after)
4945 {
4946   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4947 }
4948 
4949 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4950    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
4951    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4952    CALL_INSN, etc.  */
4953 
4954 static rtx_insn *
4955 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4956 			    rtx_insn *(*make_raw) (rtx))
4957 {
4958   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4959   rtx_insn *first = PREV_INSN (before);
4960   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4961 					      insnp ? before : NULL_RTX,
4962 					      NULL, make_raw);
4963 
4964   if (pattern == NULL_RTX || !loc)
4965     return last;
4966 
4967   if (!first)
4968     first = get_insns ();
4969   else
4970     first = NEXT_INSN (first);
4971   while (1)
4972     {
4973       if (active_insn_p (first)
4974 	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
4975 	  && !INSN_LOCATION (first))
4976 	INSN_LOCATION (first) = loc;
4977       if (first == last)
4978 	break;
4979       first = NEXT_INSN (first);
4980     }
4981   return last;
4982 }
4983 
4984 /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
4985    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
4986    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
4987    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
4988 
4989 static rtx_insn *
4990 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4991 		     bool insnp, rtx_insn *(*make_raw) (rtx))
4992 {
4993   rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4994   rtx_insn *next = before;
4995 
4996   if (skip_debug_insns)
4997     while (DEBUG_INSN_P (next))
4998       next = PREV_INSN (next);
4999 
5000   if (INSN_P (next))
5001     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
5002 				       insnp, make_raw);
5003   else
5004     return emit_pattern_before_noloc (pattern, before,
5005 				      insnp ? before : NULL_RTX,
5006                                       NULL, make_raw);
5007 }
5008 
5009 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5010 rtx_insn *
5011 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5012 {
5013   return emit_pattern_before_setloc (pattern, before, loc, true,
5014 				     make_insn_raw);
5015 }
5016 
5017 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
5018 rtx_insn *
5019 emit_insn_before (rtx pattern, rtx before)
5020 {
5021   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
5022 }
5023 
5024 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5025 rtx_jump_insn *
5026 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5027 {
5028   return as_a <rtx_jump_insn *> (
5029 	emit_pattern_before_setloc (pattern, before, loc, false,
5030 				    make_jump_insn_raw));
5031 }
5032 
5033 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
5034 rtx_jump_insn *
5035 emit_jump_insn_before (rtx pattern, rtx before)
5036 {
5037   return as_a <rtx_jump_insn *> (
5038 	emit_pattern_before (pattern, before, true, false,
5039 			     make_jump_insn_raw));
5040 }
5041 
5042 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5043 rtx_insn *
5044 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5045 {
5046   return emit_pattern_before_setloc (pattern, before, loc, false,
5047 				     make_call_insn_raw);
5048 }
5049 
5050 /* Like emit_call_insn_before_noloc,
5051    but set insn_location according to BEFORE.  */
5052 rtx_insn *
5053 emit_call_insn_before (rtx pattern, rtx_insn *before)
5054 {
5055   return emit_pattern_before (pattern, before, true, false,
5056 			      make_call_insn_raw);
5057 }
5058 
5059 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5060 rtx_insn *
5061 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
5062 {
5063   return emit_pattern_before_setloc (pattern, before, loc, false,
5064 				     make_debug_insn_raw);
5065 }
5066 
5067 /* Like emit_debug_insn_before_noloc,
5068    but set INSN_LOCATION according to BEFORE.  */
5069 rtx_insn *
5070 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5071 {
5072   return emit_pattern_before (pattern, before, false, false,
5073 			      make_debug_insn_raw);
5074 }
5075 
5076 /* Take X and emit it at the end of the doubly-linked
5077    INSN list.
5078 
5079    Returns the last insn emitted.  */
5080 
5081 rtx_insn *
5082 emit_insn (rtx x)
5083 {
5084   rtx_insn *last = get_last_insn ();
5085   rtx_insn *insn;
5086 
5087   if (x == NULL_RTX)
5088     return last;
5089 
5090   switch (GET_CODE (x))
5091     {
5092     case DEBUG_INSN:
5093     case INSN:
5094     case JUMP_INSN:
5095     case CALL_INSN:
5096     case CODE_LABEL:
5097     case BARRIER:
5098     case NOTE:
5099       insn = as_a <rtx_insn *> (x);
5100       while (insn)
5101 	{
5102 	  rtx_insn *next = NEXT_INSN (insn);
5103 	  add_insn (insn);
5104 	  last = insn;
5105 	  insn = next;
5106 	}
5107       break;
5108 
5109 #ifdef ENABLE_RTL_CHECKING
5110     case JUMP_TABLE_DATA:
5111     case SEQUENCE:
5112       gcc_unreachable ();
5113       break;
5114 #endif
5115 
5116     default:
5117       last = make_insn_raw (x);
5118       add_insn (last);
5119       break;
5120     }
5121 
5122   return last;
5123 }
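
/* Illustrative note (not original to this file): emit_insn accepts either
   a bare pattern, e.g. emit_insn (gen_rtx_SET (dest, src)) with DEST and
   SRC standing for caller-built rtxes, in which case a fresh INSN is
   created, or an already-made chain of insns (such as the result of
   get_insns after end_sequence), in which case the whole chain is spliced
   onto the end of the current sequence.  */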
5124 
5125 /* Make an insn of code DEBUG_INSN with pattern X
5126    and add it to the end of the doubly-linked list.  */
5127 
5128 rtx_insn *
5129 emit_debug_insn (rtx x)
5130 {
5131   rtx_insn *last = get_last_insn ();
5132   rtx_insn *insn;
5133 
5134   if (x == NULL_RTX)
5135     return last;
5136 
5137   switch (GET_CODE (x))
5138     {
5139     case DEBUG_INSN:
5140     case INSN:
5141     case JUMP_INSN:
5142     case CALL_INSN:
5143     case CODE_LABEL:
5144     case BARRIER:
5145     case NOTE:
5146       insn = as_a <rtx_insn *> (x);
5147       while (insn)
5148 	{
5149 	  rtx_insn *next = NEXT_INSN (insn);
5150 	  add_insn (insn);
5151 	  last = insn;
5152 	  insn = next;
5153 	}
5154       break;
5155 
5156 #ifdef ENABLE_RTL_CHECKING
5157     case JUMP_TABLE_DATA:
5158     case SEQUENCE:
5159       gcc_unreachable ();
5160       break;
5161 #endif
5162 
5163     default:
5164       last = make_debug_insn_raw (x);
5165       add_insn (last);
5166       break;
5167     }
5168 
5169   return last;
5170 }
5171 
5172 /* Make an insn of code JUMP_INSN with pattern X
5173    and add it to the end of the doubly-linked list.  */
5174 
5175 rtx_insn *
5176 emit_jump_insn (rtx x)
5177 {
5178   rtx_insn *last = NULL;
5179   rtx_insn *insn;
5180 
5181   switch (GET_CODE (x))
5182     {
5183     case DEBUG_INSN:
5184     case INSN:
5185     case JUMP_INSN:
5186     case CALL_INSN:
5187     case CODE_LABEL:
5188     case BARRIER:
5189     case NOTE:
5190       insn = as_a <rtx_insn *> (x);
5191       while (insn)
5192 	{
5193 	  rtx_insn *next = NEXT_INSN (insn);
5194 	  add_insn (insn);
5195 	  last = insn;
5196 	  insn = next;
5197 	}
5198       break;
5199 
5200 #ifdef ENABLE_RTL_CHECKING
5201     case JUMP_TABLE_DATA:
5202     case SEQUENCE:
5203       gcc_unreachable ();
5204       break;
5205 #endif
5206 
5207     default:
5208       last = make_jump_insn_raw (x);
5209       add_insn (last);
5210       break;
5211     }
5212 
5213   return last;
5214 }
5215 
5216 /* Make an insn of code CALL_INSN with pattern X
5217    and add it to the end of the doubly-linked list.  */
5218 
5219 rtx_insn *
5220 emit_call_insn (rtx x)
5221 {
5222   rtx_insn *insn;
5223 
5224   switch (GET_CODE (x))
5225     {
5226     case DEBUG_INSN:
5227     case INSN:
5228     case JUMP_INSN:
5229     case CALL_INSN:
5230     case CODE_LABEL:
5231     case BARRIER:
5232     case NOTE:
5233       insn = emit_insn (x);
5234       break;
5235 
5236 #ifdef ENABLE_RTL_CHECKING
5237     case SEQUENCE:
5238     case JUMP_TABLE_DATA:
5239       gcc_unreachable ();
5240       break;
5241 #endif
5242 
5243     default:
5244       insn = make_call_insn_raw (x);
5245       add_insn (insn);
5246       break;
5247     }
5248 
5249   return insn;
5250 }
5251 
5252 /* Add the label LABEL to the end of the doubly-linked list.  */
5253 
5254 rtx_code_label *
5255 emit_label (rtx uncast_label)
5256 {
5257   rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5258 
5259   gcc_checking_assert (INSN_UID (label) == 0);
5260   INSN_UID (label) = cur_insn_uid++;
5261   add_insn (label);
5262   return label;
5263 }
5264 
5265 /* Make an insn of code JUMP_TABLE_DATA
5266    and add it to the end of the doubly-linked list.  */
5267 
5268 rtx_jump_table_data *
5269 emit_jump_table_data (rtx table)
5270 {
5271   rtx_jump_table_data *jump_table_data =
5272     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5273   INSN_UID (jump_table_data) = cur_insn_uid++;
5274   PATTERN (jump_table_data) = table;
5275   BLOCK_FOR_INSN (jump_table_data) = NULL;
5276   add_insn (jump_table_data);
5277   return jump_table_data;
5278 }
5279 
5280 /* Make an insn of code BARRIER
5281    and add it to the end of the doubly-linked list.  */
5282 
5283 rtx_barrier *
5284 emit_barrier (void)
5285 {
5286   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5287   INSN_UID (barrier) = cur_insn_uid++;
5288   add_insn (barrier);
5289   return barrier;
5290 }
5291 
5292 /* Emit a copy of note ORIG.  */
5293 
5294 rtx_note *
5295 emit_note_copy (rtx_note *orig)
5296 {
5297   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5298   rtx_note *note = make_note_raw (kind);
5299   NOTE_DATA (note) = NOTE_DATA (orig);
5300   add_insn (note);
5301   return note;
5302 }
5303 
5304 /* Make an insn of code NOTE with kind KIND
5305    and add it to the end of the doubly-linked list.  */
5306 
5307 rtx_note *
5308 emit_note (enum insn_note kind)
5309 {
5310   rtx_note *note = make_note_raw (kind);
5311   add_insn (note);
5312   return note;
5313 }
5314 
5315 /* Emit a clobber of lvalue X.  */
5316 
5317 rtx_insn *
5318 emit_clobber (rtx x)
5319 {
5320   /* CONCATs should not appear in the insn stream.  */
5321   if (GET_CODE (x) == CONCAT)
5322     {
5323       emit_clobber (XEXP (x, 0));
5324       return emit_clobber (XEXP (x, 1));
5325     }
5326   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5327 }
5328 
5329 /* Return a sequence of insns to clobber lvalue X.  */
5330 
5331 rtx_insn *
5332 gen_clobber (rtx x)
5333 {
5334   rtx_insn *seq;
5335 
5336   start_sequence ();
5337   emit_clobber (x);
5338   seq = get_insns ();
5339   end_sequence ();
5340   return seq;
5341 }
5342 
5343 /* Emit a use of rvalue X.  */
5344 
5345 rtx_insn *
5346 emit_use (rtx x)
5347 {
5348   /* CONCATs should not appear in the insn stream.  */
5349   if (GET_CODE (x) == CONCAT)
5350     {
5351       emit_use (XEXP (x, 0));
5352       return emit_use (XEXP (x, 1));
5353     }
5354   return emit_insn (gen_rtx_USE (VOIDmode, x));
5355 }
5356 
5357 /* Return a sequence of insns to use rvalue X.  */
5358 
5359 rtx_insn *
5360 gen_use (rtx x)
5361 {
5362   rtx_insn *seq;
5363 
5364   start_sequence ();
5365   emit_use (x);
5366   seq = get_insns ();
5367   end_sequence ();
5368   return seq;
5369 }
5370 
5371 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5372    Return the set in INSN that such notes describe, or NULL if the notes
5373    have no meaning for INSN.  */
5374 
5375 rtx
5376 set_for_reg_notes (rtx insn)
5377 {
5378   rtx pat, reg;
5379 
5380   if (!INSN_P (insn))
5381     return NULL_RTX;
5382 
5383   pat = PATTERN (insn);
5384   if (GET_CODE (pat) == PARALLEL)
5385     {
5386       /* We do not use single_set because that ignores SETs of unused
5387 	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
5388 	 PARALLEL to have a single SET.  */
5389       if (multiple_sets (insn))
5390 	return NULL_RTX;
5391       pat = XVECEXP (pat, 0, 0);
5392     }
5393 
5394   if (GET_CODE (pat) != SET)
5395     return NULL_RTX;
5396 
5397   reg = SET_DEST (pat);
5398 
5399   /* Notes apply to the contents of a STRICT_LOW_PART.  */
5400   if (GET_CODE (reg) == STRICT_LOW_PART
5401       || GET_CODE (reg) == ZERO_EXTRACT)
5402     reg = XEXP (reg, 0);
5403 
5404   /* Check that we have a register.  */
5405   if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5406     return NULL_RTX;
5407 
5408   return pat;
5409 }
5410 
5411 /* Place a note of KIND on insn INSN with DATUM as the datum.  If a
5412    note of this kind already exists, its datum is replaced.  */
5413 
5414 rtx
5415 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5416 {
5417   rtx note = find_reg_note (insn, kind, NULL_RTX);
5418 
5419   switch (kind)
5420     {
5421     case REG_EQUAL:
5422     case REG_EQUIV:
5423       /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
5424       if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5425 	return NULL_RTX;
5426 
5427       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5428 	 It serves no useful purpose and breaks eliminate_regs.  */
5429       if (GET_CODE (datum) == ASM_OPERANDS)
5430 	return NULL_RTX;
5431 
5432       /* Notes with side effects are dangerous.  Even if the side-effect
5433 	 initially mirrors one in PATTERN (INSN), later optimizations
5434 	 might alter the way that the final register value is calculated
5435 	 and so move or alter the side-effect in some way.  The note would
5436 	 then no longer be a valid substitution for SET_SRC.  */
5437       if (side_effects_p (datum))
5438 	return NULL_RTX;
5439       break;
5440 
5441     default:
5442       break;
5443     }
5444 
5445   if (note)
5446     XEXP (note, 0) = datum;
5447   else
5448     {
5449       add_reg_note (insn, kind, datum);
5450       note = REG_NOTES (insn);
5451     }
5452 
5453   switch (kind)
5454     {
5455     case REG_EQUAL:
5456     case REG_EQUIV:
5457       df_notes_rescan (as_a <rtx_insn *> (insn));
5458       break;
5459     default:
5460       break;
5461     }
5462 
5463   return note;
5464 }
5465 
5466 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5467 rtx
5468 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5469 {
5470   rtx set = set_for_reg_notes (insn);
5471 
5472   if (set && SET_DEST (set) == dst)
5473     return set_unique_reg_note (insn, kind, datum);
5474   return NULL_RTX;
5475 }
5476 
5477 /* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
5478    following barrier if the instruction needs one and if ALLOW_BARRIER_P
5479    is true.
5480 
5481    If X is a label, it is simply added into the insn chain.  */
5482 
5483 rtx_insn *
5484 emit (rtx x, bool allow_barrier_p)
5485 {
5486   enum rtx_code code = classify_insn (x);
5487 
5488   switch (code)
5489     {
5490     case CODE_LABEL:
5491       return emit_label (x);
5492     case INSN:
5493       return emit_insn (x);
5494     case JUMP_INSN:
5495       {
5496 	rtx_insn *insn = emit_jump_insn (x);
5497 	if (allow_barrier_p
5498 	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5499 	  return emit_barrier ();
5500 	return insn;
5501       }
5502     case CALL_INSN:
5503       return emit_call_insn (x);
5504     case DEBUG_INSN:
5505       return emit_debug_insn (x);
5506     default:
5507       gcc_unreachable ();
5508     }
5509 }
5510 
5511 /* Space for free sequence stack entries.  */
5512 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5513 
5514 /* Begin emitting insns to a sequence.  If this sequence will contain
5515    something that might cause the compiler to pop arguments to function
5516    calls (because those pops have previously been deferred; see
5517    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5518    before calling this function.  That will ensure that the deferred
5519    pops are not accidentally emitted in the middle of this sequence.  */
5520 
5521 void
5522 start_sequence (void)
5523 {
5524   struct sequence_stack *tem;
5525 
5526   if (free_sequence_stack != NULL)
5527     {
5528       tem = free_sequence_stack;
5529       free_sequence_stack = tem->next;
5530     }
5531   else
5532     tem = ggc_alloc<sequence_stack> ();
5533 
5534   tem->next = get_current_sequence ()->next;
5535   tem->first = get_insns ();
5536   tem->last = get_last_insn ();
5537   get_current_sequence ()->next = tem;
5538 
5539   set_first_insn (0);
5540   set_last_insn (0);
5541 }
5542 
5543 /* Set up the insn chain starting with FIRST as the current sequence,
5544    saving the previously current one.  See the documentation for
5545    start_sequence for more information about how to use this function.  */
5546 
5547 void
5548 push_to_sequence (rtx_insn *first)
5549 {
5550   rtx_insn *last;
5551 
5552   start_sequence ();
5553 
5554   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5555     ;
5556 
5557   set_first_insn (first);
5558   set_last_insn (last);
5559 }
5560 
5561 /* Like push_to_sequence, but take the last insn as an argument to avoid
5562    looping through the list.  */
5563 
5564 void
5565 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5566 {
5567   start_sequence ();
5568 
5569   set_first_insn (first);
5570   set_last_insn (last);
5571 }
5572 
5573 /* Set up the outer-level insn chain
5574    as the current sequence, saving the previously current one.  */
5575 
5576 void
5577 push_topmost_sequence (void)
5578 {
5579   struct sequence_stack *top;
5580 
5581   start_sequence ();
5582 
5583   top = get_topmost_sequence ();
5584   set_first_insn (top->first);
5585   set_last_insn (top->last);
5586 }
5587 
5588 /* After emitting to the outer-level insn chain, update the outer-level
5589    insn chain, and restore the previous saved state.  */
5590 
5591 void
5592 pop_topmost_sequence (void)
5593 {
5594   struct sequence_stack *top;
5595 
5596   top = get_topmost_sequence ();
5597   top->first = get_insns ();
5598   top->last = get_last_insn ();
5599 
5600   end_sequence ();
5601 }
5602 
5603 /* After emitting to a sequence, restore previous saved state.
5604 
5605    To get the contents of the sequence just made, you must call
5606    `get_insns' *before* calling here.
5607 
5608    If the compiler might have deferred popping arguments while
5609    generating this sequence, and this sequence will not be immediately
5610    inserted into the instruction stream, use do_pending_stack_adjust
5611    before calling get_insns.  That will ensure that the deferred
5612    pops are inserted into this sequence, and not into some random
5613    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5614    information about deferred popping of arguments.  */
5615 
5616 void
5617 end_sequence (void)
5618 {
5619   struct sequence_stack *tem = get_current_sequence ()->next;
5620 
5621   set_first_insn (tem->first);
5622   set_last_insn (tem->last);
5623   get_current_sequence ()->next = tem->next;
5624 
5625   memset (tem, 0, sizeof (*tem));
5626   tem->next = free_sequence_stack;
5627   free_sequence_stack = tem;
5628 }
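
/* Illustrative sketch only (not code from this file): the usual way to
   build a detached insn sequence with the functions above is

     start_sequence ();
     emit_insn (gen_rtx_SET (dest, src));
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn (seq);

   where DEST and SRC stand for rtxes the caller has already built.  Note
   that get_insns must be called before end_sequence, as explained in the
   comment above.  */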
5629 
5630 /* Return 1 if currently emitting into a sequence.  */
5631 
5632 int
5633 in_sequence_p (void)
5634 {
5635   return get_current_sequence ()->next != 0;
5636 }
5637 
5638 /* Put the various virtual registers into REGNO_REG_RTX.  */
5639 
5640 static void
5641 init_virtual_regs (void)
5642 {
5643   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5644   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5645   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5646   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5647   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5648   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5649     = virtual_preferred_stack_boundary_rtx;
5650 }
5651 
5652 
5653 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5654 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5655 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5656 static int copy_insn_n_scratches;
5657 
5658 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5659    copied an ASM_OPERANDS.
5660    In that case, it is the original input-operand vector.  */
5661 static rtvec orig_asm_operands_vector;
5662 
5663 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5664    copied an ASM_OPERANDS.
5665    In that case, it is the copied input-operand vector.  */
5666 static rtvec copy_asm_operands_vector;
5667 
5668 /* Likewise for the constraints vector.  */
5669 static rtvec orig_asm_constraints_vector;
5670 static rtvec copy_asm_constraints_vector;
5671 
5672 /* Recursively create a new copy of an rtx for copy_insn.
5673    This function differs from copy_rtx in that it handles SCRATCHes and
5674    ASM_OPERANDs properly.
5675    Normally, this function is not used directly; use copy_insn as front end.
5676    However, you could first copy an insn pattern with copy_insn and then use
5677    this function afterwards to properly copy any REG_NOTEs containing
5678    SCRATCHes.  */
5679 
5680 rtx
5681 copy_insn_1 (rtx orig)
5682 {
5683   rtx copy;
5684   int i, j;
5685   RTX_CODE code;
5686   const char *format_ptr;
5687 
5688   if (orig == NULL)
5689     return NULL;
5690 
5691   code = GET_CODE (orig);
5692 
5693   switch (code)
5694     {
5695     case REG:
5696     case DEBUG_EXPR:
5697     CASE_CONST_ANY:
5698     case SYMBOL_REF:
5699     case CODE_LABEL:
5700     case PC:
5701     case CC0:
5702     case RETURN:
5703     case SIMPLE_RETURN:
5704       return orig;
5705     case CLOBBER:
5706       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5707          clobbers or clobbers of hard registers that originated as pseudos.
5708          This is needed to allow safe register renaming.  */
5709       if (REG_P (XEXP (orig, 0))
5710 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5711 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5712 	return orig;
5713       break;
5714 
5715     case SCRATCH:
5716       for (i = 0; i < copy_insn_n_scratches; i++)
5717 	if (copy_insn_scratch_in[i] == orig)
5718 	  return copy_insn_scratch_out[i];
5719       break;
5720 
5721     case CONST:
5722       if (shared_const_p (orig))
5723 	return orig;
5724       break;
5725 
5726       /* A MEM with a constant address is not sharable.  The problem is that
5727 	 the constant address may need to be reloaded.  If the mem is shared,
5728 	 then reloading one copy of this mem will cause all copies to appear
5729 	 to have been reloaded.  */
5730 
5731     default:
5732       break;
5733     }
5734 
5735   /* Copy the various flags, fields, and other information.  We assume
5736      that all fields need copying, and then clear the fields that should
5737      not be copied.  That is the sensible default behavior, and forces
5738      us to explicitly document why we are *not* copying a flag.  */
5739   copy = shallow_copy_rtx (orig);
5740 
5741   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5742   if (INSN_P (orig))
5743     {
5744       RTX_FLAG (copy, jump) = 0;
5745       RTX_FLAG (copy, call) = 0;
5746       RTX_FLAG (copy, frame_related) = 0;
5747     }
5748 
5749   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5750 
5751   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5752     switch (*format_ptr++)
5753       {
5754       case 'e':
5755 	if (XEXP (orig, i) != NULL)
5756 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5757 	break;
5758 
5759       case 'E':
5760       case 'V':
5761 	if (XVEC (orig, i) == orig_asm_constraints_vector)
5762 	  XVEC (copy, i) = copy_asm_constraints_vector;
5763 	else if (XVEC (orig, i) == orig_asm_operands_vector)
5764 	  XVEC (copy, i) = copy_asm_operands_vector;
5765 	else if (XVEC (orig, i) != NULL)
5766 	  {
5767 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5768 	    for (j = 0; j < XVECLEN (copy, i); j++)
5769 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5770 	  }
5771 	break;
5772 
5773       case 't':
5774       case 'w':
5775       case 'i':
5776       case 'p':
5777       case 's':
5778       case 'S':
5779       case 'u':
5780       case '0':
5781 	/* These are left unchanged.  */
5782 	break;
5783 
5784       default:
5785 	gcc_unreachable ();
5786       }
5787 
5788   if (code == SCRATCH)
5789     {
5790       i = copy_insn_n_scratches++;
5791       gcc_assert (i < MAX_RECOG_OPERANDS);
5792       copy_insn_scratch_in[i] = orig;
5793       copy_insn_scratch_out[i] = copy;
5794     }
5795   else if (code == ASM_OPERANDS)
5796     {
5797       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5798       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5799       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5800       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5801     }
5802 
5803   return copy;
5804 }
5805 
5806 /* Create a new copy of an rtx.
5807    This function differs from copy_rtx in that it handles SCRATCHes and
5808    ASM_OPERANDs properly.
5809    INSN doesn't really have to be a full INSN; it could be just the
5810    pattern.  */
5811 rtx
5812 copy_insn (rtx insn)
5813 {
5814   copy_insn_n_scratches = 0;
5815   orig_asm_operands_vector = 0;
5816   orig_asm_constraints_vector = 0;
5817   copy_asm_operands_vector = 0;
5818   copy_asm_constraints_vector = 0;
5819   return copy_insn_1 (insn);
5820 }
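
/* Illustrative sketch only: as the comment before copy_insn_1 notes, a
   pattern and its REG_NOTEs can be copied so that SCRATCHes stay shared:

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));

   copy_insn resets the SCRATCH/ASM_OPERANDS bookkeeping above, and the
   subsequent copy_insn_1 call reuses it, so a SCRATCH referenced by both
   the pattern and the notes is copied only once.  */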
5821 
5822 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5823    on the assumption that INSN itself remains in its original place.  */
5824 
5825 rtx_insn *
5826 copy_delay_slot_insn (rtx_insn *insn)
5827 {
5828   /* Copy INSN with its rtx_code, all its notes, location etc.  */
5829   insn = as_a <rtx_insn *> (copy_rtx (insn));
5830   INSN_UID (insn) = cur_insn_uid++;
5831   return insn;
5832 }
5833 
5834 /* Initialize data structures and variables in this file
5835    before generating rtl for each function.  */
5836 
5837 void
5838 init_emit (void)
5839 {
5840   set_first_insn (NULL);
5841   set_last_insn (NULL);
5842   if (MIN_NONDEBUG_INSN_UID)
5843     cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5844   else
5845     cur_insn_uid = 1;
5846   cur_debug_insn_uid = 1;
5847   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5848   first_label_num = label_num;
5849   get_current_sequence ()->next = NULL;
5850 
5851   /* Init the tables that describe all the pseudo regs.  */
5852 
5853   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5854 
5855   crtl->emit.regno_pointer_align
5856     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5857 
5858   regno_reg_rtx
5859     = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5860 
5861   /* Put copies of all the hard registers into regno_reg_rtx.  */
5862   memcpy (regno_reg_rtx,
5863 	  initial_regno_reg_rtx,
5864 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5865 
5866   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5867   init_virtual_regs ();
5868 
5869   /* Indicate that the virtual registers and stack locations are
5870      all pointers.  */
5871   REG_POINTER (stack_pointer_rtx) = 1;
5872   REG_POINTER (frame_pointer_rtx) = 1;
5873   REG_POINTER (hard_frame_pointer_rtx) = 1;
5874   REG_POINTER (arg_pointer_rtx) = 1;
5875 
5876   REG_POINTER (virtual_incoming_args_rtx) = 1;
5877   REG_POINTER (virtual_stack_vars_rtx) = 1;
5878   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5879   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5880   REG_POINTER (virtual_cfa_rtx) = 1;
5881 
5882 #ifdef STACK_BOUNDARY
5883   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5884   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5885   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5886   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5887 
5888   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5889   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5890   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5891   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5892 
5893   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5894 #endif
5895 
5896 #ifdef INIT_EXPANDERS
5897   INIT_EXPANDERS;
5898 #endif
5899 }
5900 
5901 /* Return the value of element I of CONST_VECTOR X as a wide_int.  */
5902 
5903 wide_int
5904 const_vector_int_elt (const_rtx x, unsigned int i)
5905 {
5906   /* First handle elements that are directly encoded.  */
5907   machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5908   if (i < (unsigned int) XVECLEN (x, 0))
5909     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5910 
5911   /* Identify the pattern that contains element I and work out the index of
5912      the last encoded element for that pattern.  */
5913   unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5914   unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5915   unsigned int count = i / npatterns;
5916   unsigned int pattern = i % npatterns;
5917   unsigned int final_i = encoded_nelts - npatterns + pattern;
5918 
5919   /* If there are no steps, the final encoded value is the right one.  */
5920   if (!CONST_VECTOR_STEPPED_P (x))
5921     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5922 
5923   /* Otherwise work out the value from the last two encoded elements.  */
5924   rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5925   rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5926   wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5927 			   rtx_mode_t (v1, elt_mode));
5928   return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5929 }
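
/* Illustrative example of the stepped case above: with a single pattern
   (npatterns == 1) encoded as { 8, 10, 12 }, the implicit step is 2 and
   element I is 12 + (I - 2) * 2, so element 5 is 18, matching the
   expanded vector { 8, 10, 12, 14, 16, 18, ... }.  */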
5930 
5931 /* Return the value of element I of CONST_VECTOR X.  */
5932 
5933 rtx
5934 const_vector_elt (const_rtx x, unsigned int i)
5935 {
5936   /* First handle elements that are directly encoded.  */
5937   if (i < (unsigned int) XVECLEN (x, 0))
5938     return CONST_VECTOR_ENCODED_ELT (x, i);
5939 
5940   /* If there are no steps, the final encoded value is the right one.  */
5941   if (!CONST_VECTOR_STEPPED_P (x))
5942     {
5943       /* Identify the pattern that contains element I and work out the index of
5944 	 the last encoded element for that pattern.  */
5945       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5946       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5947       unsigned int pattern = i % npatterns;
5948       unsigned int final_i = encoded_nelts - npatterns + pattern;
5949       return CONST_VECTOR_ENCODED_ELT (x, final_i);
5950     }
5951 
5952   /* Otherwise work out the value from the last two encoded elements.  */
5953   return immed_wide_int_const (const_vector_int_elt (x, i),
5954 			       GET_MODE_INNER (GET_MODE (x)));
5955 }
5956 
5957 /* Return true if X is a valid element for a CONST_VECTOR of the given
5958    mode.  */
5959 
5960 bool
5961 valid_for_const_vector_p (machine_mode, rtx x)
5962 {
5963   return (CONST_SCALAR_INT_P (x)
5964 	  || CONST_DOUBLE_AS_FLOAT_P (x)
5965 	  || CONST_FIXED_P (x));
5966 }
5967 
5968 /* Generate a vector constant of mode MODE in which every element has
5969    value ELT.  */
5970 
5971 rtx
5972 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5973 {
5974   rtx_vector_builder builder (mode, 1, 1);
5975   builder.quick_push (elt);
5976   return builder.build ();
5977 }
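
/* For example (illustrative; V4SImode is just a representative mode),
   gen_const_vec_duplicate (V4SImode, const1_rtx) builds the constant
   vector { 1, 1, 1, 1 } using a single-pattern encoding with one
   element per pattern.  */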
5978 
5979 /* Return a vector rtx of mode MODE in which every element has value X.
5980    The result will be a constant if X is constant.  */
5981 
5982 rtx
5983 gen_vec_duplicate (machine_mode mode, rtx x)
5984 {
5985   if (valid_for_const_vector_p (mode, x))
5986     return gen_const_vec_duplicate (mode, x);
5987   return gen_rtx_VEC_DUPLICATE (mode, x);
5988 }
5989 
5990 /* A subroutine of const_vec_series_p that handles the case in which:
5991 
5992      (GET_CODE (X) == CONST_VECTOR
5993       && CONST_VECTOR_NPATTERNS (X) == 1
5994       && !CONST_VECTOR_DUPLICATE_P (X))
5995 
5996    is known to hold.  */
5997 
5998 bool
5999 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
6000 {
6001   /* Stepped sequences are only defined for integers, to avoid specifying
6002      rounding behavior.  */
6003   if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
6004     return false;
6005 
6006   /* A non-duplicated vector with two elements can always be seen as a
6007      series with a nonzero step.  Longer vectors must have a stepped
6008      encoding.  */
6009   if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
6010       && !CONST_VECTOR_STEPPED_P (x))
6011     return false;
6012 
6013   /* Calculate the step between the first and second elements.  */
6014   scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
6015   rtx base = CONST_VECTOR_ELT (x, 0);
6016   rtx step = simplify_binary_operation (MINUS, inner,
6017 					CONST_VECTOR_ENCODED_ELT (x, 1), base);
6018   if (rtx_equal_p (step, CONST0_RTX (inner)))
6019     return false;
6020 
6021   /* If we have a stepped encoding, check that the step between the
6022      second and third elements is the same as STEP.  */
6023   if (CONST_VECTOR_STEPPED_P (x))
6024     {
6025       rtx diff = simplify_binary_operation (MINUS, inner,
6026 					    CONST_VECTOR_ENCODED_ELT (x, 2),
6027 					    CONST_VECTOR_ENCODED_ELT (x, 1));
6028       if (!rtx_equal_p (step, diff))
6029 	return false;
6030     }
6031 
6032   *base_out = base;
6033   *step_out = step;
6034   return true;
6035 }
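
/* Illustrative example for the function above: a CONST_VECTOR whose
   single stepped pattern is encoded as { 0, 1, 2 } yields *BASE_OUT == 0
   and *STEP_OUT == 1, i.e. the series 0, 1, 2, 3, ...  */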
6036 
6037 /* Generate a vector constant of mode MODE in which element I has
6038    the value BASE + I * STEP.  */
6039 
6040 rtx
6041 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6042 {
6043   gcc_assert (valid_for_const_vector_p (mode, base)
6044 	      && valid_for_const_vector_p (mode, step));
6045 
6046   rtx_vector_builder builder (mode, 1, 3);
6047   builder.quick_push (base);
6048   for (int i = 1; i < 3; ++i)
6049     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6050 					     builder[i - 1], step));
6051   return builder.build ();
6052 }
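
/* For example (illustrative), gen_const_vec_series (V4SImode, const0_rtx,
   const1_rtx) encodes only the three leading elements { 0, 1, 2 }; the
   rest of the series 0, 1, 2, 3, ... is implied by the constant step.  */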
6053 
6054 /* Generate a vector of mode MODE in which element I has the value
6055    BASE + I * STEP.  The result will be a constant if BASE and STEP
6056    are both constants.  */
6057 
6058 rtx
6059 gen_vec_series (machine_mode mode, rtx base, rtx step)
6060 {
6061   if (step == const0_rtx)
6062     return gen_vec_duplicate (mode, base);
6063   if (valid_for_const_vector_p (mode, base)
6064       && valid_for_const_vector_p (mode, step))
6065     return gen_const_vec_series (mode, base, step);
6066   return gen_rtx_VEC_SERIES (mode, base, step);
6067 }
6068 
6069 /* Generate a new vector constant for mode MODE and constant value
6070    CONSTANT.  */
6071 
6072 static rtx
6073 gen_const_vector (machine_mode mode, int constant)
6074 {
6075   machine_mode inner = GET_MODE_INNER (mode);
6076 
6077   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6078 
6079   rtx el = const_tiny_rtx[constant][(int) inner];
6080   gcc_assert (el);
6081 
6082   return gen_const_vec_duplicate (mode, el);
6083 }
6084 
6085 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6086    all elements are zero, and the one vector when all elements are one.  */
6087 rtx
6088 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6089 {
6090   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6091 
6092   /* If the values are all the same, check to see if we can use one of the
6093      standard constant vectors.  */
6094   if (rtvec_all_equal_p (v))
6095     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6096 
6097   unsigned int nunits = GET_NUM_ELEM (v);
6098   rtx_vector_builder builder (mode, nunits, 1);
6099   for (unsigned int i = 0; i < nunits; ++i)
6100     builder.quick_push (RTVEC_ELT (v, i));
6101   return builder.build (v);
6102 }
6103 
6104 /* Initialise global register information required by all functions.  */
6105 
6106 void
6107 init_emit_regs (void)
6108 {
6109   int i;
6110   machine_mode mode;
6111   mem_attrs *attrs;
6112 
6113   /* Reset register attributes */
6114   reg_attrs_htab->empty ();
6115 
6116   /* We need reg_raw_mode, so initialize the modes now.  */
6117   init_reg_modes_target ();
6118 
6119   /* Assign register numbers to the globally defined register rtx.  */
6120   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6121   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6122   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6123   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6124   virtual_incoming_args_rtx =
6125     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6126   virtual_stack_vars_rtx =
6127     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6128   virtual_stack_dynamic_rtx =
6129     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6130   virtual_outgoing_args_rtx =
6131     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6132   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6133   virtual_preferred_stack_boundary_rtx =
6134     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6135 
6136   /* Initialize RTL for commonly used hard registers.  These are
6137      copied into regno_reg_rtx as we begin to compile each function.  */
6138   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6139     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6140 
6141 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6142   return_address_pointer_rtx
6143     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6144 #endif
6145 
6146   pic_offset_table_rtx = NULL_RTX;
6147   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6148     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6149 
6150   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6151     {
6152       mode = (machine_mode) i;
6153       attrs = ggc_cleared_alloc<mem_attrs> ();
6154       attrs->align = BITS_PER_UNIT;
6155       attrs->addrspace = ADDR_SPACE_GENERIC;
6156       if (mode != BLKmode && mode != VOIDmode)
6157 	{
6158 	  attrs->size_known_p = true;
6159 	  attrs->size = GET_MODE_SIZE (mode);
6160 	  if (STRICT_ALIGNMENT)
6161 	    attrs->align = GET_MODE_ALIGNMENT (mode);
6162 	}
6163       mode_mem_attrs[i] = attrs;
6164     }
6165 
6166   split_branch_probability = profile_probability::uninitialized ();
6167 }
6168 
6169 /* Initialize global machine_mode variables.  */
6170 
6171 void
6172 init_derived_machine_modes (void)
6173 {
6174   opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6175   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6176     {
6177       scalar_int_mode mode = mode_iter.require ();
6178 
6179       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6180 	  && !opt_byte_mode.exists ())
6181 	opt_byte_mode = mode;
6182 
6183       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6184 	  && !opt_word_mode.exists ())
6185 	opt_word_mode = mode;
6186     }
6187 
6188   byte_mode = opt_byte_mode.require ();
6189   word_mode = opt_word_mode.require ();
6190   ptr_mode = as_a <scalar_int_mode>
6191     (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6192 }
6193 
6194 /* Create some permanent unique rtl objects shared between all functions.  */
6195 
6196 void
6197 init_emit_once (void)
6198 {
6199   int i;
6200   machine_mode mode;
6201   scalar_float_mode double_mode;
6202   opt_scalar_mode smode_iter;
6203 
6204   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6205      CONST_FIXED, and memory attribute hash tables.  */
6206   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6207 
6208 #if TARGET_SUPPORTS_WIDE_INT
6209   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6210 #endif
6211   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6212 
6213   if (NUM_POLY_INT_COEFFS > 1)
6214     const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6215 
6216   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6217 
6218   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6219 
6220 #ifdef INIT_EXPANDERS
6221   /* This is to initialize {init|mark|free}_machine_status before the first
6222      call to push_function_context_to.  This is needed by the Chill front
6223      end which calls push_function_context_to before the first call to
6224      init_function_start.  */
6225   INIT_EXPANDERS;
6226 #endif
6227 
6228   /* Create the unique rtx's for certain rtx codes and operand values.  */
6229 
6230   /* Process stack-limiting command-line options.  */
6231   if (opt_fstack_limit_symbol_arg != NULL)
6232     stack_limit_rtx
6233       = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6234   if (opt_fstack_limit_register_no >= 0)
6235     stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6236 
6237   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6238      tries to use these variables.  */
6239   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6240     const_int_rtx[i + MAX_SAVED_CONST_INT] =
6241       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6242 
6243   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6244       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6245     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6246   else
6247     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6248 
6249   double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6250 
6251   real_from_integer (&dconst0, double_mode, 0, SIGNED);
6252   real_from_integer (&dconst1, double_mode, 1, SIGNED);
6253   real_from_integer (&dconst2, double_mode, 2, SIGNED);
6254 
6255   dconstm1 = dconst1;
6256   dconstm1.sign = 1;
6257 
6258   dconsthalf = dconst1;
6259   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6260 
6261   for (i = 0; i < 3; i++)
6262     {
6263       const REAL_VALUE_TYPE *const r =
6264 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6265 
6266       FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6267 	const_tiny_rtx[i][(int) mode] =
6268 	  const_double_from_real_value (*r, mode);
6269 
6270       FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6271 	const_tiny_rtx[i][(int) mode] =
6272 	  const_double_from_real_value (*r, mode);
6273 
6274       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6275 
6276       FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6277 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6278 
6279       for (mode = MIN_MODE_PARTIAL_INT;
6280 	   mode <= MAX_MODE_PARTIAL_INT;
6281 	   mode = (machine_mode)((int)(mode) + 1))
6282 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6283     }
6284 
6285   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6286 
6287   FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6288     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6289 
6290   /* For BImode, 1 and -1 are unsigned and signed interpretations
6291      of the same value.  */
6292   const_tiny_rtx[0][(int) BImode] = const0_rtx;
6293   const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6294   const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6295 
6296   for (mode = MIN_MODE_PARTIAL_INT;
6297        mode <= MAX_MODE_PARTIAL_INT;
6298        mode = (machine_mode)((int)(mode) + 1))
6299     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6300 
6301   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6302     {
6303       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6304       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6305     }
6306 
6307   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6308     {
6309       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6310       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6311     }
6312 
6313   /* As for BImode, "all 1" and "all -1" are unsigned and signed
6314      interpretations of the same value.  */
6315   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6316     {
6317       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6318       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6319       const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6320     }
6321 
6322   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6323     {
6324       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6325       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6326       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6327     }
6328 
6329   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6330     {
6331       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6332       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6333     }
6334 
6335   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6336     {
6337       scalar_mode smode = smode_iter.require ();
6338       FCONST0 (smode).data.high = 0;
6339       FCONST0 (smode).data.low = 0;
6340       FCONST0 (smode).mode = smode;
6341       const_tiny_rtx[0][(int) smode]
6342 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6343     }
6344 
6345   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6346     {
6347       scalar_mode smode = smode_iter.require ();
6348       FCONST0 (smode).data.high = 0;
6349       FCONST0 (smode).data.low = 0;
6350       FCONST0 (smode).mode = smode;
6351       const_tiny_rtx[0][(int) smode]
6352 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6353     }
6354 
6355   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6356     {
6357       scalar_mode smode = smode_iter.require ();
6358       FCONST0 (smode).data.high = 0;
6359       FCONST0 (smode).data.low = 0;
6360       FCONST0 (smode).mode = smode;
6361       const_tiny_rtx[0][(int) smode]
6362 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6363 
6364       /* We store the value 1.  */
6365       FCONST1 (smode).data.high = 0;
6366       FCONST1 (smode).data.low = 0;
6367       FCONST1 (smode).mode = smode;
6368       FCONST1 (smode).data
6369 	= double_int_one.lshift (GET_MODE_FBIT (smode),
6370 				 HOST_BITS_PER_DOUBLE_INT,
6371 				 SIGNED_FIXED_POINT_MODE_P (smode));
6372       const_tiny_rtx[1][(int) smode]
6373 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6374     }
6375 
6376   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6377     {
6378       scalar_mode smode = smode_iter.require ();
6379       FCONST0 (smode).data.high = 0;
6380       FCONST0 (smode).data.low = 0;
6381       FCONST0 (smode).mode = smode;
6382       const_tiny_rtx[0][(int) smode]
6383 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6384 
6385       /* We store the value 1.  */
6386       FCONST1 (smode).data.high = 0;
6387       FCONST1 (smode).data.low = 0;
6388       FCONST1 (smode).mode = smode;
6389       FCONST1 (smode).data
6390 	= double_int_one.lshift (GET_MODE_FBIT (smode),
6391 				 HOST_BITS_PER_DOUBLE_INT,
6392 				 SIGNED_FIXED_POINT_MODE_P (smode));
6393       const_tiny_rtx[1][(int) smode]
6394 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6395     }
6396 
6397   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6398     {
6399       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6400     }
6401 
6402   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6403     {
6404       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6405     }
6406 
6407   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6408     {
6409       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6410       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6411     }
6412 
6413   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6414     {
6415       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6416       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6417     }
6418 
6419   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6420     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6421       const_tiny_rtx[0][i] = const0_rtx;
6422 
6423   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
6424     {
6425       scalar_mode smode = smode_iter.require ();
6426       wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6427       const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
6428     }
6429 
6430   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6431   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6432   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6433   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6434   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6435 				   /*prev_insn=*/NULL,
6436 				   /*next_insn=*/NULL,
6437 				   /*bb=*/NULL,
6438 				   /*pattern=*/NULL_RTX,
6439 				   /*location=*/-1,
6440 				   CODE_FOR_nothing,
6441 				   /*reg_notes=*/NULL_RTX);
6442 }
6443 
6444 /* Produce an exact duplicate of insn INSN after AFTER.
6445    Take care to update libcall regions if present.  */
6446 
6447 rtx_insn *
6448 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6449 {
6450   rtx_insn *new_rtx;
6451   rtx link;
6452 
6453   switch (GET_CODE (insn))
6454     {
6455     case INSN:
6456       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6457       break;
6458 
6459     case JUMP_INSN:
6460       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6461       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6462       break;
6463 
6464     case DEBUG_INSN:
6465       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6466       break;
6467 
6468     case CALL_INSN:
6469       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6470       if (CALL_INSN_FUNCTION_USAGE (insn))
6471 	CALL_INSN_FUNCTION_USAGE (new_rtx)
6472 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6473       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6474       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6475       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6476       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6477 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6478       break;
6479 
6480     default:
6481       gcc_unreachable ();
6482     }
6483 
6484   /* Update LABEL_NUSES.  */
6485   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6486 
6487   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6488 
6489   /* If the old insn is frame related, then so is the new one.  This is
6490      primarily needed for IA-64 unwind info which marks epilogue insns,
6491      which may be duplicated by the basic block reordering code.  */
6492   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6493 
6494   /* Locate the end of existing REG_NOTES in NEW_RTX.  */
6495   rtx *ptail = &REG_NOTES (new_rtx);
6496   while (*ptail != NULL_RTX)
6497     ptail = &XEXP (*ptail, 1);
6498 
6499   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6500      will make them.  REG_LABEL_TARGETs are created there too, but are
6501      supposed to be sticky, so we copy them.  */
6502   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6503     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6504       {
6505 	*ptail = duplicate_reg_note (link);
6506 	ptail = &XEXP (*ptail, 1);
6507       }
6508 
6509   INSN_CODE (new_rtx) = INSN_CODE (insn);
6510   return new_rtx;
6511 }
6512 
6513 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6514 rtx
6515 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6516 {
6517   if (hard_reg_clobbers[mode][regno])
6518     return hard_reg_clobbers[mode][regno];
6519   else
6520     return (hard_reg_clobbers[mode][regno] =
6521 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6522 }
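
/* For example, gen_hard_reg_clobber (word_mode, 0) returns a cached
   CLOBBER of hard register 0 in word_mode; repeated calls with the same
   mode and register number share a single rtx.  */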
6523 
6524 location_t prologue_location;
6525 location_t epilogue_location;
6526 
6527 /* Hold the current location so that the location data structures are
6528    built lazily, only when insns at a given location are actually
6529    needed.  */
6530 static location_t curr_location;
6531 
6532 /* Allocate the insn location data structure.  */
6533 void
6534 insn_locations_init (void)
6535 {
6536   prologue_location = epilogue_location = 0;
6537   curr_location = UNKNOWN_LOCATION;
6538 }
6539 
6540 /* At the end of the emit stage, clear the current location.  */
6541 void
6542 insn_locations_finalize (void)
6543 {
6544   epilogue_location = curr_location;
6545   curr_location = UNKNOWN_LOCATION;
6546 }
6547 
6548 /* Set current location.  */
6549 void
6550 set_curr_insn_location (location_t location)
6551 {
6552   curr_location = location;
6553 }
6554 
6555 /* Get current location.  */
6556 location_t
6557 curr_insn_location (void)
6558 {
6559   return curr_location;
6560 }
6561 
6562 /* Return the lexical scope block that INSN belongs to.  */
6563 tree
6564 insn_scope (const rtx_insn *insn)
6565 {
6566   return LOCATION_BLOCK (INSN_LOCATION (insn));
6567 }
6568 
6569 /* Return line number of the statement that produced this insn.  */
6570 int
6571 insn_line (const rtx_insn *insn)
6572 {
6573   return LOCATION_LINE (INSN_LOCATION (insn));
6574 }
6575 
6576 /* Return source file of the statement that produced this insn.  */
6577 const char *
6578 insn_file (const rtx_insn *insn)
6579 {
6580   return LOCATION_FILE (INSN_LOCATION (insn));
6581 }
6582 
6583 /* Return expanded location of the statement that produced this insn.  */
6584 expanded_location
6585 insn_location (const rtx_insn *insn)
6586 {
6587   return expand_location (INSN_LOCATION (insn));
6588 }
6589 
6590 /* Return true if memory model MODEL requires a pre-operation (release-style)
6591    barrier or a post-operation (acquire-style) barrier.  While not universal,
6592    this function matches the behavior of several targets.  */
6593 
6594 bool
6595 need_atomic_barrier_p (enum memmodel model, bool pre)
6596 {
6597   switch (model & MEMMODEL_BASE_MASK)
6598     {
6599     case MEMMODEL_RELAXED:
6600     case MEMMODEL_CONSUME:
6601       return false;
6602     case MEMMODEL_RELEASE:
6603       return pre;
6604     case MEMMODEL_ACQUIRE:
6605       return !pre;
6606     case MEMMODEL_ACQ_REL:
6607     case MEMMODEL_SEQ_CST:
6608       return true;
6609     default:
6610       gcc_unreachable ();
6611     }
6612 }
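
/* For example, with the mapping above need_atomic_barrier_p
   (MEMMODEL_RELEASE, true) is true (a barrier is needed before the
   operation), while need_atomic_barrier_p (MEMMODEL_ACQUIRE, true) is
   false (the barrier belongs after the operation instead).  */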
6613 
6614 /* Return a constant shift amount for shifting a value of mode MODE
6615    by VALUE bits.  */
6616 
6617 rtx
6618 gen_int_shift_amount (machine_mode, poly_int64 value)
6619 {
6620   /* Use a 64-bit mode, to avoid any truncation.
6621 
6622      ??? Perhaps this should be automatically derived from the .md files
6623      instead, or perhaps have a target hook.  */
6624   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6625 				? DImode
6626 				: int_mode_for_size (64, 0).require ());
6627   return gen_int_mode (value, shift_mode);
6628 }
6629 
6630 /* Initialize fields of rtl_data related to stack alignment.  */
6631 
6632 void
6633 rtl_data::init_stack_alignment ()
6634 {
6635   stack_alignment_needed = STACK_BOUNDARY;
6636   max_used_stack_slot_alignment = STACK_BOUNDARY;
6637   stack_alignment_estimated = 0;
6638   preferred_stack_boundary = STACK_BOUNDARY;
6639 }
6640 
6641 
6642 #include "gt-emit-rtl.h"
6643