/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.cc, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"
#include "rtx-vector-builder.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "gimplify.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record the fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by the peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  */
profile_probability split_branch_probability;

/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG that refers to DECL at byte offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn and to block register equivalences from being seen
   across it.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Initialize a fresh REG rtx with mode MODE and register REGNO.  */

rtx
init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
{
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  init_raw_REG (x, mode, regno);
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.cc as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
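
/* Because small values come from the const_int_rtx array and larger ones
   are interned in const_int_htab, CONST_INTs can be compared by pointer.
   An illustrative sketch:

     rtx a = GEN_INT (0);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
     gcc_checking_assert (a == b && a == const0_rtx);  */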

rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
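
/* For example, since the value is sign-truncated for MODE before the
   shared-CONST_INT lookup, on a target where QImode is 8 bits wide
   (illustrative):

     gen_int_mode (0xff, QImode)   yields (const_int -1)
     gen_int_mode (0x100, QImode)  yields (const_int 0)

   trunc_int_for_mode does the truncation.  */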

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
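
/* Interning through lookup_const_double means repeated requests for the
   same value and mode yield the same rtx; an illustrative sketch:

     rtx x = const_double_from_real_value (dconst1, DFmode);
     rtx y = const_double_from_real_value (dconst1, DFmode);
     gcc_checking_assert (x == y);  */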

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
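
/* So, with a 64-bit HOST_WIDE_INT, a value whose significant bits fit in
   one element comes back as a shared CONST_INT, while a genuinely wider
   value becomes a CONST_WIDE_INT (or a CONST_DOUBLE on
   !TARGET_SUPPORTS_WIDE_INT targets).  An illustrative sketch, assuming
   the target has TImode:

     immed_wide_int_const (wi::one (128), TImode)
       yields (const_int 1), while
     immed_wide_int_const (wi::lshift (wi::one (128), 100), TImode)
       yields a two-element CONST_WIDE_INT.  */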

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
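
/* A consequence of the sharing above: when neither reload nor LRA is in
   progress, asking for the Pmode stack pointer yields the pre-allocated
   rtx, so pointer comparison suffices.  Illustrative sketch:

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_checking_assert (sp == stack_pointer_rtx);  */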

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
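
/* For instance, constant-pool references are typically built this way;
   a hypothetical sketch (the "*.LC0" label is made up for illustration):

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, sym);

   giving a MEM that the optimizers may treat as read-only and
   non-trapping.  */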

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     surely isn't the cleanest way to represent this.  It's questionable
     if this ought to be represented at all -- why can't this all be hidden
     in post-reload splitters that make arbitrary mode changes to the
     registers themselves.  */
  else if (VECTOR_MODE_P (omode)
	   && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than
		that of the integer mode.  LRA also uses subregs when a
		register should be used in a different mode in an
		insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
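
/* A few illustrative outcomes on a typical 64-bit little-endian target,
   where REG is a pseudo of the inner mode (a sketch):

     validate_subreg (SImode, DImode, reg, 0)  => true   (aligned lowpart)
     validate_subreg (SImode, DImode, reg, 1)  => false  (offset not a
							  multiple of 4)
     validate_subreg (SImode, DFmode, reg, 0)  => false  (float subreg
							  changing size, and
							  SImode is not
							  word_mode there)  */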

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
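
/* The lowpart offset depends on endianness; for example, with a DImode
   pseudo R (illustrative):

     gen_lowpart_SUBREG (SImode, R)
       => (subreg:SI (reg:DI R) 0) on little-endian targets,
	  (subreg:SI (reg:DI R) 4) on big-endian ones.  */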

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}


/* Create an rtvec and store within it the RTXen passed in the
   arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}


/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
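
/* Worked example (illustrative): on a target where both BYTES_BIG_ENDIAN
   and WORDS_BIG_ENDIAN hold, with 8-byte DImode and 4-byte SImode,

     byte_lowpart_offset (SImode, DImode) =>  4  (normal lowpart)
     byte_lowpart_offset (DImode, SImode) => -4  (paradoxical lowpart)

   while a little-endian target returns 0 in both cases.  */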

/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimation because it might be
     spilled to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary
	 parts, which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
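
/* So, during expansion (while generating_concat_p is set), a complex
   pseudo arrives as two independently allocatable parts; an illustrative
   sketch:

     rtx c = gen_reg_rtx (DCmode);

   yields (concat:DC (reg:DF N) (reg:DF N+1)) rather than a single
   DCmode register.  */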

/* Make sure regno_pointer_align and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
     overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
     of the shared REG rtxes like stack_pointer_rtx etc.  This should
     happen only for SUBREGs from DEBUG_INSNs; the RA should ensure
     multi-word registers don't overlap the special registers like
     the stack pointer.  */
  rtx new_rtx = gen_raw_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current
   function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}


/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.cc and combine.cc.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}

rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  /* gen_lowpart_common handles a lot of special cases due to needing to
     handle paradoxical subregs; it only calls simplify_gen_subreg when
     certain that it will produce something meaningful.  The only case we
     need to handle specially here is MEM.  */
  if (MEM_P (x))
    {
      poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
      return adjust_address (x, mode, offset);
    }

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  /* Since we handle MEM directly above, we should never get a MEM back
     from simplify_gen_subreg.  */
  gcc_assert (result && !MEM_P (result));

  return result;
}

/* Like gen_highpart, but accept the mode of EXP as a separate argument,
   in case EXP is a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}
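
/* Worked example (illustrative): for OUTER_BYTES = 4, INNER_BYTES = 8,
   on a target where BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN:

     subreg_size_lowpart_offset  (4, 8) => 0 little-endian, 4 big-endian
     subreg_size_highpart_offset (4, 8) => 4 little-endian, 0 big-endian

   i.e. the lowpart sits at the higher address only on big-endian.  */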
1703
1704 /* Return 1 iff X, assumed to be a SUBREG,
1705 refers to the least significant part of its containing reg.
1706 If X is not a SUBREG, always return 1 (it is its own low part!). */
1707
1708 int
subreg_lowpart_p(const_rtx x)1709 subreg_lowpart_p (const_rtx x)
1710 {
1711 if (GET_CODE (x) != SUBREG)
1712 return 1;
1713 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1714 return 0;
1715
1716 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1717 GET_MODE (SUBREG_REG (x))),
1718 SUBREG_BYTE (x));
1719 }
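
/* For example, (subreg:SI (reg:DI R) 0) is a lowpart subreg on a
   little-endian target, where the lowpart offset is always 0.  */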
1720
1721 /* Return subword OFFSET of operand OP.
1722 The word number, OFFSET, is interpreted as the word number starting
1723 at the low-order address. OFFSET 0 is the low-order word if not
1724 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1725
1726 If we cannot extract the required word, we return zero. Otherwise,
1727 an rtx corresponding to the requested word will be returned.
1728
1729 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1730 reload has completed, a valid address will always be returned. After
1731 reload, if a valid address cannot be returned, we return zero.
1732
1733 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1734 it is the responsibility of the caller.
1735
1736 MODE is the mode of OP in case it is a CONST_INT.
1737
1738 ??? This is still rather broken for some cases. The problem for the
1739 moment is that all callers of this thing provide no 'goal mode' to
1740 tell us to work with. This exists because all callers were written
1741 in a word based SUBREG world.
1742 Most uses of this function can now be replaced by simplify_subreg.
1744 */
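
/* For example, on a 32-bit target, operand_subword (op, 1, 1, DImode)
   forms the SImode word of OP at byte offset 4, or returns 0 if that
   word cannot be extracted.  */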
1745
1746 rtx
1747 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1748 machine_mode mode)
1749 {
1750 if (mode == VOIDmode)
1751 mode = GET_MODE (op);
1752
1753 gcc_assert (mode != VOIDmode);
1754
1755 /* If OP is narrower than a word, fail. */
1756 if (mode != BLKmode
1757 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1758 return 0;
1759
1760 /* If we want a word outside OP, return zero. */
1761 if (mode != BLKmode
1762 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1763 return const0_rtx;
1764
1765 /* Form a new MEM at the requested address. */
1766 if (MEM_P (op))
1767 {
1768 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1769
1770 if (! validate_address)
1771 return new_rtx;
1772
1773 else if (reload_completed)
1774 {
1775 if (! strict_memory_address_addr_space_p (word_mode,
1776 XEXP (new_rtx, 0),
1777 MEM_ADDR_SPACE (op)))
1778 return 0;
1779 }
1780 else
1781 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1782 }
1783
1784 /* Rest can be handled by simplify_subreg. */
1785 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1786 }
1787
1788 /* Similar to `operand_subword', but never return 0. If we can't
1789 extract the required subword, put OP into a register and try again.
1790 The second attempt must succeed. We always validate the address in
1791 this case.
1792
1793 MODE is the mode of OP, in case it is CONST_INT. */
1794
1795 rtx
1796 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1797 {
1798 rtx result = operand_subword (op, offset, 1, mode);
1799
1800 if (result)
1801 return result;
1802
1803 if (mode != BLKmode && mode != VOIDmode)
1804 {
1805 /* If this is a register which cannot be accessed by words, copy it
1806 to a pseudo register. */
1807 if (REG_P (op))
1808 op = copy_to_reg (op);
1809 else
1810 op = force_reg (mode, op);
1811 }
1812
1813 result = operand_subword (op, offset, 1, mode);
1814 gcc_assert (result);
1815
1816 return result;
1817 }
1818
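/* Default constructor: no known expression, offset or size, alias set 0,
   zero alignment, and the generic address space.  */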
1819 mem_attrs::mem_attrs ()
1820 : expr (NULL_TREE),
1821 offset (0),
1822 size (0),
1823 alias (0),
1824 align (0),
1825 addrspace (ADDR_SPACE_GENERIC),
1826 offset_known_p (false),
1827 size_known_p (false)
1828 {}
1829
1830 /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1831 equal, and 0 otherwise.  */
1832
1833 int
1834 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1835 {
1836 if (expr1 == expr2)
1837 return 1;
1838
1839 if (! expr1 || ! expr2)
1840 return 0;
1841
1842 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1843 return 0;
1844
1845 return operand_equal_p (expr1, expr2, 0);
1846 }
1847
1848 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1849 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1850 -1 if not known. */
1851
1852 int
1853 get_mem_align_offset (rtx mem, unsigned int align)
1854 {
1855 tree expr;
1856 poly_uint64 offset;
1857
1858 /* This function can't use
1859 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1860 || (MAX (MEM_ALIGN (mem),
1861 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1862 < align))
1863 return -1;
1864 else
1865 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1866 for two reasons:
1867 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1868 for <variable>. get_inner_reference doesn't handle it and
1869 even if it did, the alignment in that case needs to be determined
1870 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1871 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1872 isn't sufficiently aligned, the object it is in might be. */
1873 gcc_assert (MEM_P (mem));
1874 expr = MEM_EXPR (mem);
1875 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1876 return -1;
1877
1878 offset = MEM_OFFSET (mem);
1879 if (DECL_P (expr))
1880 {
1881 if (DECL_ALIGN (expr) < align)
1882 return -1;
1883 }
1884 else if (INDIRECT_REF_P (expr))
1885 {
1886 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1887 return -1;
1888 }
1889 else if (TREE_CODE (expr) == COMPONENT_REF)
1890 {
1891 while (1)
1892 {
1893 tree inner = TREE_OPERAND (expr, 0);
1894 tree field = TREE_OPERAND (expr, 1);
1895 tree byte_offset = component_ref_field_offset (expr);
1896 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1897
1898 poly_uint64 suboffset;
1899 if (!byte_offset
1900 || !poly_int_tree_p (byte_offset, &suboffset)
1901 || !tree_fits_uhwi_p (bit_offset))
1902 return -1;
1903
1904 offset += suboffset;
1905 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1906
1907 if (inner == NULL_TREE)
1908 {
1909 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1910 < (unsigned int) align)
1911 return -1;
1912 break;
1913 }
1914 else if (DECL_P (inner))
1915 {
1916 if (DECL_ALIGN (inner) < align)
1917 return -1;
1918 break;
1919 }
1920 else if (TREE_CODE (inner) != COMPONENT_REF)
1921 return -1;
1922 expr = inner;
1923 }
1924 }
1925 else
1926 return -1;
1927
1928 HOST_WIDE_INT misalign;
1929 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1930 return -1;
1931 return misalign;
1932 }
1933
1934 /* Given REF (a MEM) and T, either the type of REF or the expression
1935 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1936 if we are making a new object of this type. BITPOS is nonzero if
1937 there is an offset outstanding on T that will be applied later. */
1938
1939 void
1940 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1941 poly_int64 bitpos)
1942 {
1943 poly_int64 apply_bitpos = 0;
1944 tree type;
1945 class mem_attrs attrs, *defattrs, *refattrs;
1946 addr_space_t as;
1947
1948 /* It can happen that type_for_mode was given a mode for which there
1949 is no language-level type.  In that case it returns NULL, which
1950 we can see here. */
1951 if (t == NULL_TREE)
1952 return;
1953
1954 type = TYPE_P (t) ? t : TREE_TYPE (t);
1955 if (type == error_mark_node)
1956 return;
1957
1958 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1959 wrong answer, as it assumes that DECL_RTL already has the right alias
1960 info. Callers should not set DECL_RTL until after the call to
1961 set_mem_attributes. */
1962 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1963
1964 /* Get the alias set from the expression or type (perhaps using a
1965 front-end routine) and use it. */
1966 attrs.alias = get_alias_set (t);
1967
1968 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1969 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1970
1971 /* Default values from pre-existing memory attributes if present. */
1972 refattrs = MEM_ATTRS (ref);
1973 if (refattrs)
1974 {
1975 /* ??? Can this ever happen? Calling this routine on a MEM that
1976 already carries memory attributes should probably be invalid. */
1977 attrs.expr = refattrs->expr;
1978 attrs.offset_known_p = refattrs->offset_known_p;
1979 attrs.offset = refattrs->offset;
1980 attrs.size_known_p = refattrs->size_known_p;
1981 attrs.size = refattrs->size;
1982 attrs.align = refattrs->align;
1983 }
1984
1985 /* Otherwise, default values from the mode of the MEM reference. */
1986 else
1987 {
1988 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1989 gcc_assert (!defattrs->expr);
1990 gcc_assert (!defattrs->offset_known_p);
1991
1992 /* Respect mode size. */
1993 attrs.size_known_p = defattrs->size_known_p;
1994 attrs.size = defattrs->size;
1995 /* ??? Is this really necessary? We probably should always get
1996 the size from the type below. */
1997
1998 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1999 if T is an object, always compute the object alignment below. */
2000 if (TYPE_P (t))
2001 attrs.align = defattrs->align;
2002 else
2003 attrs.align = BITS_PER_UNIT;
2004 /* ??? If T is a type, respecting mode alignment may *also* be wrong
2005 e.g. if the type carries an alignment attribute. Should we be
2006 able to simply always use TYPE_ALIGN? */
2007 }
2008
2009 /* We can set the alignment from the type if we are making an object or if
2010 this is an INDIRECT_REF. */
2011 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2012 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2013
2014 /* If the size is known, we can set that. */
2015 tree new_size = TYPE_SIZE_UNIT (type);
2016
2017 /* The address-space is that of the type. */
2018 as = TYPE_ADDR_SPACE (type);
2019
2020 /* If T is not a type, we may be able to deduce some more information about
2021 the expression. */
2022 if (! TYPE_P (t))
2023 {
2024 tree base;
2025
2026 if (TREE_THIS_VOLATILE (t))
2027 MEM_VOLATILE_P (ref) = 1;
2028
2029 /* Now remove any conversions: they don't change what the underlying
2030 object is. Likewise for SAVE_EXPR. */
2031 while (CONVERT_EXPR_P (t)
2032 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2033 || TREE_CODE (t) == SAVE_EXPR)
2034 t = TREE_OPERAND (t, 0);
2035
2036 /* Note whether this expression can trap. */
2037 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2038
2039 base = get_base_address (t);
2040 if (base)
2041 {
2042 if (DECL_P (base)
2043 && TREE_READONLY (base)
2044 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2045 && !TREE_THIS_VOLATILE (base))
2046 MEM_READONLY_P (ref) = 1;
2047
2048 /* Mark static const strings readonly as well. */
2049 if (TREE_CODE (base) == STRING_CST
2050 && TREE_READONLY (base)
2051 && TREE_STATIC (base))
2052 MEM_READONLY_P (ref) = 1;
2053
2054 /* Address-space information is on the base object. */
2055 if (TREE_CODE (base) == MEM_REF
2056 || TREE_CODE (base) == TARGET_MEM_REF)
2057 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2058 0))));
2059 else
2060 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2061 }
2062
2063 /* If this expression uses its parent's alias set, mark it such
2064 that we won't change it. */
2065 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2066 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2067
2068 /* If this is a decl, set the attributes of the MEM from it. */
2069 if (DECL_P (t))
2070 {
2071 attrs.expr = t;
2072 attrs.offset_known_p = true;
2073 attrs.offset = 0;
2074 apply_bitpos = bitpos;
2075 new_size = DECL_SIZE_UNIT (t);
2076 }
2077
2078 /* ??? If we end up with a constant or a descriptor do not
2079 record a MEM_EXPR. */
2080 else if (CONSTANT_CLASS_P (t)
2081 || TREE_CODE (t) == CONSTRUCTOR)
2082 ;
2083
2084 /* If this is a field reference, record it. */
2085 else if (TREE_CODE (t) == COMPONENT_REF)
2086 {
2087 attrs.expr = t;
2088 attrs.offset_known_p = true;
2089 attrs.offset = 0;
2090 apply_bitpos = bitpos;
2091 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2092 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2093 }
2094
2095 /* Else record it. */
2096 else
2097 {
2098 gcc_assert (handled_component_p (t)
2099 || TREE_CODE (t) == MEM_REF
2100 || TREE_CODE (t) == TARGET_MEM_REF);
2101 attrs.expr = t;
2102 attrs.offset_known_p = true;
2103 attrs.offset = 0;
2104 apply_bitpos = bitpos;
2105 }
2106
2107 /* If this is a reference based on a partitioned decl replace the
2108 base with a MEM_REF of the pointer representative we created
2109 during stack slot partitioning. */
2110 if (attrs.expr
2111 && VAR_P (base)
2112 && ! is_global_var (base)
2113 && cfun->gimple_df->decls_to_pointers != NULL)
2114 {
2115 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2116 if (namep)
2117 {
2118 attrs.expr = unshare_expr (attrs.expr);
2119 tree *orig_base = &attrs.expr;
2120 while (handled_component_p (*orig_base))
2121 orig_base = &TREE_OPERAND (*orig_base, 0);
2122 tree aptrt = reference_alias_ptr_type (*orig_base);
2123 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2124 build_int_cst (aptrt, 0));
2125 }
2126 }
2127
2128 /* Compute the alignment. */
2129 unsigned int obj_align;
2130 unsigned HOST_WIDE_INT obj_bitpos;
2131 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2132 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2133 if (diff_align != 0)
2134 obj_align = MIN (obj_align, diff_align);
2135 attrs.align = MAX (attrs.align, obj_align);
2136 }
2137
2138 poly_uint64 const_size;
2139 if (poly_int_tree_p (new_size, &const_size))
2140 {
2141 attrs.size_known_p = true;
2142 attrs.size = const_size;
2143 }
2144
2145 /* If we modified OFFSET based on T, then subtract the outstanding
2146 bit position offset. Similarly, increase the size of the accessed
2147 object to contain the negative offset. */
2148 if (maybe_ne (apply_bitpos, 0))
2149 {
2150 gcc_assert (attrs.offset_known_p);
2151 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2152 attrs.offset -= bytepos;
2153 if (attrs.size_known_p)
2154 attrs.size += bytepos;
2155 }
2156
2157 /* Now set the attributes we computed above. */
2158 attrs.addrspace = as;
2159 set_mem_attrs (ref, &attrs);
2160 }
2161
2162 void
2163 set_mem_attributes (rtx ref, tree t, int objectp)
2164 {
2165 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2166 }
2167
2168 /* Set the alias set of MEM to SET. */
2169
2170 void
2171 set_mem_alias_set (rtx mem, alias_set_type set)
2172 {
2173 /* If the new and old alias sets don't conflict, something is wrong. */
2174 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2175 mem_attrs attrs (*get_mem_attrs (mem));
2176 attrs.alias = set;
2177 set_mem_attrs (mem, &attrs);
2178 }
2179
2180 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2181
2182 void
2183 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2184 {
2185 mem_attrs attrs (*get_mem_attrs (mem));
2186 attrs.addrspace = addrspace;
2187 set_mem_attrs (mem, &attrs);
2188 }
2189
2190 /* Set the alignment of MEM to ALIGN bits. */
2191
2192 void
2193 set_mem_align (rtx mem, unsigned int align)
2194 {
2195 mem_attrs attrs (*get_mem_attrs (mem));
2196 attrs.align = align;
2197 set_mem_attrs (mem, &attrs);
2198 }
2199
2200 /* Set the expr for MEM to EXPR. */
2201
2202 void
2203 set_mem_expr (rtx mem, tree expr)
2204 {
2205 mem_attrs attrs (*get_mem_attrs (mem));
2206 attrs.expr = expr;
2207 set_mem_attrs (mem, &attrs);
2208 }
2209
2210 /* Set the offset of MEM to OFFSET. */
2211
2212 void
2213 set_mem_offset (rtx mem, poly_int64 offset)
2214 {
2215 mem_attrs attrs (*get_mem_attrs (mem));
2216 attrs.offset_known_p = true;
2217 attrs.offset = offset;
2218 set_mem_attrs (mem, &attrs);
2219 }
2220
2221 /* Clear the offset of MEM. */
2222
2223 void
2224 clear_mem_offset (rtx mem)
2225 {
2226 mem_attrs attrs (*get_mem_attrs (mem));
2227 attrs.offset_known_p = false;
2228 set_mem_attrs (mem, &attrs);
2229 }
2230
2231 /* Set the size of MEM to SIZE. */
2232
2233 void
2234 set_mem_size (rtx mem, poly_int64 size)
2235 {
2236 mem_attrs attrs (*get_mem_attrs (mem));
2237 attrs.size_known_p = true;
2238 attrs.size = size;
2239 set_mem_attrs (mem, &attrs);
2240 }
2241
2242 /* Clear the size of MEM. */
2243
2244 void
2245 clear_mem_size (rtx mem)
2246 {
2247 mem_attrs attrs (*get_mem_attrs (mem));
2248 attrs.size_known_p = false;
2249 set_mem_attrs (mem, &attrs);
2250 }
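
/* All of the set_mem_* and clear_mem_* routines above follow the same
   pattern: copy the existing attributes, modify the copy, and install
   it with set_mem_attrs; mem_attrs structures are shared, so they are
   never modified in place.  A minimal usage sketch, where ADDR stands
   for any valid address rtx already in hand:

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);   (alignment is given in bits)
     set_mem_size (mem, 4);     (size is given in bytes)   */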
2251
2252 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2253 and its address changed to ADDR. (VOIDmode means don't change the mode.
2254 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2255 returned memory location is required to be valid. INPLACE is true if any
2256 changes can be made directly to MEMREF or false if MEMREF must be treated
2257 as immutable.
2258
2259 The memory attributes are not changed. */
2260
2261 static rtx
2262 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2263 bool inplace)
2264 {
2265 addr_space_t as;
2266 rtx new_rtx;
2267
2268 gcc_assert (MEM_P (memref));
2269 as = MEM_ADDR_SPACE (memref);
2270 if (mode == VOIDmode)
2271 mode = GET_MODE (memref);
2272 if (addr == 0)
2273 addr = XEXP (memref, 0);
2274 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2275 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2276 return memref;
2277
2278 /* Don't validate address for LRA. LRA can make the address valid
2279 by itself in most efficient way. */
2280 if (validate && !lra_in_progress)
2281 {
2282 if (reload_in_progress || reload_completed)
2283 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2284 else
2285 addr = memory_address_addr_space (mode, addr, as);
2286 }
2287
2288 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2289 return memref;
2290
2291 if (inplace)
2292 {
2293 XEXP (memref, 0) = addr;
2294 return memref;
2295 }
2296
2297 new_rtx = gen_rtx_MEM (mode, addr);
2298 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2299 return new_rtx;
2300 }
2301
2302 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2303 way we are changing MEMREF, so we only preserve the alias set. */
2304
2305 rtx
2306 change_address (rtx memref, machine_mode mode, rtx addr)
2307 {
2308 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2309 machine_mode mmode = GET_MODE (new_rtx);
2310 class mem_attrs *defattrs;
2311
2312 mem_attrs attrs (*get_mem_attrs (memref));
2313 defattrs = mode_mem_attrs[(int) mmode];
2314 attrs.expr = NULL_TREE;
2315 attrs.offset_known_p = false;
2316 attrs.size_known_p = defattrs->size_known_p;
2317 attrs.size = defattrs->size;
2318 attrs.align = defattrs->align;
2319
2320 /* If there are no changes, just return the original memory reference. */
2321 if (new_rtx == memref)
2322 {
2323 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2324 return new_rtx;
2325
2326 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2327 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2328 }
2329
2330 set_mem_attrs (new_rtx, &attrs);
2331 return new_rtx;
2332 }
2333
2334 /* Return a memory reference like MEMREF, but with its mode changed
2335 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2336 nonzero, the memory address is forced to be valid.
2337 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2338 and the caller is responsible for adjusting the MEMREF base register.
2339 If ADJUST_OBJECT is zero, the underlying object associated with the
2340 memory reference is left unchanged and the caller is responsible for
2341 dealing with it. Otherwise, if the new memory reference is outside
2342 the underlying object, even partially, then the object is dropped.
2343 SIZE, if nonzero, is the size of an access in cases where MODE
2344 has no inherent size. */
2345
2346 rtx
2347 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2348 int validate, int adjust_address, int adjust_object,
2349 poly_int64 size)
2350 {
2351 rtx addr = XEXP (memref, 0);
2352 rtx new_rtx;
2353 scalar_int_mode address_mode;
2354 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2355 unsigned HOST_WIDE_INT max_align;
2356 #ifdef POINTERS_EXTEND_UNSIGNED
2357 scalar_int_mode pointer_mode
2358 = targetm.addr_space.pointer_mode (attrs.addrspace);
2359 #endif
2360
2361 /* VOIDmode means no mode change for change_address_1. */
2362 if (mode == VOIDmode)
2363 mode = GET_MODE (memref);
2364
2365 /* Take the size of non-BLKmode accesses from the mode. */
2366 defattrs = mode_mem_attrs[(int) mode];
2367 if (defattrs->size_known_p)
2368 size = defattrs->size;
2369
2370 /* If there are no changes, just return the original memory reference. */
2371 if (mode == GET_MODE (memref)
2372 && known_eq (offset, 0)
2373 && (known_eq (size, 0)
2374 || (attrs.size_known_p && known_eq (attrs.size, size)))
2375 && (!validate || memory_address_addr_space_p (mode, addr,
2376 attrs.addrspace)))
2377 return memref;
2378
2379 /* ??? Prefer to create garbage instead of creating shared rtl.
2380 This may happen even if offset is nonzero -- consider
2381 (plus (plus reg reg) const_int) -- so do this always. */
2382 addr = copy_rtx (addr);
2383
2384 /* Convert a possibly large offset to a signed value within the
2385 range of the target address space. */
2386 address_mode = get_address_mode (memref);
2387 offset = trunc_int_for_mode (offset, address_mode);
2388
2389 if (adjust_address)
2390 {
2391 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2392 object, we can merge it into the LO_SUM. */
2393 if (GET_MODE (memref) != BLKmode
2394 && GET_CODE (addr) == LO_SUM
2395 && known_in_range_p (offset,
2396 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2397 / BITS_PER_UNIT)))
2398 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2399 plus_constant (address_mode,
2400 XEXP (addr, 1), offset));
2401 #ifdef POINTERS_EXTEND_UNSIGNED
2402 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2403 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2404 the fact that pointers are not allowed to overflow. */
2405 else if (POINTERS_EXTEND_UNSIGNED > 0
2406 && GET_CODE (addr) == ZERO_EXTEND
2407 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2408 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2409 addr = gen_rtx_ZERO_EXTEND (address_mode,
2410 plus_constant (pointer_mode,
2411 XEXP (addr, 0), offset));
2412 #endif
2413 else
2414 addr = plus_constant (address_mode, addr, offset);
2415 }
2416
2417 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2418
2419 /* If the address is a REG, change_address_1 rightfully returns memref,
2420 but this would destroy memref's MEM_ATTRS. */
2421 if (new_rtx == memref && maybe_ne (offset, 0))
2422 new_rtx = copy_rtx (new_rtx);
2423
2424 /* Conservatively drop the object if we don't know where we start from. */
2425 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2426 {
2427 attrs.expr = NULL_TREE;
2428 attrs.alias = 0;
2429 }
2430
2431 /* Compute the new values of the memory attributes due to this adjustment.
2432 We add the offsets and update the alignment. */
2433 if (attrs.offset_known_p)
2434 {
2435 attrs.offset += offset;
2436
2437 /* Drop the object if the new left end is not within its bounds. */
2438 if (adjust_object && maybe_lt (attrs.offset, 0))
2439 {
2440 attrs.expr = NULL_TREE;
2441 attrs.alias = 0;
2442 }
2443 }
2444
2445 /* Compute the new alignment by taking the MIN of the alignment and the
2446 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2447 is zero.  */
2448 if (maybe_ne (offset, 0))
2449 {
2450 max_align = known_alignment (offset) * BITS_PER_UNIT;
2451 attrs.align = MIN (attrs.align, max_align);
2452 }
2453
2454 if (maybe_ne (size, 0))
2455 {
2456 /* Drop the object if the new right end is not within its bounds. */
2457 if (adjust_object && maybe_gt (offset + size, attrs.size))
2458 {
2459 attrs.expr = NULL_TREE;
2460 attrs.alias = 0;
2461 }
2462 attrs.size_known_p = true;
2463 attrs.size = size;
2464 }
2465 else if (attrs.size_known_p)
2466 {
2467 gcc_assert (!adjust_object);
2468 attrs.size -= offset;
2469 /* ??? The store_by_pieces machinery generates negative sizes,
2470 so don't assert for that here. */
2471 }
2472
2473 set_mem_attrs (new_rtx, &attrs);
2474
2475 return new_rtx;
2476 }
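
/* Note: the adjust_address and adjust_address_nv macros in emit-rtl.h
   are thin wrappers around this function; they differ only in whether
   the resulting address is validated.  */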
2477
2478 /* Return a memory reference like MEMREF, but with its mode changed
2479 to MODE and its address changed to ADDR, which is assumed to be
2480 MEMREF offset by OFFSET bytes. If VALIDATE is
2481 nonzero, the memory address is forced to be valid. */
2482
2483 rtx
2484 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2485 poly_int64 offset, int validate)
2486 {
2487 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2488 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2489 }
2490
2491 /* Return a memory reference like MEMREF, but whose address is changed by
2492 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2493 known to be in OFFSET (possibly 1). */
2494
2495 rtx
2496 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2497 {
2498 rtx new_rtx, addr = XEXP (memref, 0);
2499 machine_mode address_mode;
2500 class mem_attrs *defattrs;
2501
2502 mem_attrs attrs (*get_mem_attrs (memref));
2503 address_mode = get_address_mode (memref);
2504 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2505
2506 /* At this point we don't know _why_ the address is invalid. It
2507 could have secondary memory references, multiplies or anything.
2508
2509 However, if we did go and rearrange things, we can wind up not
2510 being able to recognize the magic around pic_offset_table_rtx.
2511 This stuff is fragile, and is yet another example of why it is
2512 bad to expose PIC machinery too early. */
2513 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2514 attrs.addrspace)
2515 && GET_CODE (addr) == PLUS
2516 && XEXP (addr, 0) == pic_offset_table_rtx)
2517 {
2518 addr = force_reg (GET_MODE (addr), addr);
2519 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2520 }
2521
2522 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2523 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2524
2525 /* If there are no changes, just return the original memory reference. */
2526 if (new_rtx == memref)
2527 return new_rtx;
2528
2529 /* Update the alignment to reflect the offset. Reset the offset, which
2530 we don't know. */
2531 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2532 attrs.offset_known_p = false;
2533 attrs.size_known_p = defattrs->size_known_p;
2534 attrs.size = defattrs->size;
2535 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2536 set_mem_attrs (new_rtx, &attrs);
2537 return new_rtx;
2538 }
2539
2540 /* Return a memory reference like MEMREF, but with its address changed to
2541 ADDR. The caller is asserting that the actual piece of memory pointed
2542 to is the same, just the form of the address is being changed, such as
2543 by putting something into a register. INPLACE is true if any changes
2544 can be made directly to MEMREF or false if MEMREF must be treated as
2545 immutable. */
2546
2547 rtx
2548 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2549 {
2550 /* change_address_1 copies the memory attribute structure without change
2551 and that's exactly what we want here. */
2552 update_temp_slot_address (XEXP (memref, 0), addr);
2553 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2554 }
2555
2556 /* Likewise, but the reference is not required to be valid. */
2557
2558 rtx
2559 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2560 {
2561 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2562 }
2563
2564 /* Return a memory reference like MEMREF, but with its mode widened to
2565 MODE and offset by OFFSET. This would be used by targets that e.g.
2566 cannot issue QImode memory operations and have to use SImode memory
2567 operations plus masking logic. */
2568
2569 rtx
2570 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2571 {
2572 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2573 poly_uint64 size = GET_MODE_SIZE (mode);
2574
2575 /* If there are no changes, just return the original memory reference. */
2576 if (new_rtx == memref)
2577 return new_rtx;
2578
2579 mem_attrs attrs (*get_mem_attrs (new_rtx));
2580
2581 /* If we don't know what offset we were at within the expression, then
2582 we can't know if we've overstepped the bounds. */
2583 if (! attrs.offset_known_p)
2584 attrs.expr = NULL_TREE;
2585
2586 while (attrs.expr)
2587 {
2588 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2589 {
2590 tree field = TREE_OPERAND (attrs.expr, 1);
2591 tree offset = component_ref_field_offset (attrs.expr);
2592
2593 if (! DECL_SIZE_UNIT (field))
2594 {
2595 attrs.expr = NULL_TREE;
2596 break;
2597 }
2598
2599 /* Is the field at least as large as the access? If so, ok,
2600 otherwise strip back to the containing structure. */
2601 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2602 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2603 && known_ge (attrs.offset, 0))
2604 break;
2605
2606 poly_uint64 suboffset;
2607 if (!poly_int_tree_p (offset, &suboffset))
2608 {
2609 attrs.expr = NULL_TREE;
2610 break;
2611 }
2612
2613 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2614 attrs.offset += suboffset;
2615 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2616 / BITS_PER_UNIT);
2617 }
2618 /* Similarly for the decl. */
2619 else if (DECL_P (attrs.expr)
2620 && DECL_SIZE_UNIT (attrs.expr)
2621 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2622 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2623 size)
2624 && known_ge (attrs.offset, 0))
2625 break;
2626 else
2627 {
2628 /* The widened memory access overflows the expression, which means
2629 that it could alias another expression. Zap it. */
2630 attrs.expr = NULL_TREE;
2631 break;
2632 }
2633 }
2634
2635 if (! attrs.expr)
2636 attrs.offset_known_p = false;
2637
2638 /* The widened memory may alias other stuff, so zap the alias set. */
2639 /* ??? Maybe use get_alias_set on any remaining expression. */
2640 attrs.alias = 0;
2641 attrs.size_known_p = true;
2642 attrs.size = size;
2643 set_mem_attrs (new_rtx, &attrs);
2644 return new_rtx;
2645 }
2646
2647 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2648 static GTY(()) tree spill_slot_decl;
2649
2650 tree
2651 get_spill_slot_decl (bool force_build_p)
2652 {
2653 tree d = spill_slot_decl;
2654 rtx rd;
2655
2656 if (d || !force_build_p)
2657 return d;
2658
2659 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2660 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2661 DECL_ARTIFICIAL (d) = 1;
2662 DECL_IGNORED_P (d) = 1;
2663 TREE_USED (d) = 1;
2664 spill_slot_decl = d;
2665
2666 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2667 MEM_NOTRAP_P (rd) = 1;
2668 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2669 attrs.alias = new_alias_set ();
2670 attrs.expr = d;
2671 set_mem_attrs (rd, &attrs);
2672 SET_DECL_RTL (d, rd);
2673
2674 return d;
2675 }
2676
2677 /* Given MEM, a result from assign_stack_local, fill in the memory
2678 attributes as appropriate for a register allocator spill slot.
2679 These slots are not aliasable by other memory. We arrange for
2680 them all to use a single MEM_EXPR, so that the aliasing code can
2681 work properly in the case of shared spill slots. */
2682
2683 void
2684 set_mem_attrs_for_spill (rtx mem)
2685 {
2686 rtx addr;
2687
2688 mem_attrs attrs (*get_mem_attrs (mem));
2689 attrs.expr = get_spill_slot_decl (true);
2690 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2691 attrs.addrspace = ADDR_SPACE_GENERIC;
2692
2693 /* We expect the incoming memory to be of the form:
2694 (mem:MODE (plus (reg sfp) (const_int offset)))
2695 with perhaps the plus missing for offset = 0. */
2696 addr = XEXP (mem, 0);
2697 attrs.offset_known_p = true;
2698 strip_offset (addr, &attrs.offset);
2699
2700 set_mem_attrs (mem, &attrs);
2701 MEM_NOTRAP_P (mem) = 1;
2702 }
2703
2704 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2705
2706 rtx_code_label *
2707 gen_label_rtx (void)
2708 {
2709 return as_a <rtx_code_label *> (
2710 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2711 NULL, label_num++, NULL));
2712 }
2713
2714 /* For procedure integration. */
2715
2716 /* Install new pointers to the first and last insns in the chain.
2717 Also, set cur_insn_uid to one higher than the last in use.
2718 Used for an inline-procedure after copying the insn chain. */
2719
2720 void
2721 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2722 {
2723 rtx_insn *insn;
2724
2725 set_first_insn (first);
2726 set_last_insn (last);
2727 cur_insn_uid = 0;
2728
2729 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2730 {
2731 int debug_count = 0;
2732
2733 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2734 cur_debug_insn_uid = 0;
2735
2736 for (insn = first; insn; insn = NEXT_INSN (insn))
2737 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2738 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2739 else
2740 {
2741 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2742 if (DEBUG_INSN_P (insn))
2743 debug_count++;
2744 }
2745
2746 if (debug_count)
2747 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2748 else
2749 cur_debug_insn_uid++;
2750 }
2751 else
2752 for (insn = first; insn; insn = NEXT_INSN (insn))
2753 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2754
2755 cur_insn_uid++;
2756 }
2757
2758 /* Go through all the RTL insn bodies and copy any invalid shared
2759 structure. This routine should only be called once. */
2760
2761 static void
2762 unshare_all_rtl_1 (rtx_insn *insn)
2763 {
2764 /* Unshare just about everything else. */
2765 unshare_all_rtl_in_chain (insn);
2766
2767 /* Make sure the addresses of stack slots found outside the insn chain
2768 (such as, in DECL_RTL of a variable) are not shared
2769 with the insn chain.
2770
2771 This special care is necessary when the stack slot MEM does not
2772 actually appear in the insn chain. If it does appear, its address
2773 is unshared from all else at that point. */
2774 unsigned int i;
2775 rtx temp;
2776 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2777 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2778 }
2779
2780 /* Go through all the RTL insn bodies and copy any invalid shared
2781 structure, again. This is a fairly expensive thing to do so it
2782 should be done sparingly. */
2783
2784 void
2785 unshare_all_rtl_again (rtx_insn *insn)
2786 {
2787 rtx_insn *p;
2788 tree decl;
2789
2790 for (p = insn; p; p = NEXT_INSN (p))
2791 if (INSN_P (p))
2792 {
2793 reset_used_flags (PATTERN (p));
2794 reset_used_flags (REG_NOTES (p));
2795 if (CALL_P (p))
2796 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2797 }
2798
2799 /* Make sure that virtual stack slots are not shared. */
2800 set_used_decls (DECL_INITIAL (cfun->decl));
2801
2802 /* Make sure that virtual parameters are not shared. */
2803 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2804 set_used_flags (DECL_RTL (decl));
2805
2806 rtx temp;
2807 unsigned int i;
2808 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2809 reset_used_flags (temp);
2810
2811 unshare_all_rtl_1 (insn);
2812 }
2813
2814 unsigned int
2815 unshare_all_rtl (void)
2816 {
2817 unshare_all_rtl_1 (get_insns ());
2818
2819 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2820 {
2821 if (DECL_RTL_SET_P (decl))
2822 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2823 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2824 }
2825
2826 return 0;
2827 }
2828
2829
2830 /* Check that ORIG is not marked when it should not be, and mark ORIG
2831 as in use.  Recursively do the same for subexpressions.  */
2832
2833 static void
2834 verify_rtx_sharing (rtx orig, rtx insn)
2835 {
2836 rtx x = orig;
2837 int i;
2838 enum rtx_code code;
2839 const char *format_ptr;
2840
2841 if (x == 0)
2842 return;
2843
2844 code = GET_CODE (x);
2845
2846 /* These types may be freely shared. */
2847
2848 switch (code)
2849 {
2850 case REG:
2851 case DEBUG_EXPR:
2852 case VALUE:
2853 CASE_CONST_ANY:
2854 case SYMBOL_REF:
2855 case LABEL_REF:
2856 case CODE_LABEL:
2857 case PC:
2858 case RETURN:
2859 case SIMPLE_RETURN:
2860 case SCRATCH:
2861 /* Each SCRATCH must be shared because it represents a distinct value.  */
2862 return;
2863 case CLOBBER:
2864 /* Share clobbers of hard registers, but do not share pseudo reg
2865 clobbers or clobbers of hard registers that originated as pseudos.
2866 This is needed to allow safe register renaming. */
2867 if (REG_P (XEXP (x, 0))
2868 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2869 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2870 return;
2871 break;
2872
2873 case CONST:
2874 if (shared_const_p (orig))
2875 return;
2876 break;
2877
2878 case MEM:
2879 /* A MEM is allowed to be shared if its address is constant. */
2880 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2881 || reload_completed || reload_in_progress)
2882 return;
2883
2884 break;
2885
2886 default:
2887 break;
2888 }
2889
2890 /* This rtx may not be shared. If it has already been seen,
2891 replace it with a copy of itself. */
2892 if (flag_checking && RTX_FLAG (x, used))
2893 {
2894 error ("invalid rtl sharing found in the insn");
2895 debug_rtx (insn);
2896 error ("shared rtx");
2897 debug_rtx (x);
2898 internal_error ("internal consistency failure");
2899 }
2900 gcc_assert (!RTX_FLAG (x, used));
2901
2902 RTX_FLAG (x, used) = 1;
2903
2904 /* Now scan the subexpressions recursively. */
2905
2906 format_ptr = GET_RTX_FORMAT (code);
2907
2908 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2909 {
2910 switch (*format_ptr++)
2911 {
2912 case 'e':
2913 verify_rtx_sharing (XEXP (x, i), insn);
2914 break;
2915
2916 case 'E':
2917 if (XVEC (x, i) != NULL)
2918 {
2919 int j;
2920 int len = XVECLEN (x, i);
2921
2922 for (j = 0; j < len; j++)
2923 {
2924 /* We allow sharing of ASM_OPERANDS inside a single
2925 instruction.  */
2926 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2927 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2928 == ASM_OPERANDS))
2929 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2930 else
2931 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2932 }
2933 }
2934 break;
2935 }
2936 }
2937 return;
2938 }
2939
2940 /* Reset used-flags for INSN. */
2941
2942 static void
2943 reset_insn_used_flags (rtx insn)
2944 {
2945 gcc_assert (INSN_P (insn));
2946 reset_used_flags (PATTERN (insn));
2947 reset_used_flags (REG_NOTES (insn));
2948 if (CALL_P (insn))
2949 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2950 }
2951
2952 /* Go through all the RTL insn bodies and clear all the USED bits. */
2953
2954 static void
2955 reset_all_used_flags (void)
2956 {
2957 rtx_insn *p;
2958
2959 for (p = get_insns (); p; p = NEXT_INSN (p))
2960 if (INSN_P (p))
2961 {
2962 rtx pat = PATTERN (p);
2963 if (GET_CODE (pat) != SEQUENCE)
2964 reset_insn_used_flags (p);
2965 else
2966 {
2967 gcc_assert (REG_NOTES (p) == NULL);
2968 for (int i = 0; i < XVECLEN (pat, 0); i++)
2969 {
2970 rtx insn = XVECEXP (pat, 0, i);
2971 if (INSN_P (insn))
2972 reset_insn_used_flags (insn);
2973 }
2974 }
2975 }
2976 }
2977
2978 /* Verify sharing in INSN. */
2979
2980 static void
2981 verify_insn_sharing (rtx insn)
2982 {
2983 gcc_assert (INSN_P (insn));
2984 verify_rtx_sharing (PATTERN (insn), insn);
2985 verify_rtx_sharing (REG_NOTES (insn), insn);
2986 if (CALL_P (insn))
2987 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2988 }
2989
2990 /* Go through all the RTL insn bodies and check that there is no unexpected
2991 sharing in between the subexpressions. */
2992
2993 DEBUG_FUNCTION void
2994 verify_rtl_sharing (void)
2995 {
2996 rtx_insn *p;
2997
2998 timevar_push (TV_VERIFY_RTL_SHARING);
2999
3000 reset_all_used_flags ();
3001
3002 for (p = get_insns (); p; p = NEXT_INSN (p))
3003 if (INSN_P (p))
3004 {
3005 rtx pat = PATTERN (p);
3006 if (GET_CODE (pat) != SEQUENCE)
3007 verify_insn_sharing (p);
3008 else
3009 for (int i = 0; i < XVECLEN (pat, 0); i++)
3010 {
3011 rtx insn = XVECEXP (pat, 0, i);
3012 if (INSN_P (insn))
3013 verify_insn_sharing (insn);
3014 }
3015 }
3016
3017 reset_all_used_flags ();
3018
3019 timevar_pop (TV_VERIFY_RTL_SHARING);
3020 }
3021
3022 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3023 Assumes the mark bits are cleared at entry. */
3024
3025 void
3026 unshare_all_rtl_in_chain (rtx_insn *insn)
3027 {
3028 for (; insn; insn = NEXT_INSN (insn))
3029 if (INSN_P (insn))
3030 {
3031 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3032 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3033 if (CALL_P (insn))
3034 CALL_INSN_FUNCTION_USAGE (insn)
3035 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3036 }
3037 }
3038
3039 /* Go through all virtual stack slots of a function and mark them as
3040 shared. We never replace the DECL_RTLs themselves with a copy,
3041 but expressions mentioned in a DECL_RTL cannot be shared with
3042 expressions in the instruction stream.
3043
3044 Note that reload may convert pseudo registers into memories in-place.
3045 Pseudo registers are always shared, but MEMs never are. Thus if we
3046 reset the used flags on MEMs in the instruction stream, we must set
3047 them again on MEMs that appear in DECL_RTLs. */
3048
3049 static void
3050 set_used_decls (tree blk)
3051 {
3052 tree t;
3053
3054 /* Mark decls. */
3055 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3056 if (DECL_RTL_SET_P (t))
3057 set_used_flags (DECL_RTL (t));
3058
3059 /* Now process sub-blocks. */
3060 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3061 set_used_decls (t);
3062 }
3063
3064 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3065 Recursively does the same for subexpressions. Uses
3066 copy_rtx_if_shared_1 to reduce stack space. */
3067
3068 rtx
3069 copy_rtx_if_shared (rtx orig)
3070 {
3071 copy_rtx_if_shared_1 (&orig);
3072 return orig;
3073 }
3074
3075 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3076 use. Recursively does the same for subexpressions. */
3077
3078 static void
3079 copy_rtx_if_shared_1 (rtx *orig1)
3080 {
3081 rtx x;
3082 int i;
3083 enum rtx_code code;
3084 rtx *last_ptr;
3085 const char *format_ptr;
3086 int copied = 0;
3087 int length;
3088
3089 /* Repeat is used to turn tail-recursion into iteration. */
3090 repeat:
3091 x = *orig1;
3092
3093 if (x == 0)
3094 return;
3095
3096 code = GET_CODE (x);
3097
3098 /* These types may be freely shared. */
3099
3100 switch (code)
3101 {
3102 case REG:
3103 case DEBUG_EXPR:
3104 case VALUE:
3105 CASE_CONST_ANY:
3106 case SYMBOL_REF:
3107 case LABEL_REF:
3108 case CODE_LABEL:
3109 case PC:
3110 case RETURN:
3111 case SIMPLE_RETURN:
3112 case SCRATCH:
3113 /* Each SCRATCH must be shared because it represents a distinct value.  */
3114 return;
3115 case CLOBBER:
3116 /* Share clobbers of hard registers, but do not share pseudo reg
3117 clobbers or clobbers of hard registers that originated as pseudos.
3118 This is needed to allow safe register renaming. */
3119 if (REG_P (XEXP (x, 0))
3120 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3121 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3122 return;
3123 break;
3124
3125 case CONST:
3126 if (shared_const_p (x))
3127 return;
3128 break;
3129
3130 case DEBUG_INSN:
3131 case INSN:
3132 case JUMP_INSN:
3133 case CALL_INSN:
3134 case NOTE:
3135 case BARRIER:
3136 /* The chain of insns is not being copied. */
3137 return;
3138
3139 default:
3140 break;
3141 }
3142
3143 /* This rtx may not be shared. If it has already been seen,
3144 replace it with a copy of itself. */
3145
3146 if (RTX_FLAG (x, used))
3147 {
3148 x = shallow_copy_rtx (x);
3149 copied = 1;
3150 }
3151 RTX_FLAG (x, used) = 1;
3152
3153 /* Now scan the subexpressions recursively.
3154 We can store any replaced subexpressions directly into X
3155 since we know X is not shared! Any vectors in X
3156 must be copied if X was copied. */
3157
3158 format_ptr = GET_RTX_FORMAT (code);
3159 length = GET_RTX_LENGTH (code);
3160 last_ptr = NULL;
3161
3162 for (i = 0; i < length; i++)
3163 {
3164 switch (*format_ptr++)
3165 {
3166 case 'e':
3167 if (last_ptr)
3168 copy_rtx_if_shared_1 (last_ptr);
3169 last_ptr = &XEXP (x, i);
3170 break;
3171
3172 case 'E':
3173 if (XVEC (x, i) != NULL)
3174 {
3175 int j;
3176 int len = XVECLEN (x, i);
3177
3178 /* Copy the vector iff we copied the rtx and the length
3179 is nonzero. */
3180 if (copied && len > 0)
3181 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3182
3183 /* Call recursively on all inside the vector. */
3184 for (j = 0; j < len; j++)
3185 {
3186 if (last_ptr)
3187 copy_rtx_if_shared_1 (last_ptr);
3188 last_ptr = &XVECEXP (x, i, j);
3189 }
3190 }
3191 break;
3192 }
3193 }
3194 *orig1 = x;
3195 if (last_ptr)
3196 {
3197 orig1 = last_ptr;
3198 goto repeat;
3199 }
3200 return;
3201 }
3202
3203 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3204
3205 static void
3206 mark_used_flags (rtx x, int flag)
3207 {
3208 int i, j;
3209 enum rtx_code code;
3210 const char *format_ptr;
3211 int length;
3212
3213 /* Repeat is used to turn tail-recursion into iteration. */
3214 repeat:
3215 if (x == 0)
3216 return;
3217
3218 code = GET_CODE (x);
3219
3220 /* These types may be freely shared so we needn't do any resetting
3221 for them. */
3222
3223 switch (code)
3224 {
3225 case REG:
3226 case DEBUG_EXPR:
3227 case VALUE:
3228 CASE_CONST_ANY:
3229 case SYMBOL_REF:
3230 case CODE_LABEL:
3231 case PC:
3232 case RETURN:
3233 case SIMPLE_RETURN:
3234 return;
3235
3236 case DEBUG_INSN:
3237 case INSN:
3238 case JUMP_INSN:
3239 case CALL_INSN:
3240 case NOTE:
3241 case LABEL_REF:
3242 case BARRIER:
3243 /* The chain of insns is not being copied. */
3244 return;
3245
3246 default:
3247 break;
3248 }
3249
3250 RTX_FLAG (x, used) = flag;
3251
3252 format_ptr = GET_RTX_FORMAT (code);
3253 length = GET_RTX_LENGTH (code);
3254
3255 for (i = 0; i < length; i++)
3256 {
3257 switch (*format_ptr++)
3258 {
3259 case 'e':
3260 if (i == length-1)
3261 {
3262 x = XEXP (x, i);
3263 goto repeat;
3264 }
3265 mark_used_flags (XEXP (x, i), flag);
3266 break;
3267
3268 case 'E':
3269 for (j = 0; j < XVECLEN (x, i); j++)
3270 mark_used_flags (XVECEXP (x, i, j), flag);
3271 break;
3272 }
3273 }
3274 }
3275
3276 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3277 to look for shared sub-parts. */
3278
3279 void
3280 reset_used_flags (rtx x)
3281 {
3282 mark_used_flags (x, 0);
3283 }
3284
3285 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3286 to look for shared sub-parts. */
3287
3288 void
3289 set_used_flags (rtx x)
3290 {
3291 mark_used_flags (x, 1);
3292 }
3293
3294 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3295 Return X or the rtx for the pseudo reg the value of X was copied into.
3296 OTHER must be valid as a SET_DEST. */
3297
3298 rtx
3299 make_safe_from (rtx x, rtx other)
3300 {
3301 while (1)
3302 switch (GET_CODE (other))
3303 {
3304 case SUBREG:
3305 other = SUBREG_REG (other);
3306 break;
3307 case STRICT_LOW_PART:
3308 case SIGN_EXTEND:
3309 case ZERO_EXTEND:
3310 other = XEXP (other, 0);
3311 break;
3312 default:
3313 goto done;
3314 }
3315 done:
3316 if ((MEM_P (other)
3317 && ! CONSTANT_P (x)
3318 && !REG_P (x)
3319 && GET_CODE (x) != SUBREG)
3320 || (REG_P (other)
3321 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3322 || reg_mentioned_p (other, x))))
3323 {
3324 rtx temp = gen_reg_rtx (GET_MODE (x));
3325 emit_move_insn (temp, x);
3326 return temp;
3327 }
3328 return x;
3329 }
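
/* For example, if OTHER is a MEM and X is an arbitrary PLUS expression,
   X is copied into a fresh pseudo so that a store through OTHER cannot
   change the value X denotes.  */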
3330
3331 /* Emission of insns (adding them to the doubly-linked list). */
3332
3333 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3334
3335 rtx_insn *
3336 get_last_insn_anywhere (void)
3337 {
3338 struct sequence_stack *seq;
3339 for (seq = get_current_sequence (); seq; seq = seq->next)
3340 if (seq->last != 0)
3341 return seq->last;
3342 return 0;
3343 }
3344
3345 /* Return the first nonnote insn emitted in current sequence or current
3346 function. This routine looks inside SEQUENCEs. */
3347
3348 rtx_insn *
3349 get_first_nonnote_insn (void)
3350 {
3351 rtx_insn *insn = get_insns ();
3352
3353 if (insn)
3354 {
3355 if (NOTE_P (insn))
3356 for (insn = next_insn (insn);
3357 insn && NOTE_P (insn);
3358 insn = next_insn (insn))
3359 continue;
3360 else
3361 {
3362 if (NONJUMP_INSN_P (insn)
3363 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3364 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3365 }
3366 }
3367
3368 return insn;
3369 }
3370
3371 /* Return the last nonnote insn emitted in current sequence or current
3372 function. This routine looks inside SEQUENCEs. */
3373
3374 rtx_insn *
3375 get_last_nonnote_insn (void)
3376 {
3377 rtx_insn *insn = get_last_insn ();
3378
3379 if (insn)
3380 {
3381 if (NOTE_P (insn))
3382 for (insn = previous_insn (insn);
3383 insn && NOTE_P (insn);
3384 insn = previous_insn (insn))
3385 continue;
3386 else
3387 {
3388 if (NONJUMP_INSN_P (insn))
3389 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3390 insn = seq->insn (seq->len () - 1);
3391 }
3392 }
3393
3394 return insn;
3395 }
3396
3397 /* Return the number of actual (non-debug) insns emitted in this
3398 function. */
3399
3400 int
3401 get_max_insn_count (void)
3402 {
3403 int n = cur_insn_uid;
3404
3405 /* The table size must be stable across -g, to avoid codegen
3406 differences due to debug insns, and not be affected by
3407 --param min-nondebug-insn-uid, to avoid excessive table size and to simplify
3408 debugging of -fcompare-debug failures. */
3409 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3410 n -= cur_debug_insn_uid;
3411 else
3412 n -= param_min_nondebug_insn_uid;
3413
3414 return n;
3415 }
3416
3417
3418 /* Return the next insn. If it is a SEQUENCE, return the first insn
3419 of the sequence. */
3420
3421 rtx_insn *
3422 next_insn (rtx_insn *insn)
3423 {
3424 if (insn)
3425 {
3426 insn = NEXT_INSN (insn);
3427 if (insn && NONJUMP_INSN_P (insn)
3428 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3429 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3430 }
3431
3432 return insn;
3433 }
3434
3435 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3436 of the sequence. */
3437
3438 rtx_insn *
3439 previous_insn (rtx_insn *insn)
3440 {
3441 if (insn)
3442 {
3443 insn = PREV_INSN (insn);
3444 if (insn && NONJUMP_INSN_P (insn))
3445 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3446 insn = seq->insn (seq->len () - 1);
3447 }
3448
3449 return insn;
3450 }
3451
3452 /* Return the next insn after INSN that is not a NOTE. This routine does not
3453 look inside SEQUENCEs. */
3454
3455 rtx_insn *
3456 next_nonnote_insn (rtx_insn *insn)
3457 {
3458 while (insn)
3459 {
3460 insn = NEXT_INSN (insn);
3461 if (insn == 0 || !NOTE_P (insn))
3462 break;
3463 }
3464
3465 return insn;
3466 }
3467
3468 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3469 routine does not look inside SEQUENCEs. */
3470
3471 rtx_insn *
3472 next_nondebug_insn (rtx_insn *insn)
3473 {
3474 while (insn)
3475 {
3476 insn = NEXT_INSN (insn);
3477 if (insn == 0 || !DEBUG_INSN_P (insn))
3478 break;
3479 }
3480
3481 return insn;
3482 }
3483
3484 /* Return the previous insn before INSN that is not a NOTE. This routine does
3485 not look inside SEQUENCEs. */
3486
3487 rtx_insn *
3488 prev_nonnote_insn (rtx_insn *insn)
3489 {
3490 while (insn)
3491 {
3492 insn = PREV_INSN (insn);
3493 if (insn == 0 || !NOTE_P (insn))
3494 break;
3495 }
3496
3497 return insn;
3498 }
3499
3500 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3501 This routine does not look inside SEQUENCEs. */
3502
3503 rtx_insn *
3504 prev_nondebug_insn (rtx_insn *insn)
3505 {
3506 while (insn)
3507 {
3508 insn = PREV_INSN (insn);
3509 if (insn == 0 || !DEBUG_INSN_P (insn))
3510 break;
3511 }
3512
3513 return insn;
3514 }
3515
3516 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3517 This routine does not look inside SEQUENCEs. */
3518
3519 rtx_insn *
3520 next_nonnote_nondebug_insn (rtx_insn *insn)
3521 {
3522 while (insn)
3523 {
3524 insn = NEXT_INSN (insn);
3525 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3526 break;
3527 }
3528
3529 return insn;
3530 }
3531
3532 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3533 but stop the search before we enter another basic block. This
3534 routine does not look inside SEQUENCEs. */
3535
3536 rtx_insn *
3537 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3538 {
3539 while (insn)
3540 {
3541 insn = NEXT_INSN (insn);
3542 if (insn == 0)
3543 break;
3544 if (DEBUG_INSN_P (insn))
3545 continue;
3546 if (!NOTE_P (insn))
3547 break;
3548 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3549 return NULL;
3550 }
3551
3552 return insn;
3553 }
3554
3555 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3556 This routine does not look inside SEQUENCEs. */
3557
3558 rtx_insn *
3559 prev_nonnote_nondebug_insn (rtx_insn *insn)
3560 {
3561 while (insn)
3562 {
3563 insn = PREV_INSN (insn);
3564 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3565 break;
3566 }
3567
3568 return insn;
3569 }
3570
3571 /* Return the previous insn before INSN that is not a NOTE nor
3572 DEBUG_INSN, but stop the search before we enter another basic
3573 block. This routine does not look inside SEQUENCEs. */
3574
3575 rtx_insn *
3576 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3577 {
3578 while (insn)
3579 {
3580 insn = PREV_INSN (insn);
3581 if (insn == 0)
3582 break;
3583 if (DEBUG_INSN_P (insn))
3584 continue;
3585 if (!NOTE_P (insn))
3586 break;
3587 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3588 return NULL;
3589 }
3590
3591 return insn;
3592 }
3593
3594 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3595 or 0, if there is none. This routine does not look inside
3596 SEQUENCEs. */
3597
3598 rtx_insn *
3599 next_real_insn (rtx_insn *insn)
3600 {
3601 while (insn)
3602 {
3603 insn = NEXT_INSN (insn);
3604 if (insn == 0 || INSN_P (insn))
3605 break;
3606 }
3607
3608 return insn;
3609 }
3610
3611 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3612 or 0, if there is none. This routine does not look inside
3613 SEQUENCEs. */
3614
3615 rtx_insn *
3616 prev_real_insn (rtx_insn *insn)
3617 {
3618 while (insn)
3619 {
3620 insn = PREV_INSN (insn);
3621 if (insn == 0 || INSN_P (insn))
3622 break;
3623 }
3624
3625 return insn;
3626 }
3627
3628 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3629 or 0, if there is none. This routine does not look inside
3630 SEQUENCEs. */
3631
3632 rtx_insn *
3633 next_real_nondebug_insn (rtx uncast_insn)
3634 {
3635 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3636
3637 while (insn)
3638 {
3639 insn = NEXT_INSN (insn);
3640 if (insn == 0 || NONDEBUG_INSN_P (insn))
3641 break;
3642 }
3643
3644 return insn;
3645 }
3646
3647 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3648 or 0, if there is none. This routine does not look inside
3649 SEQUENCEs. */
3650
3651 rtx_insn *
3652 prev_real_nondebug_insn (rtx_insn *insn)
3653 {
3654 while (insn)
3655 {
3656 insn = PREV_INSN (insn);
3657 if (insn == 0 || NONDEBUG_INSN_P (insn))
3658 break;
3659 }
3660
3661 return insn;
3662 }
3663
3664 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3665 This routine does not look inside SEQUENCEs. */
3666
3667 rtx_call_insn *
3668 last_call_insn (void)
3669 {
3670 rtx_insn *insn;
3671
3672 for (insn = get_last_insn ();
3673 insn && !CALL_P (insn);
3674 insn = PREV_INSN (insn))
3675 ;
3676
3677 return safe_as_a <rtx_call_insn *> (insn);
3678 }
3679
3680 /* Find the next insn after INSN that really does something. This routine
3681 does not look inside SEQUENCEs. After reload this also skips over
3682 standalone USE and CLOBBER insns. */
3683
3684 int
3685 active_insn_p (const rtx_insn *insn)
3686 {
3687 return (CALL_P (insn) || JUMP_P (insn)
3688 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3689 || (NONJUMP_INSN_P (insn)
3690 && (! reload_completed
3691 || (GET_CODE (PATTERN (insn)) != USE
3692 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3693 }
3694
3695 rtx_insn *
3696 next_active_insn (rtx_insn *insn)
3697 {
3698 while (insn)
3699 {
3700 insn = NEXT_INSN (insn);
3701 if (insn == 0 || active_insn_p (insn))
3702 break;
3703 }
3704
3705 return insn;
3706 }
3707
3708 /* Find the last insn before INSN that really does something. This routine
3709 does not look inside SEQUENCEs. After reload this also skips over
3710 standalone USE and CLOBBER insns. */
3711
3712 rtx_insn *
3713 prev_active_insn (rtx_insn *insn)
3714 {
3715 while (insn)
3716 {
3717 insn = PREV_INSN (insn);
3718 if (insn == 0 || active_insn_p (insn))
3719 break;
3720 }
3721
3722 return insn;
3723 }
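
/* For example (an illustrative sketch, not code from this file), a pass
   that wants to look at the instruction that will actually execute after
   INSN can write

     rtx_insn *follower = next_active_insn (insn);
     if (follower && JUMP_P (follower))
       ... INSN is immediately followed by a jump ...

   knowing that notes and, after reload, standalone USE/CLOBBER markers
   have been skipped.  */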
3724
3725 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3726
3727 static int
3728 find_auto_inc (const_rtx x, const_rtx reg)
3729 {
3730 subrtx_iterator::array_type array;
3731 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3732 {
3733 const_rtx x = *iter;
3734 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3735 && rtx_equal_p (reg, XEXP (x, 0)))
3736 return true;
3737 }
3738 return false;
3739 }
3740
3741 /* Increment the label use counts for all labels present in X. */
3742
3743 static void
3744 mark_label_nuses (rtx x)
3745 {
3746 enum rtx_code code;
3747 int i, j;
3748 const char *fmt;
3749
3750 code = GET_CODE (x);
3751 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3752 LABEL_NUSES (label_ref_label (x))++;
3753
3754 fmt = GET_RTX_FORMAT (code);
3755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3756 {
3757 if (fmt[i] == 'e')
3758 mark_label_nuses (XEXP (x, i));
3759 else if (fmt[i] == 'E')
3760 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3761 mark_label_nuses (XVECEXP (x, i, j));
3762 }
3763 }
3764
3765
3766 /* Try splitting insns that can be split for better scheduling.
3767 PAT is the pattern which might split.
3768 TRIAL is the insn providing PAT.
3769 LAST is nonzero if we should return the last insn of the sequence produced.
3770
3771 If this routine succeeds in splitting, it returns the first or last
3772 replacement insn depending on the value of LAST. Otherwise, it
3773 returns TRIAL. If the insn to be returned can be split, it will be. */
3774
3775 rtx_insn *
3776 try_split (rtx pat, rtx_insn *trial, int last)
3777 {
3778 rtx_insn *before, *after;
3779 rtx note;
3780 rtx_insn *seq, *tem;
3781 profile_probability probability;
3782 rtx_insn *insn_last, *insn;
3783 int njumps = 0;
3784 rtx_insn *call_insn = NULL;
3785
3786 if (any_condjump_p (trial)
3787 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3788 split_branch_probability
3789 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3790 else
3791 split_branch_probability = profile_probability::uninitialized ();
3792
3793 probability = split_branch_probability;
3794
3795 seq = split_insns (pat, trial);
3796
3797 split_branch_probability = profile_probability::uninitialized ();
3798
3799 if (!seq)
3800 return trial;
3801
3802 int split_insn_count = 0;
3803 /* Avoid an infinite loop if any insn of the result matches
3804 the original pattern. */
3805 insn_last = seq;
3806 while (1)
3807 {
3808 if (INSN_P (insn_last)
3809 && rtx_equal_p (PATTERN (insn_last), pat))
3810 return trial;
3811 split_insn_count++;
3812 if (!NEXT_INSN (insn_last))
3813 break;
3814 insn_last = NEXT_INSN (insn_last);
3815 }
3816
3817 /* We're not good at redistributing frame information if
3818 the split occurs before reload or if it results in more
3819 than one insn. */
3820 if (RTX_FRAME_RELATED_P (trial))
3821 {
3822 if (!reload_completed || split_insn_count != 1)
3823 return trial;
3824
3825 rtx_insn *new_insn = seq;
3826 rtx_insn *old_insn = trial;
3827 copy_frame_info_to_split_insn (old_insn, new_insn);
3828 }
3829
3830 /* We will be adding the new sequence to the function. The splitters
3831 may have introduced invalid RTL sharing, so unshare the sequence now. */
3832 unshare_all_rtl_in_chain (seq);
3833
3834 /* Mark labels and copy flags. */
3835 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3836 {
3837 if (JUMP_P (insn))
3838 {
3839 if (JUMP_P (trial))
3840 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3841 mark_jump_label (PATTERN (insn), insn, 0);
3842 njumps++;
3843 if (probability.initialized_p ()
3844 && any_condjump_p (insn)
3845 && !find_reg_note (insn, REG_BR_PROB, 0))
3846 {
3847 /* We can preserve the REG_BR_PROB notes only if exactly
3848 one jump is created, otherwise the machine description
3849 is responsible for this step using the
3850 split_branch_probability variable. */
3851 gcc_assert (njumps == 1);
3852 add_reg_br_prob_note (insn, probability);
3853 }
3854 }
3855 }
3856
3857 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3858 in SEQ and copy any additional information across. */
3859 if (CALL_P (trial))
3860 {
3861 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3862 if (CALL_P (insn))
3863 {
3864 gcc_assert (call_insn == NULL_RTX);
3865 call_insn = insn;
3866
3867 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3868 target may have explicitly specified. */
3869 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3870 while (*p)
3871 p = &XEXP (*p, 1);
3872 *p = CALL_INSN_FUNCTION_USAGE (trial);
3873
3874 /* If the old call was a sibling call, the new one must
3875 be too. */
3876 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3877 }
3878 }
3879
3880 /* Copy notes, particularly those related to the CFG. */
3881 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3882 {
3883 switch (REG_NOTE_KIND (note))
3884 {
3885 case REG_EH_REGION:
3886 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3887 break;
3888
3889 case REG_NORETURN:
3890 case REG_SETJMP:
3891 case REG_TM:
3892 case REG_CALL_NOCF_CHECK:
3893 case REG_CALL_ARG_LOCATION:
3894 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3895 {
3896 if (CALL_P (insn))
3897 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3898 }
3899 break;
3900
3901 case REG_NON_LOCAL_GOTO:
3902 case REG_LABEL_TARGET:
3903 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3904 {
3905 if (JUMP_P (insn))
3906 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3907 }
3908 break;
3909
3910 case REG_INC:
3911 if (!AUTO_INC_DEC)
3912 break;
3913
3914 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3915 {
3916 rtx reg = XEXP (note, 0);
3917 if (!FIND_REG_INC_NOTE (insn, reg)
3918 && find_auto_inc (PATTERN (insn), reg))
3919 add_reg_note (insn, REG_INC, reg);
3920 }
3921 break;
3922
3923 case REG_ARGS_SIZE:
3924 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3925 break;
3926
3927 case REG_CALL_DECL:
3928 case REG_UNTYPED_CALL:
3929 gcc_assert (call_insn != NULL_RTX);
3930 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3931 break;
3932
3933 default:
3934 break;
3935 }
3936 }
3937
3938 /* If there are LABELS inside the split insns, increment the
3939 usage count so we don't delete the labels. */
3940 if (INSN_P (trial))
3941 {
3942 insn = insn_last;
3943 while (insn != NULL_RTX)
3944 {
3945 /* JUMP_P insns have already been "marked" above. */
3946 if (NONJUMP_INSN_P (insn))
3947 mark_label_nuses (PATTERN (insn));
3948
3949 insn = PREV_INSN (insn);
3950 }
3951 }
3952
3953 before = PREV_INSN (trial);
3954 after = NEXT_INSN (trial);
3955
3956 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3957
3958 delete_insn (trial);
3959
3960 /* Recursively call try_split for each new insn created; by the
3961 time control returns here that insn will be fully split, so
3962 set LAST and continue from the insn after the one returned.
3963 We can't use next_active_insn here since AFTER may be a note.
3964 Ignore deleted insns, which can occur if not optimizing. */
3965 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3966 if (! tem->deleted () && INSN_P (tem))
3967 tem = try_split (PATTERN (tem), tem, 1);
3968
3969 /* Return either the first or the last insn, depending on which was
3970 requested. */
3971 return last
3972 ? (after ? PREV_INSN (after) : get_last_insn ())
3973 : NEXT_INSN (before);
3974 }
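
/* A typical call (illustrative sketch, not code from this file): hand
   try_split the insn's own pattern and ask for the last insn of any
   replacement:

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   If no define_split in the machine description applies, LAST is simply
   INSN again, so the call is safe to make unconditionally.  */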
3975
3976 /* Make and return an INSN rtx, initializing all its slots.
3977 Store PATTERN in the pattern slots. */
3978
3979 rtx_insn *
3980 make_insn_raw (rtx pattern)
3981 {
3982 rtx_insn *insn;
3983
3984 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3985
3986 INSN_UID (insn) = cur_insn_uid++;
3987 PATTERN (insn) = pattern;
3988 INSN_CODE (insn) = -1;
3989 REG_NOTES (insn) = NULL;
3990 INSN_LOCATION (insn) = curr_insn_location ();
3991 BLOCK_FOR_INSN (insn) = NULL;
3992
3993 #ifdef ENABLE_RTL_CHECKING
3994 if (insn
3995 && INSN_P (insn)
3996 && (returnjump_p (insn)
3997 || (GET_CODE (insn) == SET
3998 && SET_DEST (insn) == pc_rtx)))
3999 {
4000 warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
4001 debug_rtx (insn);
4002 }
4003 #endif
4004
4005 return insn;
4006 }
4007
4008 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4009
4010 static rtx_insn *
4011 make_debug_insn_raw (rtx pattern)
4012 {
4013 rtx_debug_insn *insn;
4014
4015 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4016 INSN_UID (insn) = cur_debug_insn_uid++;
4017 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4018 INSN_UID (insn) = cur_insn_uid++;
4019
4020 PATTERN (insn) = pattern;
4021 INSN_CODE (insn) = -1;
4022 REG_NOTES (insn) = NULL;
4023 INSN_LOCATION (insn) = curr_insn_location ();
4024 BLOCK_FOR_INSN (insn) = NULL;
4025
4026 return insn;
4027 }
4028
4029 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4030
4031 static rtx_insn *
4032 make_jump_insn_raw (rtx pattern)
4033 {
4034 rtx_jump_insn *insn;
4035
4036 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4037 INSN_UID (insn) = cur_insn_uid++;
4038
4039 PATTERN (insn) = pattern;
4040 INSN_CODE (insn) = -1;
4041 REG_NOTES (insn) = NULL;
4042 JUMP_LABEL (insn) = NULL;
4043 INSN_LOCATION (insn) = curr_insn_location ();
4044 BLOCK_FOR_INSN (insn) = NULL;
4045
4046 return insn;
4047 }
4048
4049 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4050
4051 static rtx_insn *
4052 make_call_insn_raw (rtx pattern)
4053 {
4054 rtx_call_insn *insn;
4055
4056 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4057 INSN_UID (insn) = cur_insn_uid++;
4058
4059 PATTERN (insn) = pattern;
4060 INSN_CODE (insn) = -1;
4061 REG_NOTES (insn) = NULL;
4062 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4063 INSN_LOCATION (insn) = curr_insn_location ();
4064 BLOCK_FOR_INSN (insn) = NULL;
4065
4066 return insn;
4067 }
4068
4069 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4070
4071 static rtx_note *
4072 make_note_raw (enum insn_note subtype)
4073 {
4074 /* Some notes are never created this way at all. These notes are
4075 only created by patching out insns. */
4076 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4077 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4078
4079 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4080 INSN_UID (note) = cur_insn_uid++;
4081 NOTE_KIND (note) = subtype;
4082 BLOCK_FOR_INSN (note) = NULL;
4083 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4084 return note;
4085 }
4086
4087 /* Link INSN into the doubly-linked list, between PREV and NEXT.
4088 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4089 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4090
4091 static inline void
4092 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4093 {
4094 SET_PREV_INSN (insn) = prev;
4095 SET_NEXT_INSN (insn) = next;
4096 if (prev != NULL)
4097 {
4098 SET_NEXT_INSN (prev) = insn;
4099 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4100 {
4101 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4102 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4103 }
4104 }
4105 if (next != NULL)
4106 {
4107 SET_PREV_INSN (next) = insn;
4108 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4109 {
4110 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4111 SET_PREV_INSN (sequence->insn (0)) = insn;
4112 }
4113 }
4114
4115 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4116 {
4117 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4118 SET_PREV_INSN (sequence->insn (0)) = prev;
4119 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4120 }
4121 }
4122
4123 /* Add INSN to the end of the doubly-linked list.
4124 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4125
4126 void
4127 add_insn (rtx_insn *insn)
4128 {
4129 rtx_insn *prev = get_last_insn ();
4130 link_insn_into_chain (insn, prev, NULL);
4131 if (get_insns () == NULL)
4132 set_first_insn (insn);
4133 set_last_insn (insn);
4134 }
4135
4136 /* Add INSN into the doubly-linked list after insn AFTER. */
4137
4138 static void
4139 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4140 {
4141 rtx_insn *next = NEXT_INSN (after);
4142
4143 gcc_assert (!optimize || !after->deleted ());
4144
4145 link_insn_into_chain (insn, after, next);
4146
4147 if (next == NULL)
4148 {
4149 struct sequence_stack *seq;
4150
4151 for (seq = get_current_sequence (); seq; seq = seq->next)
4152 if (after == seq->last)
4153 {
4154 seq->last = insn;
4155 break;
4156 }
4157 }
4158 }
4159
4160 /* Add INSN into the doubly-linked list before insn BEFORE. */
4161
4162 static void
4163 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4164 {
4165 rtx_insn *prev = PREV_INSN (before);
4166
4167 gcc_assert (!optimize || !before->deleted ());
4168
4169 link_insn_into_chain (insn, prev, before);
4170
4171 if (prev == NULL)
4172 {
4173 struct sequence_stack *seq;
4174
4175 for (seq = get_current_sequence (); seq; seq = seq->next)
4176 if (before == seq->first)
4177 {
4178 seq->first = insn;
4179 break;
4180 }
4181
4182 gcc_assert (seq);
4183 }
4184 }
4185
4186 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4187 If BB is NULL, an attempt is made to infer the bb from AFTER.
4188
4189 This and the next function should be the only functions called
4190 to insert an insn once delay slots have been filled since only
4191 they know how to update a SEQUENCE. */
4192
4193 void
4194 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4195 {
4196 add_insn_after_nobb (insn, after);
4197 if (!BARRIER_P (after)
4198 && !BARRIER_P (insn)
4199 && (bb = BLOCK_FOR_INSN (after)))
4200 {
4201 set_block_for_insn (insn, bb);
4202 if (INSN_P (insn))
4203 df_insn_rescan (insn);
4204 /* Should not happen, as the first insn in the BB is always
4205 either a NOTE or a LABEL. */
4206 if (BB_END (bb) == after
4207 /* Avoid clobbering of structure when creating new BB. */
4208 && !BARRIER_P (insn)
4209 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4210 BB_END (bb) = insn;
4211 }
4212 }
4213
4214 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4215 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4216
4217 This and the previous function should be the only functions called
4218 to insert an insn once delay slots have been filled since only
4219 they know how to update a SEQUENCE. */
4220
4221 void
4222 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4223 {
4224 add_insn_before_nobb (insn, before);
4225
4226 if (!bb
4227 && !BARRIER_P (before)
4228 && !BARRIER_P (insn))
4229 bb = BLOCK_FOR_INSN (before);
4230
4231 if (bb)
4232 {
4233 set_block_for_insn (insn, bb);
4234 if (INSN_P (insn))
4235 df_insn_rescan (insn);
4236 /* Should not happen, as the first insn in the BB is always either a
4237 NOTE or a LABEL. */
4238 gcc_assert (BB_HEAD (bb) != insn
4239 /* Avoid clobbering of structure when creating new BB. */
4240 || BARRIER_P (insn)
4241 || NOTE_INSN_BASIC_BLOCK_P (insn));
4242 }
4243 }
4244
4245 /* Replace INSN with a deleted instruction note (NOTE_INSN_DELETED). */
4246
4247 void
4248 set_insn_deleted (rtx_insn *insn)
4249 {
4250 if (INSN_P (insn))
4251 df_insn_delete (insn);
4252 PUT_CODE (insn, NOTE);
4253 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4254 }
4255
4256
4257 /* Unlink INSN from the insn chain.
4258
4259 This function knows how to handle sequences.
4260
4261 This function does not invalidate data flow information associated with
4262 INSN (i.e. it does not call df_insn_delete). That makes this function
4263 usable for merely disconnecting an insn from the chain, so that it can
4264 be re-emitted elsewhere later.
4265
4266 To later insert INSN elsewhere in the insn chain via add_insn and
4267 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4268 the caller. Nullifying them here breaks many insn chain walks.
4269
4270 To really delete an insn and related DF information, use delete_insn. */
4271
4272 void
4273 remove_insn (rtx_insn *insn)
4274 {
4275 rtx_insn *next = NEXT_INSN (insn);
4276 rtx_insn *prev = PREV_INSN (insn);
4277 basic_block bb;
4278
4279 if (prev)
4280 {
4281 SET_NEXT_INSN (prev) = next;
4282 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4283 {
4284 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4285 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4286 }
4287 }
4288 else
4289 {
4290 struct sequence_stack *seq;
4291
4292 for (seq = get_current_sequence (); seq; seq = seq->next)
4293 if (insn == seq->first)
4294 {
4295 seq->first = next;
4296 break;
4297 }
4298
4299 gcc_assert (seq);
4300 }
4301
4302 if (next)
4303 {
4304 SET_PREV_INSN (next) = prev;
4305 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4306 {
4307 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4308 SET_PREV_INSN (sequence->insn (0)) = prev;
4309 }
4310 }
4311 else
4312 {
4313 struct sequence_stack *seq;
4314
4315 for (seq = get_current_sequence (); seq; seq = seq->next)
4316 if (insn == seq->last)
4317 {
4318 seq->last = prev;
4319 break;
4320 }
4321
4322 gcc_assert (seq);
4323 }
4324
4325 /* Fix up basic block boundaries, if necessary. */
4326 if (!BARRIER_P (insn)
4327 && (bb = BLOCK_FOR_INSN (insn)))
4328 {
4329 if (BB_HEAD (bb) == insn)
4330 {
4331 /* Never ever delete the basic block note without deleting whole
4332 basic block. */
4333 gcc_assert (!NOTE_P (insn));
4334 BB_HEAD (bb) = next;
4335 }
4336 if (BB_END (bb) == insn)
4337 BB_END (bb) = prev;
4338 }
4339 }
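
/* Illustrative sketch of the detach-and-reattach idiom described above
   (not code from this file):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, new_place, NULL);

   where NEW_PLACE is a hypothetical insertion point; note the explicit
   nullification of the links before re-insertion.  */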
4340
4341 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4342
4343 void
4344 add_function_usage_to (rtx call_insn, rtx call_fusage)
4345 {
4346 gcc_assert (call_insn && CALL_P (call_insn));
4347
4348 /* Put the register usage information on the CALL. If there is already
4349 some usage information, put ours at the end. */
4350 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4351 {
4352 rtx link;
4353
4354 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4355 link = XEXP (link, 1))
4356 ;
4357
4358 XEXP (link, 1) = call_fusage;
4359 }
4360 else
4361 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4362 }
4363
4364 /* Delete all insns made since FROM.
4365 FROM becomes the new last instruction. */
4366
4367 void
4368 delete_insns_since (rtx_insn *from)
4369 {
4370 if (from == 0)
4371 set_first_insn (0);
4372 else
4373 SET_NEXT_INSN (from) = 0;
4374 set_last_insn (from);
4375 }
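
/* This enables the common "try and roll back" idiom (illustrative
   sketch, not code from this file):

     rtx_insn *last = get_last_insn ();
     ... tentatively emit some insns ...
     if (... the attempt did not work out ...)
       delete_insns_since (last);

   which leaves the chain exactly as it was before the attempt.  */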
4376
4377 /* This function is deprecated, please use sequences instead.
4378
4379 Move a consecutive bunch of insns to a different place in the chain.
4380 The insns to be moved are those between FROM and TO.
4381 They are moved to a new position after the insn AFTER.
4382 AFTER must not be FROM or TO or any insn in between.
4383
4384 This function does not know about SEQUENCEs and hence should not be
4385 called after delay-slot filling has been done. */
4386
4387 void
4388 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4389 {
4390 if (flag_checking)
4391 {
4392 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4393 gcc_assert (after != x);
4394 gcc_assert (after != to);
4395 }
4396
4397 /* Splice this bunch out of where it is now. */
4398 if (PREV_INSN (from))
4399 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4400 if (NEXT_INSN (to))
4401 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4402 if (get_last_insn () == to)
4403 set_last_insn (PREV_INSN (from));
4404 if (get_insns () == from)
4405 set_first_insn (NEXT_INSN (to));
4406
4407 /* Make the new neighbors point to it and it to them. */
4408 if (NEXT_INSN (after))
4409 SET_PREV_INSN (NEXT_INSN (after)) = to;
4410
4411 SET_NEXT_INSN (to) = NEXT_INSN (after);
4412 SET_PREV_INSN (from) = after;
4413 SET_NEXT_INSN (after) = from;
4414 if (after == get_last_insn ())
4415 set_last_insn (to);
4416 }
4417
4418 /* Same as function above, but take care to update BB boundaries. */
4419 void
4420 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4421 {
4422 rtx_insn *prev = PREV_INSN (from);
4423 basic_block bb, bb2;
4424
4425 reorder_insns_nobb (from, to, after);
4426
4427 if (!BARRIER_P (after)
4428 && (bb = BLOCK_FOR_INSN (after)))
4429 {
4430 rtx_insn *x;
4431 df_set_bb_dirty (bb);
4432
4433 if (!BARRIER_P (from)
4434 && (bb2 = BLOCK_FOR_INSN (from)))
4435 {
4436 if (BB_END (bb2) == to)
4437 BB_END (bb2) = prev;
4438 df_set_bb_dirty (bb2);
4439 }
4440
4441 if (BB_END (bb) == after)
4442 BB_END (bb) = to;
4443
4444 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4445 if (!BARRIER_P (x))
4446 df_insn_change_bb (x, bb);
4447 }
4448 }
4449
4450
4451 /* Emit insn(s) of given code and pattern
4452 at a specified place within the doubly-linked list.
4453
4454 All of the emit_foo global entry points accept an object
4455 X which is either an insn list or a PATTERN of a single
4456 instruction.
4457
4458 There are thus a few canonical ways to generate code and
4459 emit it at a specific place in the instruction stream. For
4460 example, consider the instruction named SPOT and the fact that
4461 we would like to emit some instructions before SPOT. We might
4462 do it like this:
4463
4464 start_sequence ();
4465 ... emit the new instructions ...
4466 insns_head = get_insns ();
4467 end_sequence ();
4468
4469 emit_insn_before (insns_head, SPOT);
4470
4471 It used to be common to generate SEQUENCE rtl instead, but that
4472 is a relic of the past which no longer occurs. The reason is that
4473 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4474 generated would almost certainly die right after it was created. */
4475
4476 static rtx_insn *
4477 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4478 basic_block bb,
4479 rtx_insn *(*make_raw) (rtx))
4480 {
4481 rtx_insn *insn;
4482
4483 gcc_assert (before);
4484
4485 if (x == NULL_RTX)
4486 return last;
4487
4488 switch (GET_CODE (x))
4489 {
4490 case DEBUG_INSN:
4491 case INSN:
4492 case JUMP_INSN:
4493 case CALL_INSN:
4494 case CODE_LABEL:
4495 case BARRIER:
4496 case NOTE:
4497 insn = as_a <rtx_insn *> (x);
4498 while (insn)
4499 {
4500 rtx_insn *next = NEXT_INSN (insn);
4501 add_insn_before (insn, before, bb);
4502 last = insn;
4503 insn = next;
4504 }
4505 break;
4506
4507 #ifdef ENABLE_RTL_CHECKING
4508 case SEQUENCE:
4509 gcc_unreachable ();
4510 break;
4511 #endif
4512
4513 default:
4514 last = (*make_raw) (x);
4515 add_insn_before (last, before, bb);
4516 break;
4517 }
4518
4519 return last;
4520 }
4521
4522 /* Make X be output before the instruction BEFORE. */
4523
4524 rtx_insn *
4525 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4526 {
4527 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4528 }
4529
4530 /* Make an instruction with body X and code JUMP_INSN
4531 and output it before the instruction BEFORE. */
4532
4533 rtx_jump_insn *
4534 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4535 {
4536 return as_a <rtx_jump_insn *> (
4537 emit_pattern_before_noloc (x, before, NULL, NULL,
4538 make_jump_insn_raw));
4539 }
4540
4541 /* Make an instruction with body X and code CALL_INSN
4542 and output it before the instruction BEFORE. */
4543
4544 rtx_insn *
4545 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4546 {
4547 return emit_pattern_before_noloc (x, before, NULL, NULL,
4548 make_call_insn_raw);
4549 }
4550
4551 /* Make an instruction with body X and code DEBUG_INSN
4552 and output it before the instruction BEFORE. */
4553
4554 rtx_insn *
4555 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4556 {
4557 return emit_pattern_before_noloc (x, before, NULL, NULL,
4558 make_debug_insn_raw);
4559 }
4560
4561 /* Make an insn of code BARRIER
4562 and output it before the insn BEFORE. */
4563
4564 rtx_barrier *
4565 emit_barrier_before (rtx_insn *before)
4566 {
4567 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4568
4569 INSN_UID (insn) = cur_insn_uid++;
4570
4571 add_insn_before (insn, before, NULL);
4572 return insn;
4573 }
4574
4575 /* Emit the label LABEL before the insn BEFORE. */
4576
4577 rtx_code_label *
4578 emit_label_before (rtx_code_label *label, rtx_insn *before)
4579 {
4580 gcc_checking_assert (INSN_UID (label) == 0);
4581 INSN_UID (label) = cur_insn_uid++;
4582 add_insn_before (label, before, NULL);
4583 return label;
4584 }
4585
4586 /* Helper for emit_insn_after, handles lists of instructions
4587 efficiently. */
4588
4589 static rtx_insn *
4590 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4591 {
4592 rtx_insn *last;
4593 rtx_insn *after_after;
4594 if (!bb && !BARRIER_P (after))
4595 bb = BLOCK_FOR_INSN (after);
4596
4597 if (bb)
4598 {
4599 df_set_bb_dirty (bb);
4600 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4601 if (!BARRIER_P (last))
4602 {
4603 set_block_for_insn (last, bb);
4604 df_insn_rescan (last);
4605 }
4606 if (!BARRIER_P (last))
4607 {
4608 set_block_for_insn (last, bb);
4609 df_insn_rescan (last);
4610 }
4611 if (BB_END (bb) == after)
4612 BB_END (bb) = last;
4613 }
4614 else
4615 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4616 continue;
4617
4618 after_after = NEXT_INSN (after);
4619
4620 SET_NEXT_INSN (after) = first;
4621 SET_PREV_INSN (first) = after;
4622 SET_NEXT_INSN (last) = after_after;
4623 if (after_after)
4624 SET_PREV_INSN (after_after) = last;
4625
4626 if (after == get_last_insn ())
4627 set_last_insn (last);
4628
4629 return last;
4630 }
4631
4632 static rtx_insn *
4633 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4634 rtx_insn *(*make_raw)(rtx))
4635 {
4636 rtx_insn *last = after;
4637
4638 gcc_assert (after);
4639
4640 if (x == NULL_RTX)
4641 return last;
4642
4643 switch (GET_CODE (x))
4644 {
4645 case DEBUG_INSN:
4646 case INSN:
4647 case JUMP_INSN:
4648 case CALL_INSN:
4649 case CODE_LABEL:
4650 case BARRIER:
4651 case NOTE:
4652 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4653 break;
4654
4655 #ifdef ENABLE_RTL_CHECKING
4656 case SEQUENCE:
4657 gcc_unreachable ();
4658 break;
4659 #endif
4660
4661 default:
4662 last = (*make_raw) (x);
4663 add_insn_after (last, after, bb);
4664 break;
4665 }
4666
4667 return last;
4668 }
4669
4670 /* Make X be output after the insn AFTER and set the BB of insn. If
4671 BB is NULL, an attempt is made to infer the BB from AFTER. */
4672
4673 rtx_insn *
4674 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4675 {
4676 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4677 }
4678
4679
4680 /* Make an insn of code JUMP_INSN with body X
4681 and output it after the insn AFTER. */
4682
4683 rtx_jump_insn *
4684 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4685 {
4686 return as_a <rtx_jump_insn *> (
4687 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4688 }
4689
4690 /* Make an instruction with body X and code CALL_INSN
4691 and output it after the instruction AFTER. */
4692
4693 rtx_insn *
4694 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4695 {
4696 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4697 }
4698
4699 /* Make an instruction with body X and code DEBUG_INSN
4700 and output it after the instruction AFTER. */
4701
4702 rtx_insn *
4703 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4704 {
4705 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4706 }
4707
4708 /* Make an insn of code BARRIER
4709 and output it after the insn AFTER. */
4710
4711 rtx_barrier *
4712 emit_barrier_after (rtx_insn *after)
4713 {
4714 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4715
4716 INSN_UID (insn) = cur_insn_uid++;
4717
4718 add_insn_after (insn, after, NULL);
4719 return insn;
4720 }
4721
4722 /* Emit the label LABEL after the insn AFTER. */
4723
4724 rtx_insn *
4725 emit_label_after (rtx_insn *label, rtx_insn *after)
4726 {
4727 gcc_checking_assert (INSN_UID (label) == 0);
4728 INSN_UID (label) = cur_insn_uid++;
4729 add_insn_after (label, after, NULL);
4730 return label;
4731 }
4732
4733 /* Notes require a bit of special handling: Some notes need to have their
4734 BLOCK_FOR_INSN set, others should never have it set, and some should
4735 have it set or clear depending on the context. */
4736
4737 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4738 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4739 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4740
4741 static bool
4742 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4743 {
4744 switch (subtype)
4745 {
4746 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4747 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4748 return true;
4749
4750 /* Notes for var tracking and EH region markers can appear between or
4751 inside basic blocks. If the caller is emitting on the basic block
4752 boundary, do not set BLOCK_FOR_INSN on the new note. */
4753 case NOTE_INSN_VAR_LOCATION:
4754 case NOTE_INSN_EH_REGION_BEG:
4755 case NOTE_INSN_EH_REGION_END:
4756 return on_bb_boundary_p;
4757
4758 /* Otherwise, BLOCK_FOR_INSN must be set. */
4759 default:
4760 return false;
4761 }
4762 }
4763
4764 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4765
4766 rtx_note *
4767 emit_note_after (enum insn_note subtype, rtx_insn *after)
4768 {
4769 rtx_note *note = make_note_raw (subtype);
4770 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4771 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4772
4773 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4774 add_insn_after_nobb (note, after);
4775 else
4776 add_insn_after (note, after, bb);
4777 return note;
4778 }
4779
4780 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4781
4782 rtx_note *
4783 emit_note_before (enum insn_note subtype, rtx_insn *before)
4784 {
4785 rtx_note *note = make_note_raw (subtype);
4786 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4787 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4788
4789 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4790 add_insn_before_nobb (note, before);
4791 else
4792 add_insn_before (note, before, bb);
4793 return note;
4794 }
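
/* For instance (an illustrative sketch), emitting an EH region marker
   right after the last insn of basic block BB:

     emit_note_after (NOTE_INSN_EH_REGION_END, BB_END (bb));

   goes through add_insn_after_nobb and leaves BLOCK_FOR_INSN unset on
   the new note, per note_outside_basic_block_p above.  */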
4795
4796 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4797 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4798
4799 static rtx_insn *
4800 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4801 rtx_insn *(*make_raw) (rtx))
4802 {
4803 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4804
4805 if (pattern == NULL_RTX || !loc)
4806 return last;
4807
4808 after = NEXT_INSN (after);
4809 while (1)
4810 {
4811 if (active_insn_p (after)
4812 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4813 && !INSN_LOCATION (after))
4814 INSN_LOCATION (after) = loc;
4815 if (after == last)
4816 break;
4817 after = NEXT_INSN (after);
4818 }
4819 return last;
4820 }
4821
4822 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4823 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4824 any DEBUG_INSNs. */
4825
4826 static rtx_insn *
4827 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4828 rtx_insn *(*make_raw) (rtx))
4829 {
4830 rtx_insn *prev = after;
4831
4832 if (skip_debug_insns)
4833 while (DEBUG_INSN_P (prev))
4834 prev = PREV_INSN (prev);
4835
4836 if (INSN_P (prev))
4837 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4838 make_raw);
4839 else
4840 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4841 }
4842
4843 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4844 rtx_insn *
4845 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4846 {
4847 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4848 }
4849
4850 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4851 rtx_insn *
4852 emit_insn_after (rtx pattern, rtx_insn *after)
4853 {
4854 return emit_pattern_after (pattern, after, true, make_insn_raw);
4855 }
4856
4857 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4858 rtx_jump_insn *
4859 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4860 {
4861 return as_a <rtx_jump_insn *> (
4862 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4863 }
4864
4865 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4866 rtx_jump_insn *
4867 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4868 {
4869 return as_a <rtx_jump_insn *> (
4870 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4871 }
4872
4873 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4874 rtx_insn *
4875 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4876 {
4877 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4878 }
4879
4880 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4881 rtx_insn *
4882 emit_call_insn_after (rtx pattern, rtx_insn *after)
4883 {
4884 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4885 }
4886
4887 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4888 rtx_insn *
4889 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4890 {
4891 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4892 }
4893
4894 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4895 rtx_insn *
4896 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4897 {
4898 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4899 }
4900
4901 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4902 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4903 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4904 CALL_INSN, etc. */
4905
4906 static rtx_insn *
4907 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4908 bool insnp, rtx_insn *(*make_raw) (rtx))
4909 {
4910 rtx_insn *first = PREV_INSN (before);
4911 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4912 insnp ? before : NULL,
4913 NULL, make_raw);
4914
4915 if (pattern == NULL_RTX || !loc)
4916 return last;
4917
4918 if (!first)
4919 first = get_insns ();
4920 else
4921 first = NEXT_INSN (first);
4922 while (1)
4923 {
4924 if (active_insn_p (first)
4925 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4926 && !INSN_LOCATION (first))
4927 INSN_LOCATION (first) = loc;
4928 if (first == last)
4929 break;
4930 first = NEXT_INSN (first);
4931 }
4932 return last;
4933 }
4934
4935 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4936 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4937 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4938 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4939
4940 static rtx_insn *
4941 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4942 bool insnp, rtx_insn *(*make_raw) (rtx))
4943 {
4944 rtx_insn *next = before;
4945
4946 if (skip_debug_insns)
4947 while (DEBUG_INSN_P (next))
4948 next = PREV_INSN (next);
4949
4950 if (INSN_P (next))
4951 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4952 insnp, make_raw);
4953 else
4954 return emit_pattern_before_noloc (pattern, before,
4955 insnp ? before : NULL,
4956 NULL, make_raw);
4957 }
4958
4959 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4960 rtx_insn *
4961 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4962 {
4963 return emit_pattern_before_setloc (pattern, before, loc, true,
4964 make_insn_raw);
4965 }
4966
4967 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4968 rtx_insn *
4969 emit_insn_before (rtx pattern, rtx_insn *before)
4970 {
4971 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4972 }
4973
4974 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4975 rtx_jump_insn *
4976 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4977 {
4978 return as_a <rtx_jump_insn *> (
4979 emit_pattern_before_setloc (pattern, before, loc, false,
4980 make_jump_insn_raw));
4981 }
4982
4983 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4984 rtx_jump_insn *
4985 emit_jump_insn_before (rtx pattern, rtx_insn *before)
4986 {
4987 return as_a <rtx_jump_insn *> (
4988 emit_pattern_before (pattern, before, true, false,
4989 make_jump_insn_raw));
4990 }
4991
4992 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4993 rtx_insn *
4994 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4995 {
4996 return emit_pattern_before_setloc (pattern, before, loc, false,
4997 make_call_insn_raw);
4998 }
4999
5000 /* Like emit_call_insn_before_noloc,
5001 but set INSN_LOCATION according to BEFORE. */
5002 rtx_insn *
5003 emit_call_insn_before (rtx pattern, rtx_insn *before)
5004 {
5005 return emit_pattern_before (pattern, before, true, false,
5006 make_call_insn_raw);
5007 }
5008
5009 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5010 rtx_insn *
5011 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5012 {
5013 return emit_pattern_before_setloc (pattern, before, loc, false,
5014 make_debug_insn_raw);
5015 }
5016
5017 /* Like emit_debug_insn_before_noloc,
5018 but set INSN_LOCATION according to BEFORE. */
5019 rtx_insn *
5020 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5021 {
5022 return emit_pattern_before (pattern, before, false, false,
5023 make_debug_insn_raw);
5024 }
5025
5026 /* Take X and emit it at the end of the doubly-linked
5027 INSN list.
5028
5029 Returns the last insn emitted. */
5030
5031 rtx_insn *
5032 emit_insn (rtx x)
5033 {
5034 rtx_insn *last = get_last_insn ();
5035 rtx_insn *insn;
5036
5037 if (x == NULL_RTX)
5038 return last;
5039
5040 switch (GET_CODE (x))
5041 {
5042 case DEBUG_INSN:
5043 case INSN:
5044 case JUMP_INSN:
5045 case CALL_INSN:
5046 case CODE_LABEL:
5047 case BARRIER:
5048 case NOTE:
5049 insn = as_a <rtx_insn *> (x);
5050 while (insn)
5051 {
5052 rtx_insn *next = NEXT_INSN (insn);
5053 add_insn (insn);
5054 last = insn;
5055 insn = next;
5056 }
5057 break;
5058
5059 #ifdef ENABLE_RTL_CHECKING
5060 case JUMP_TABLE_DATA:
5061 case SEQUENCE:
5062 gcc_unreachable ();
5063 break;
5064 #endif
5065
5066 default:
5067 last = make_insn_raw (x);
5068 add_insn (last);
5069 break;
5070 }
5071
5072 return last;
5073 }
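
/* For example (illustrative sketch; DEST_REG and SRC_REG are
   hypothetical):

     emit_insn (gen_rtx_SET (dest_reg, src_reg));

   wraps the SET in a fresh INSN at the end of the chain, while

     emit_insn (seq_head);

   splices in a whole insn list previously built inside a sequence.  */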
5074
5075 /* Make an insn of code DEBUG_INSN with pattern X
5076 and add it to the end of the doubly-linked list. */
5077
5078 rtx_insn *
5079 emit_debug_insn (rtx x)
5080 {
5081 rtx_insn *last = get_last_insn ();
5082 rtx_insn *insn;
5083
5084 if (x == NULL_RTX)
5085 return last;
5086
5087 switch (GET_CODE (x))
5088 {
5089 case DEBUG_INSN:
5090 case INSN:
5091 case JUMP_INSN:
5092 case CALL_INSN:
5093 case CODE_LABEL:
5094 case BARRIER:
5095 case NOTE:
5096 insn = as_a <rtx_insn *> (x);
5097 while (insn)
5098 {
5099 rtx_insn *next = NEXT_INSN (insn);
5100 add_insn (insn);
5101 last = insn;
5102 insn = next;
5103 }
5104 break;
5105
5106 #ifdef ENABLE_RTL_CHECKING
5107 case JUMP_TABLE_DATA:
5108 case SEQUENCE:
5109 gcc_unreachable ();
5110 break;
5111 #endif
5112
5113 default:
5114 last = make_debug_insn_raw (x);
5115 add_insn (last);
5116 break;
5117 }
5118
5119 return last;
5120 }
5121
5122 /* Make an insn of code JUMP_INSN with pattern X
5123 and add it to the end of the doubly-linked list. */
5124
5125 rtx_insn *
5126 emit_jump_insn (rtx x)
5127 {
5128 rtx_insn *last = NULL;
5129 rtx_insn *insn;
5130
5131 switch (GET_CODE (x))
5132 {
5133 case DEBUG_INSN:
5134 case INSN:
5135 case JUMP_INSN:
5136 case CALL_INSN:
5137 case CODE_LABEL:
5138 case BARRIER:
5139 case NOTE:
5140 insn = as_a <rtx_insn *> (x);
5141 while (insn)
5142 {
5143 rtx_insn *next = NEXT_INSN (insn);
5144 add_insn (insn);
5145 last = insn;
5146 insn = next;
5147 }
5148 break;
5149
5150 #ifdef ENABLE_RTL_CHECKING
5151 case JUMP_TABLE_DATA:
5152 case SEQUENCE:
5153 gcc_unreachable ();
5154 break;
5155 #endif
5156
5157 default:
5158 last = make_jump_insn_raw (x);
5159 add_insn (last);
5160 break;
5161 }
5162
5163 return last;
5164 }
5165
5166 /* Make an insn of code CALL_INSN with pattern X
5167 and add it to the end of the doubly-linked list. */
5168
5169 rtx_insn *
5170 emit_call_insn (rtx x)
5171 {
5172 rtx_insn *insn;
5173
5174 switch (GET_CODE (x))
5175 {
5176 case DEBUG_INSN:
5177 case INSN:
5178 case JUMP_INSN:
5179 case CALL_INSN:
5180 case CODE_LABEL:
5181 case BARRIER:
5182 case NOTE:
5183 insn = emit_insn (x);
5184 break;
5185
5186 #ifdef ENABLE_RTL_CHECKING
5187 case SEQUENCE:
5188 case JUMP_TABLE_DATA:
5189 gcc_unreachable ();
5190 break;
5191 #endif
5192
5193 default:
5194 insn = make_call_insn_raw (x);
5195 add_insn (insn);
5196 break;
5197 }
5198
5199 return insn;
5200 }
5201
5202 /* Add the label LABEL to the end of the doubly-linked list. */
5203
5204 rtx_code_label *
5205 emit_label (rtx uncast_label)
5206 {
5207 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5208
5209 gcc_checking_assert (INSN_UID (label) == 0);
5210 INSN_UID (label) = cur_insn_uid++;
5211 add_insn (label);
5212 return label;
5213 }
5214
5215 /* Make an insn of code JUMP_TABLE_DATA
5216 and add it to the end of the doubly-linked list. */
5217
5218 rtx_jump_table_data *
5219 emit_jump_table_data (rtx table)
5220 {
5221 rtx_jump_table_data *jump_table_data =
5222 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5223 INSN_UID (jump_table_data) = cur_insn_uid++;
5224 PATTERN (jump_table_data) = table;
5225 BLOCK_FOR_INSN (jump_table_data) = NULL;
5226 add_insn (jump_table_data);
5227 return jump_table_data;
5228 }
5229
5230 /* Make an insn of code BARRIER
5231 and add it to the end of the doubly-linked list. */
5232
5233 rtx_barrier *
5234 emit_barrier (void)
5235 {
5236 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5237 INSN_UID (barrier) = cur_insn_uid++;
5238 add_insn (barrier);
5239 return barrier;
5240 }
5241
5242 /* Emit a copy of note ORIG. */
5243
5244 rtx_note *
5245 emit_note_copy (rtx_note *orig)
5246 {
5247 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5248 rtx_note *note = make_note_raw (kind);
5249 NOTE_DATA (note) = NOTE_DATA (orig);
5250 add_insn (note);
5251 return note;
5252 }
5253
5254 /* Make an insn of code NOTE with kind KIND
5255 and add it to the end of the doubly-linked list. */
5256
5257 rtx_note *
5258 emit_note (enum insn_note kind)
5259 {
5260 rtx_note *note = make_note_raw (kind);
5261 add_insn (note);
5262 return note;
5263 }
5264
5265 /* Emit a clobber of lvalue X. */
5266
5267 rtx_insn *
5268 emit_clobber (rtx x)
5269 {
5270 /* CONCATs should not appear in the insn stream. */
5271 if (GET_CODE (x) == CONCAT)
5272 {
5273 emit_clobber (XEXP (x, 0));
5274 return emit_clobber (XEXP (x, 1));
5275 }
5276 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5277 }
5278
5279 /* Return a sequence of insns to clobber lvalue X. */
5280
5281 rtx_insn *
5282 gen_clobber (rtx x)
5283 {
5284 rtx_insn *seq;
5285
5286 start_sequence ();
5287 emit_clobber (x);
5288 seq = get_insns ();
5289 end_sequence ();
5290 return seq;
5291 }
5292
5293 /* Emit a use of rvalue X. */
5294
5295 rtx_insn *
5296 emit_use (rtx x)
5297 {
5298 /* CONCATs should not appear in the insn stream. */
5299 if (GET_CODE (x) == CONCAT)
5300 {
5301 emit_use (XEXP (x, 0));
5302 return emit_use (XEXP (x, 1));
5303 }
5304 return emit_insn (gen_rtx_USE (VOIDmode, x));
5305 }
5306
5307 /* Return a sequence of insns to use rvalue X. */
5308
5309 rtx_insn *
5310 gen_use (rtx x)
5311 {
5312 rtx_insn *seq;
5313
5314 start_sequence ();
5315 emit_use (x);
5316 seq = get_insns ();
5317 end_sequence ();
5318 return seq;
5319 }
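
/* Illustrative sketch: a USE is a convenient way to artificially extend
   the lifetime of a value.  Keeping the hypothetical register PSEUDO
   live past INSN:

     emit_insn_after (gen_use (pseudo), insn);

   is a matter of emitting a use of it right after that point.  */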
5320
5321 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5322 Return the set in INSN that such notes describe, or NULL if the notes
5323 have no meaning for INSN. */
5324
5325 rtx
5326 set_for_reg_notes (rtx insn)
5327 {
5328 rtx pat, reg;
5329
5330 if (!INSN_P (insn))
5331 return NULL_RTX;
5332
5333 pat = PATTERN (insn);
5334 if (GET_CODE (pat) == PARALLEL)
5335 {
5336 /* We do not use single_set because that ignores SETs of unused
5337 registers. REG_EQUAL and REG_EQUIV notes really do require the
5338 PARALLEL to have a single SET. */
5339 if (multiple_sets (insn))
5340 return NULL_RTX;
5341 pat = XVECEXP (pat, 0, 0);
5342 }
5343
5344 if (GET_CODE (pat) != SET)
5345 return NULL_RTX;
5346
5347 reg = SET_DEST (pat);
5348
5349 /* Notes apply to the contents of a STRICT_LOW_PART. */
5350 if (GET_CODE (reg) == STRICT_LOW_PART
5351 || GET_CODE (reg) == ZERO_EXTRACT)
5352 reg = XEXP (reg, 0);
5353
5354 /* Check that we have a register. */
5355 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5356 return NULL_RTX;
5357
5358 return pat;
5359 }
5360
5361 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5362 note of this type already exists, remove it first. */
5363
5364 rtx
5365 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5366 {
5367 rtx note = find_reg_note (insn, kind, NULL_RTX);
5368
5369 switch (kind)
5370 {
5371 case REG_EQUAL:
5372 case REG_EQUIV:
5373 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5374 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5375 return NULL_RTX;
5376
5377 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5378 It serves no useful purpose and breaks eliminate_regs. */
5379 if (GET_CODE (datum) == ASM_OPERANDS)
5380 return NULL_RTX;
5381
5382 /* Notes with side effects are dangerous. Even if the side-effect
5383 initially mirrors one in PATTERN (INSN), later optimizations
5384 might alter the way that the final register value is calculated
5385 and so move or alter the side-effect in some way. The note would
5386 then no longer be a valid substitution for SET_SRC. */
5387 if (side_effects_p (datum))
5388 return NULL_RTX;
5389 break;
5390
5391 default:
5392 break;
5393 }
5394
5395 if (note)
5396 XEXP (note, 0) = datum;
5397 else
5398 {
5399 add_reg_note (insn, kind, datum);
5400 note = REG_NOTES (insn);
5401 }
5402
5403 switch (kind)
5404 {
5405 case REG_EQUAL:
5406 case REG_EQUIV:
5407 df_notes_rescan (as_a <rtx_insn *> (insn));
5408 break;
5409 default:
5410 break;
5411 }
5412
5413 return note;
5414 }
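
/* E.g. (illustrative sketch) recording that INSN is known to compute the
   constant VALUE:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));

   Any pre-existing REG_EQUAL note on INSN is updated in place rather
   than duplicated.  */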
5415
5416 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5417 rtx
5418 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5419 {
5420 rtx set = set_for_reg_notes (insn);
5421
5422 if (set && SET_DEST (set) == dst)
5423 return set_unique_reg_note (insn, kind, datum);
5424 return NULL_RTX;
5425 }
5426
5427 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5428 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5429 is true.
5430
5431 If X is a label, it is simply added into the insn chain. */
5432
5433 rtx_insn *
5434 emit (rtx x, bool allow_barrier_p)
5435 {
5436 enum rtx_code code = classify_insn (x);
5437
5438 switch (code)
5439 {
5440 case CODE_LABEL:
5441 return emit_label (x);
5442 case INSN:
5443 return emit_insn (x);
5444 case JUMP_INSN:
5445 {
5446 rtx_insn *insn = emit_jump_insn (x);
5447 if (allow_barrier_p
5448 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5449 return emit_barrier ();
5450 return insn;
5451 }
5452 case CALL_INSN:
5453 return emit_call_insn (x);
5454 case DEBUG_INSN:
5455 return emit_debug_insn (x);
5456 default:
5457 gcc_unreachable ();
5458 }
5459 }
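
/* Illustrative sketch: classify_insn inspects X alone, so a caller can
   simply write

     rtx_insn *insn = emit (pattern, true);

   and receive an INSN, JUMP_INSN, CALL_INSN or DEBUG_INSN as PATTERN
   demands.  Note that when a barrier is emitted after an unconditional
   jump, the barrier itself is what gets returned.  */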
5460
5461 /* Space for free sequence stack entries. */
5462 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5463
5464 /* Begin emitting insns to a sequence. If this sequence will contain
5465 something that might cause the compiler to pop arguments to function
5466 calls (because those pops have previously been deferred; see
5467 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5468 before calling this function. That will ensure that the deferred
5469 pops are not accidentally emitted in the middle of this sequence. */
5470
5471 void
5472 start_sequence (void)
5473 {
5474 struct sequence_stack *tem;
5475
5476 if (free_sequence_stack != NULL)
5477 {
5478 tem = free_sequence_stack;
5479 free_sequence_stack = tem->next;
5480 }
5481 else
5482 tem = ggc_alloc<sequence_stack> ();
5483
5484 tem->next = get_current_sequence ()->next;
5485 tem->first = get_insns ();
5486 tem->last = get_last_insn ();
5487 get_current_sequence ()->next = tem;
5488
5489 set_first_insn (0);
5490 set_last_insn (0);
5491 }
5492
5493 /* Set up the insn chain starting with FIRST as the current sequence,
5494 saving the previously current one. See the documentation for
5495 start_sequence for more information about how to use this function. */
5496
5497 void
5498 push_to_sequence (rtx_insn *first)
5499 {
5500 rtx_insn *last;
5501
5502 start_sequence ();
5503
5504 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5505 ;
5506
5507 set_first_insn (first);
5508 set_last_insn (last);
5509 }
5510
5511 /* Like push_to_sequence, but take the last insn as an argument to avoid
5512 looping through the list. */
5513
5514 void
5515 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5516 {
5517 start_sequence ();
5518
5519 set_first_insn (first);
5520 set_last_insn (last);
5521 }
5522
5523 /* Set up the outer-level insn chain
5524 as the current sequence, saving the previously current one. */
5525
5526 void
5527 push_topmost_sequence (void)
5528 {
5529 struct sequence_stack *top;
5530
5531 start_sequence ();
5532
5533 top = get_topmost_sequence ();
5534 set_first_insn (top->first);
5535 set_last_insn (top->last);
5536 }
5537
5538 /* After emitting to the outer-level insn chain, update the outer-level
5539 insn chain, and restore the previous saved state. */
5540
5541 void
pop_topmost_sequence(void)5542 pop_topmost_sequence (void)
5543 {
5544 struct sequence_stack *top;
5545
5546 top = get_topmost_sequence ();
5547 top->first = get_insns ();
5548 top->last = get_last_insn ();
5549
5550 end_sequence ();
5551 }
5552
5553 /* After emitting to a sequence, restore previous saved state.
5554
5555 To get the contents of the sequence just made, you must call
5556 `get_insns' *before* calling here.
5557
5558 If the compiler might have deferred popping arguments while
5559 generating this sequence, and this sequence will not be immediately
5560 inserted into the instruction stream, use do_pending_stack_adjust
5561 before calling get_insns. That will ensure that the deferred
5562 pops are inserted into this sequence, and not into some random
5563 location in the instruction stream. See INHIBIT_DEFER_POP for more
5564 information about deferred popping of arguments. */
5565
5566 void
end_sequence(void)5567 end_sequence (void)
5568 {
5569 struct sequence_stack *tem = get_current_sequence ()->next;
5570
5571 set_first_insn (tem->first);
5572 set_last_insn (tem->last);
5573 get_current_sequence ()->next = tem->next;
5574
5575 memset (tem, 0, sizeof (*tem));
5576 tem->next = free_sequence_stack;
5577 free_sequence_stack = tem;
5578 }
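
/* Illustrative sketch (exposition only): the canonical pairing of
   start_sequence and end_sequence builds a detached chain of insns
   that the caller can splice in later.  The wrapper below is
   hypothetical; emit_move_insn, get_insns and end_sequence are the
   real interfaces.  As documented above, get_insns must be called
   before end_sequence.

     static rtx_insn *
     build_move_seq (rtx dest, rtx src)
     {
       start_sequence ();
       emit_move_insn (dest, src);
       rtx_insn *seq = get_insns ();
       end_sequence ();
       return seq;
     }

   The returned chain can then be inserted with, e.g.,
   emit_insn_after (seq, after).  */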

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
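
/* Illustrative sketch (exposition only): a pass that duplicates an insn
   typically copies the pattern with copy_insn and then uses copy_insn_1
   on the notes, so that SCRATCHes in the notes reuse the copies made
   for the pattern.  INSN here is a hypothetical rtx_insn.

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));

   The order matters: copy_insn resets the SCRATCH map, and the
   follow-up copy_insn_1 call still sees the SCRATCHes recorded while
   copying the pattern.  */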

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (param_min_nondebug_insn_uid)
    cur_insn_uid = param_min_nondebug_insn_uid;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Return the value of element I of CONST_VECTOR X as a wide_int.  */

wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements.  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
                           rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
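
/* Worked example (exposition only): take a stepped V8SI constant
   { 1, 2, 4, 6, 8, 10, 12, 14 }, encoded with npatterns == 1 and the
   three encoded elements { 1, 2, 4 }.  For element i == 5:
   count == 5, pattern == 0 and final_i == 2, so v1 == 2, v2 == 4 and
   diff == 2; the result is 4 + (5 - 2) * 2 == 10, matching the
   expanded vector above.  */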

/* Return the value of element I of CONST_VECTOR X.  */

rtx
const_vector_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  if (i < (unsigned int) XVECLEN (x, 0))
    return CONST_VECTOR_ENCODED_ELT (x, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    {
      /* Identify the pattern that contains element I and work out the index
         of the last encoded element for that pattern.  */
      unsigned int encoded_nelts = const_vector_encoded_nelts (x);
      unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return CONST_VECTOR_ENCODED_ELT (x, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return immed_wide_int_const (const_vector_int_elt (x, i),
                               GET_MODE_INNER (GET_MODE (x)));
}

/* Return true if X is a valid element for a CONST_VECTOR of the given
   mode.  */

bool
valid_for_const_vector_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
          || CONST_POLY_INT_P (x)
          || CONST_DOUBLE_AS_FLOAT_P (x)
          || CONST_FIXED_P (x));
}

/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  rtx_vector_builder builder (mode, 1, 1);
  builder.quick_push (elt);
  return builder.build ();
}
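
/* Illustrative sketch (exposition only): duplicating const1_rtx across
   a hypothetical V4SImode target gives the canonical all-ones integer
   vector.

     rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);

   The builder encodes this with one pattern of one element, so the
   CONST_VECTOR stores a single element no matter how wide the mode
   is.  */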

/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vector_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}

/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
                                        CONST_VECTOR_ENCODED_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
                                            CONST_VECTOR_ENCODED_ELT (x, 2),
                                            CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
        return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}

/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (valid_for_const_vector_p (mode, base)
              && valid_for_const_vector_p (mode, step));

  rtx_vector_builder builder (mode, 1, 3);
  builder.quick_push (base);
  for (int i = 1; i < 3; ++i)
    builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
                                             builder[i - 1], step));
  return builder.build ();
}
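
/* Illustrative sketch (exposition only): building the index vector
   { 0, 1, 2, 3 } for a hypothetical V4SImode target takes only the
   base and the step; the builder keeps the three encoded elements
   { 0, 1, 2 } and infers the rest.

     rtx iota = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);

   const_vec_series_p_1 applied to the result recovers base == 0 and
   step == 1.  */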

/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (valid_for_const_vector_p (mode, base)
      && valid_for_const_vector_p (mode, step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}

/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate (mode, el);
}

/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  unsigned int nunits = GET_NUM_ELEM (v);
  rtx_vector_builder builder (mode, nunits, 1);
  for (unsigned int i = 0; i < nunits; ++i)
    builder.quick_push (RTVEC_ELT (v, i));
  return builder.build (v);
}

/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && !opt_byte_mode.exists ())
        opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && !opt_word_mode.exists ())
        opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
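
/* Concrete example (exposition only; values depend on the target): on a
   typical 64-bit target with BITS_PER_UNIT == 8, BITS_PER_WORD == 64
   and POINTER_SIZE == 64, the loop above leaves byte_mode == QImode,
   word_mode == DImode and ptr_mode == DImode.  With 32-bit pointers on
   a 64-bit target (POINTER_SIZE == 32), ptr_mode becomes SImode while
   word_mode is unchanged.  */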

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
        const_tiny_rtx[i][(int) mode] =
          const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
        const_tiny_rtx[i][(int) mode] =
          const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  for (mode = MIN_MODE_BOOL;
       mode <= MAX_MODE_BOOL;
       mode = (machine_mode)((int)(mode) + 1))
    {
      const_tiny_rtx[0][(int) mode] = const0_rtx;
      if (mode == BImode)
        {
          const_tiny_rtx[1][(int) mode] = const_true_rtx;
          const_tiny_rtx[3][(int) mode] = const_true_rtx;
        }
      else
        {
          const_tiny_rtx[1][(int) mode] = const1_rtx;
          const_tiny_rtx[3][(int) mode] = constm1_rtx;
        }
    }

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      if (GET_MODE_INNER (mode) == BImode)
        /* As for BImode, "all 1" and "all -1" are unsigned and signed
           interpretations of the same value.  */
        const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
      else
        const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
        = double_int_one.lshift (GET_MODE_FBIT (smode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
        = double_int_one.lshift (GET_MODE_FBIT (smode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
        = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
                                   /*prev_insn=*/NULL,
                                   /*next_insn=*/NULL,
                                   /*bb=*/NULL,
                                   /*pattern=*/NULL_RTX,
                                   /*location=*/-1,
                                   CODE_FOR_nothing,
                                   /*reg_notes=*/NULL_RTX);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  if (NONDEBUG_INSN_P (insn))
    mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        *ptail = duplicate_reg_note (link);
        ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
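
/* Illustrative sketch (exposition only): a pass that needs the same
   hard-register clobber in many places can request the shared rtx
   instead of allocating a fresh one each time; FLAGS_REGNUM stands for
   a hypothetical target-specific register number.

     rtx clob = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);

   Repeated calls with the same mode/register pair return the same
   GC-managed rtx, which is safe because CLOBBERs of hard registers may
   be shared (see copy_insn_1 above).  */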

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location information and the last location
   information, so that the data structures are built lazily only when
   some instructions in a given place are needed.  */
static location_t curr_location;

/* Allocate the insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Set the location of the insn chain starting at INSN to LOC.  */
void
set_insn_locations (rtx_insn *insn, location_t loc)
{
  while (insn)
    {
      if (INSN_P (insn))
        INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this
   insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
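
/* Illustrative sketch (exposition only): a backend expanding an atomic
   store would consult this predicate on both sides of the operation;
   emit_memory_barrier here stands for a hypothetical target barrier
   emitter.

     if (need_atomic_barrier_p (model, true))
       emit_memory_barrier ();
     emit_move_insn (mem, val);
     if (need_atomic_barrier_p (model, false))
       emit_memory_barrier ();

   For MEMMODEL_RELEASE the first call returns true and the second
   false, so a release store gets a barrier before it but none after;
   MEMMODEL_SEQ_CST would place barriers on both sides.  */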

/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
                                ? DImode
                                : int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
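
/* Concrete example (exposition only): on a byte-addressable target,
   gen_int_shift_amount (SImode, 3) returns gen_int_mode (3, DImode),
   i.e. (const_int 3); materializing the amount in a 64-bit mode
   ensures that no representable shift count is truncated.  */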

/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}


#include "gt-emit-rtl.h"