1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
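/* As a small illustrative sketch (not part of the original sources): after
   expansion, a simple increment of a pseudo register is represented by an
   insn whose pattern looks like

     (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   where the REG, PLUS and CONST_INT rtx's are built by routines in this
   file and by the generated gen_rtx_fmt_* helpers; the register number 100
   is, of course, only an example.  */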
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61 #include "predict.h"
62 #include "rtx-vector-builder.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "gimplify.h"
66
67 struct target_rtl default_target_rtl;
68 #if SWITCHABLE_TARGET
69 struct target_rtl *this_target_rtl = &default_target_rtl;
70 #endif
71
72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
73
74 /* Commonly used modes. */
75
76 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
77 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
78 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
79
80 /* Data structures maintained for the currently processed function in RTL form. */
81
82 struct rtl_data x_rtl;
83
84 /* Indexed by pseudo register number, gives the rtx for that pseudo.
85 Allocated in parallel with regno_pointer_align.
86 FIXME: We could put it into the emit_status struct, but gengtype is not
87 able to deal with a length attribute nested in top-level structures. */
88
89 rtx * regno_reg_rtx;
90
91 /* This is *not* reset after each function. It gives each CODE_LABEL
92 in the entire compilation a unique label number. */
93
94 static GTY(()) int label_num = 1;
95
96 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
97 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
98 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
99 is set only for MODE_INT and MODE_VECTOR_INT modes. */
100
101 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
102
103 rtx const_true_rtx;
104
105 REAL_VALUE_TYPE dconst0;
106 REAL_VALUE_TYPE dconst1;
107 REAL_VALUE_TYPE dconst2;
108 REAL_VALUE_TYPE dconstm1;
109 REAL_VALUE_TYPE dconsthalf;
110
111 /* Record fixed-point constant 0 and 1. */
112 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
113 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
114
115 /* We make one copy of (const_int C) where C is in
116 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
117 to save space during the compilation and simplify comparisons of
118 integers. */
119
120 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
121
122 /* Standard pieces of rtx, to be substituted directly into things. */
123 rtx pc_rtx;
124 rtx ret_rtx;
125 rtx simple_return_rtx;
126 rtx cc0_rtx;
127
128 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
129 this pointer should normally never be dereferenced), but is required to be
130 distinct from NULL_RTX. Currently used by peephole2 pass. */
131 rtx_insn *invalid_insn_rtx;
132
133 /* A hash table storing CONST_INTs whose absolute value is greater
134 than MAX_SAVED_CONST_INT. */
135
136 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
137 {
138 typedef HOST_WIDE_INT compare_type;
139
140 static hashval_t hash (rtx i);
141 static bool equal (rtx i, HOST_WIDE_INT h);
142 };
143
144 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
145
146 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
147 {
148 static hashval_t hash (rtx x);
149 static bool equal (rtx x, rtx y);
150 };
151
152 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
153
154 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
155 {
156 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
157
158 static hashval_t hash (rtx x);
159 static bool equal (rtx x, const compare_type &y);
160 };
161
162 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
163
164 /* A hash table storing register attribute structures. */
165 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
166 {
167 static hashval_t hash (reg_attrs *x);
168 static bool equal (reg_attrs *a, reg_attrs *b);
169 };
170
171 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
172
173 /* A hash table storing all CONST_DOUBLEs. */
174 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
175 {
176 static hashval_t hash (rtx x);
177 static bool equal (rtx x, rtx y);
178 };
179
180 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
181
182 /* A hash table storing all CONST_FIXEDs. */
183 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
184 {
185 static hashval_t hash (rtx x);
186 static bool equal (rtx x, rtx y);
187 };
188
189 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
190
191 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
192 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
193 #define first_label_num (crtl->emit.x_first_label_num)
194
195 static void set_used_decls (tree);
196 static void mark_label_nuses (rtx);
197 #if TARGET_SUPPORTS_WIDE_INT
198 static rtx lookup_const_wide_int (rtx);
199 #endif
200 static rtx lookup_const_double (rtx);
201 static rtx lookup_const_fixed (rtx);
202 static rtx gen_const_vector (machine_mode, int);
203 static void copy_rtx_if_shared_1 (rtx *orig);
204
205 /* Probability of the conditional branch currently being processed by try_split. */
206 profile_probability split_branch_probability;
207
208 /* Returns a hash code for X (which is really a CONST_INT). */
209
210 hashval_t
211 const_int_hasher::hash (rtx x)
212 {
213 return (hashval_t) INTVAL (x);
214 }
215
216 /* Returns nonzero if the value represented by X (which is really a
217 CONST_INT) is the same as that given by Y (which is really a
218 HOST_WIDE_INT *). */
219
220 bool
221 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
222 {
223 return (INTVAL (x) == y);
224 }
225
226 #if TARGET_SUPPORTS_WIDE_INT
227 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
228
229 hashval_t
230 const_wide_int_hasher::hash (rtx x)
231 {
232 int i;
233 unsigned HOST_WIDE_INT hash = 0;
234 const_rtx xr = x;
235
236 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
237 hash += CONST_WIDE_INT_ELT (xr, i);
238
239 return (hashval_t) hash;
240 }
241
242 /* Returns nonzero if the value represented by X (which is really a
243 CONST_WIDE_INT) is the same as that given by Y (which is really a
244 CONST_WIDE_INT). */
245
246 bool
247 const_wide_int_hasher::equal (rtx x, rtx y)
248 {
249 int i;
250 const_rtx xr = x;
251 const_rtx yr = y;
252 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
253 return false;
254
255 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
256 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
257 return false;
258
259 return true;
260 }
261 #endif
262
263 /* Returns a hash code for CONST_POLY_INT X. */
264
265 hashval_t
266 const_poly_int_hasher::hash (rtx x)
267 {
268 inchash::hash h;
269 h.add_int (GET_MODE (x));
270 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
271 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
272 return h.end ();
273 }
274
275 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
276
277 bool
278 const_poly_int_hasher::equal (rtx x, const compare_type &y)
279 {
280 if (GET_MODE (x) != y.first)
281 return false;
282 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
283 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
284 return false;
285 return true;
286 }
287
288 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
289 hashval_t
290 const_double_hasher::hash (rtx x)
291 {
292 const_rtx const value = x;
293 hashval_t h;
294
295 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
296 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
297 else
298 {
299 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
300 /* MODE is used in the comparison, so it should be in the hash. */
301 h ^= GET_MODE (value);
302 }
303 return h;
304 }
305
306 /* Returns nonzero if the value represented by X (really a ...)
307 is the same as that represented by Y (really a ...) */
308 bool
309 const_double_hasher::equal (rtx x, rtx y)
310 {
311 const_rtx const a = x, b = y;
312
313 if (GET_MODE (a) != GET_MODE (b))
314 return 0;
315 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
316 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
317 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
318 else
319 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
320 CONST_DOUBLE_REAL_VALUE (b));
321 }
322
323 /* Returns a hash code for X (which is really a CONST_FIXED). */
324
325 hashval_t
326 const_fixed_hasher::hash (rtx x)
327 {
328 const_rtx const value = x;
329 hashval_t h;
330
331 h = fixed_hash (CONST_FIXED_VALUE (value));
332 /* MODE is used in the comparison, so it should be in the hash. */
333 h ^= GET_MODE (value);
334 return h;
335 }
336
337 /* Returns nonzero if the value represented by X is the same as that
338 represented by Y. */
339
340 bool
341 const_fixed_hasher::equal (rtx x, rtx y)
342 {
343 const_rtx const a = x, b = y;
344
345 if (GET_MODE (a) != GET_MODE (b))
346 return 0;
347 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
348 }
349
350 /* Return true if the given memory attributes are equal. */
351
352 bool
353 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
354 {
355 if (p == q)
356 return true;
357 if (!p || !q)
358 return false;
359 return (p->alias == q->alias
360 && p->offset_known_p == q->offset_known_p
361 && (!p->offset_known_p || known_eq (p->offset, q->offset))
362 && p->size_known_p == q->size_known_p
363 && (!p->size_known_p || known_eq (p->size, q->size))
364 && p->align == q->align
365 && p->addrspace == q->addrspace
366 && (p->expr == q->expr
367 || (p->expr != NULL_TREE && q->expr != NULL_TREE
368 && operand_equal_p (p->expr, q->expr, 0))));
369 }
370
371 /* Set MEM's memory attributes so that they are the same as ATTRS. */
372
373 static void
374 set_mem_attrs (rtx mem, mem_attrs *attrs)
375 {
376 /* If everything is the default, we can just clear the attributes. */
377 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
378 {
379 MEM_ATTRS (mem) = 0;
380 return;
381 }
382
383 if (!MEM_ATTRS (mem)
384 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
385 {
386 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
387 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
388 }
389 }
390
391 /* Returns a hash code for X (which is really a reg_attrs *). */
392
393 hashval_t
394 reg_attr_hasher::hash (reg_attrs *x)
395 {
396 const reg_attrs *const p = x;
397
398 inchash::hash h;
399 h.add_ptr (p->decl);
400 h.add_poly_hwi (p->offset);
401 return h.end ();
402 }
403
404 /* Returns nonzero if the value represented by X is the same as that given by
405 Y. */
406
407 bool
408 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
409 {
410 const reg_attrs *const p = x;
411 const reg_attrs *const q = y;
412
413 return (p->decl == q->decl && known_eq (p->offset, q->offset));
414 }
415 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
416 into the hash table if one identical to it is not already in the table.
417 Return the shared structure, or NULL if everything is the default. */
418
419 static reg_attrs *
420 get_reg_attrs (tree decl, poly_int64 offset)
421 {
422 reg_attrs attrs;
423
424 /* If everything is the default, we can just return zero. */
425 if (decl == 0 && known_eq (offset, 0))
426 return 0;
427
428 attrs.decl = decl;
429 attrs.offset = offset;
430
431 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
432 if (*slot == 0)
433 {
434 *slot = ggc_alloc<reg_attrs> ();
435 memcpy (*slot, &attrs, sizeof (reg_attrs));
436 }
437
438 return *slot;
439 }
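/* A brief usage sketch: get_reg_attrs (NULL_TREE, 0) returns NULL (the
   "no attributes" case), while two calls with the same DECL and OFFSET
   return the same shared reg_attrs structure, so results can be compared
   with pointer equality.  */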
440
441
442 #if !HAVE_blockage
443 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
444 across this insn and to keep register equivalences from being used across it. */
445
446 rtx
447 gen_blockage (void)
448 {
449 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
450 MEM_VOLATILE_P (x) = true;
451 return x;
452 }
453 #endif
454
455
456 /* Set the mode and register number of X to MODE and REGNO. */
457
458 void
459 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
460 {
461 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
462 ? hard_regno_nregs (regno, mode)
463 : 1);
464 PUT_MODE_RAW (x, mode);
465 set_regno_raw (x, regno, nregs);
466 }
467
468 /* Initialize a fresh REG rtx with mode MODE and register REGNO. */
469
470 rtx
471 init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
472 {
473 set_mode_and_regno (x, mode, regno);
474 REG_ATTRS (x) = NULL;
475 ORIGINAL_REGNO (x) = regno;
476 return x;
477 }
478
479 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
480 don't attempt to share with the various global pieces of rtl (such as
481 frame_pointer_rtx). */
482
483 rtx
484 gen_raw_REG (machine_mode mode, unsigned int regno)
485 {
486 rtx x = rtx_alloc (REG MEM_STAT_INFO);
487 init_raw_REG (x, mode, regno);
488 return x;
489 }
490
491 /* There are some RTL codes that require special attention; the generation
492 functions do the raw handling. If you add to this list, modify
493 special_rtx in gengenrtl.c as well. */
494
495 rtx_expr_list *
496 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
497 {
498 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
499 expr_list));
500 }
501
502 rtx_insn_list *
503 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
504 {
505 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
506 insn_list));
507 }
508
509 rtx_insn *
510 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
511 basic_block bb, rtx pattern, int location, int code,
512 rtx reg_notes)
513 {
514 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
515 prev_insn, next_insn,
516 bb, pattern, location, code,
517 reg_notes));
518 }
519
520 rtx
521 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
522 {
523 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
524 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
525
526 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
527 if (const_true_rtx && arg == STORE_FLAG_VALUE)
528 return const_true_rtx;
529 #endif
530
531 /* Look up the CONST_INT in the hash table. */
532 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
533 INSERT);
534 if (*slot == 0)
535 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
536
537 return *slot;
538 }
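/* Usage sketch: every CONST_INT is shared, either through const_int_rtx
   for values in the saved range or through const_int_htab for larger ones,
   so GEN_INT (0) always yields const0_rtx and two CONST_INTs with equal
   value can be compared with ==.  */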
539
540 rtx
541 gen_int_mode (poly_int64 c, machine_mode mode)
542 {
543 c = trunc_int_for_mode (c, mode);
544 if (c.is_constant ())
545 return GEN_INT (c.coeffs[0]);
546 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
547 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
548 }
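/* Usage sketch: the value is truncated to MODE and sign-extended back, so
   gen_int_mode (0xf0, QImode) yields (const_int -16), the canonical form
   of the 8-bit pattern 0xf0.  */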
549
550 /* CONST_DOUBLEs might be created from pairs of integers, or from
551 REAL_VALUE_TYPEs. Also, their length is known only at run time,
552 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
553
554 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
555 hash table. If so, return its counterpart; otherwise add it
556 to the hash table and return it. */
557 static rtx
558 lookup_const_double (rtx real)
559 {
560 rtx *slot = const_double_htab->find_slot (real, INSERT);
561 if (*slot == 0)
562 *slot = real;
563
564 return *slot;
565 }
566
567 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
568 VALUE in mode MODE. */
569 rtx
570 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
571 {
572 rtx real = rtx_alloc (CONST_DOUBLE);
573 PUT_MODE (real, mode);
574
575 real->u.rv = value;
576
577 return lookup_const_double (real);
578 }
579
580 /* Determine whether FIXED, a CONST_FIXED, already exists in the
581 hash table. If so, return its counterpart; otherwise add it
582 to the hash table and return it. */
583
584 static rtx
585 lookup_const_fixed (rtx fixed)
586 {
587 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
588 if (*slot == 0)
589 *slot = fixed;
590
591 return *slot;
592 }
593
594 /* Return a CONST_FIXED rtx for a fixed-point value specified by
595 VALUE in mode MODE. */
596
597 rtx
598 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
599 {
600 rtx fixed = rtx_alloc (CONST_FIXED);
601 PUT_MODE (fixed, mode);
602
603 fixed->u.fv = value;
604
605 return lookup_const_fixed (fixed);
606 }
607
608 #if TARGET_SUPPORTS_WIDE_INT == 0
609 /* Constructs double_int from rtx CST. */
610
611 double_int
612 rtx_to_double_int (const_rtx cst)
613 {
614 double_int r;
615
616 if (CONST_INT_P (cst))
617 r = double_int::from_shwi (INTVAL (cst));
618 else if (CONST_DOUBLE_AS_INT_P (cst))
619 {
620 r.low = CONST_DOUBLE_LOW (cst);
621 r.high = CONST_DOUBLE_HIGH (cst);
622 }
623 else
624 gcc_unreachable ();
625
626 return r;
627 }
628 #endif
629
630 #if TARGET_SUPPORTS_WIDE_INT
631 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
632 If so, return its counterpart; otherwise add it to the hash table and
633 return it. */
634
635 static rtx
636 lookup_const_wide_int (rtx wint)
637 {
638 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
639 if (*slot == 0)
640 *slot = wint;
641
642 return *slot;
643 }
644 #endif
645
646 /* Return an rtx constant for V, given that the constant has mode MODE.
647 The returned rtx will be a CONST_INT if V fits, otherwise it will be
648 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
649 (if TARGET_SUPPORTS_WIDE_INT). */
650
651 static rtx
652 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
653 {
654 unsigned int len = v.get_len ();
655 /* Not scalar_int_mode because we also allow pointer bound modes. */
656 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
657
658 /* Allow truncation but not extension since we do not know if the
659 number is signed or unsigned. */
660 gcc_assert (prec <= v.get_precision ());
661
662 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
663 return gen_int_mode (v.elt (0), mode);
664
665 #if TARGET_SUPPORTS_WIDE_INT
666 {
667 unsigned int i;
668 rtx value;
669 unsigned int blocks_needed
670 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
671
672 if (len > blocks_needed)
673 len = blocks_needed;
674
675 value = const_wide_int_alloc (len);
676
677 /* It is so tempting to just put the mode in here. Must control
678 myself ... */
679 PUT_MODE (value, VOIDmode);
680 CWI_PUT_NUM_ELEM (value, len);
681
682 for (i = 0; i < len; i++)
683 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
684
685 return lookup_const_wide_int (value);
686 }
687 #else
688 return immed_double_const (v.elt (0), v.elt (1), mode);
689 #endif
690 }
691
692 #if TARGET_SUPPORTS_WIDE_INT == 0
693 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
694 of ints: I0 is the low-order word and I1 is the high-order word.
695 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
696 implied upper bits are copies of the high bit of i1. The value
697 itself is neither signed nor unsigned. Do not use this routine for
698 non-integer modes; convert to REAL_VALUE_TYPE and use
699 const_double_from_real_value. */
700
701 rtx
702 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
703 {
704 rtx value;
705 unsigned int i;
706
707 /* There are the following cases (note that there are no modes with
708 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
709
710 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
711 gen_int_mode.
712 2) If the value of the integer fits into a HOST_WIDE_INT anyway
713 (i.e., i1 consists only of copies of the sign bit, and the signs
714 of i0 and i1 are the same), then we return a CONST_INT for i0.
715 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
716 scalar_mode smode;
717 if (is_a <scalar_mode> (mode, &smode)
718 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
719 return gen_int_mode (i0, mode);
720
721 /* If this integer fits in one word, return a CONST_INT. */
722 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
723 return GEN_INT (i0);
724
725 /* We use VOIDmode for integers. */
726 value = rtx_alloc (CONST_DOUBLE);
727 PUT_MODE (value, VOIDmode);
728
729 CONST_DOUBLE_LOW (value) = i0;
730 CONST_DOUBLE_HIGH (value) = i1;
731
732 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
733 XWINT (value, i) = 0;
734
735 return lookup_const_double (value);
736 }
737 #endif
738
739 /* Return an rtx representation of C in mode MODE. */
740
741 rtx
742 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
743 {
744 if (c.is_constant ())
745 return immed_wide_int_const_1 (c.coeffs[0], mode);
746
747 /* Not scalar_int_mode because we also allow pointer bound modes. */
748 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
749
750 /* Allow truncation but not extension since we do not know if the
751 number is signed or unsigned. */
752 gcc_assert (prec <= c.coeffs[0].get_precision ());
753 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
754
755 /* See whether we already have an rtx for this constant. */
756 inchash::hash h;
757 h.add_int (mode);
758 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
759 h.add_wide_int (newc.coeffs[i]);
760 const_poly_int_hasher::compare_type typed_value (mode, newc);
761 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
762 h.end (), INSERT);
763 rtx x = *slot;
764 if (x)
765 return x;
766
767 /* Create a new rtx. There's a choice to be made here between installing
768 the actual mode of the rtx or leaving it as VOIDmode (for consistency
769 with CONST_INT). In practice the handling of the codes is different
770 enough that we get no benefit from using VOIDmode, and various places
771 assume that VOIDmode implies CONST_INT. Using the real mode seems like
772 the right long-term direction anyway. */
773 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
774 size_t extra_size = twi::extra_size (prec);
775 x = rtx_alloc_v (CONST_POLY_INT,
776 sizeof (struct const_poly_int_def) + extra_size);
777 PUT_MODE (x, mode);
778 CONST_POLY_INT_COEFFS (x).set_precision (prec);
779 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
780 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
781
782 *slot = x;
783 return x;
784 }
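/* Behavior sketch: a value representable in one sign-extended
   HOST_WIDE_INT comes back as a shared CONST_INT, so even a TImode -1 is
   (const_int -1); a TImode value such as 2^64 needs two elements on a
   64-bit host and becomes a CONST_WIDE_INT (or a VOIDmode CONST_DOUBLE
   when !TARGET_SUPPORTS_WIDE_INT).  */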
785
786 rtx
787 gen_rtx_REG (machine_mode mode, unsigned int regno)
788 {
789 /* In case the MD file explicitly references the frame pointer, have
790 all such references point to the same frame pointer. This is
791 used during frame pointer elimination to distinguish the explicit
792 references to these registers from pseudos that happened to be
793 assigned to them.
794
795 If we have eliminated the frame pointer or arg pointer, we will
796 be using it as a normal register, for example as a spill
797 register. In such cases, we might be accessing it in a mode that
798 is not Pmode and therefore cannot use the pre-allocated rtx.
799
800 Also don't do this when we are making new REGs in reload, since
801 we don't want to get confused with the real pointers. */
802
803 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
804 {
805 if (regno == FRAME_POINTER_REGNUM
806 && (!reload_completed || frame_pointer_needed))
807 return frame_pointer_rtx;
808
809 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
810 && regno == HARD_FRAME_POINTER_REGNUM
811 && (!reload_completed || frame_pointer_needed))
812 return hard_frame_pointer_rtx;
813 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
814 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
815 && regno == ARG_POINTER_REGNUM)
816 return arg_pointer_rtx;
817 #endif
818 #ifdef RETURN_ADDRESS_POINTER_REGNUM
819 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
820 return return_address_pointer_rtx;
821 #endif
822 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
823 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
824 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
825 return pic_offset_table_rtx;
826 if (regno == STACK_POINTER_REGNUM)
827 return stack_pointer_rtx;
828 }
829
830 #if 0
831 /* If the per-function register table has been set up, try to re-use
832 an existing entry in that table to avoid useless generation of RTL.
833
834 This code is disabled for now until we can fix the various backends
835 which depend on having non-shared hard registers in some cases. Long
836 term we want to re-enable this code as it can significantly cut down
837 on the amount of useless RTL that gets generated.
838
839 We'll also need to fix some code that runs after reload that wants to
840 set ORIGINAL_REGNO. */
841
842 if (cfun
843 && cfun->emit
844 && regno_reg_rtx
845 && regno < FIRST_PSEUDO_REGISTER
846 && reg_raw_mode[regno] == mode)
847 return regno_reg_rtx[regno];
848 #endif
849
850 return gen_raw_REG (mode, regno);
851 }
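/* Usage sketch: during normal expansion gen_rtx_REG (Pmode,
   STACK_POINTER_REGNUM) returns the shared stack_pointer_rtx rather than
   a fresh REG, whereas gen_raw_REG always allocates a new rtx.  */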
852
853 rtx
854 gen_rtx_MEM (machine_mode mode, rtx addr)
855 {
856 rtx rt = gen_rtx_raw_MEM (mode, addr);
857
858 /* This field is not cleared by the mere allocation of the rtx, so
859 we clear it here. */
860 MEM_ATTRS (rt) = 0;
861
862 return rt;
863 }
864
865 /* Generate a MEM referring to non-trapping constant memory. */
866
867 rtx
868 gen_const_mem (machine_mode mode, rtx addr)
869 {
870 rtx mem = gen_rtx_MEM (mode, addr);
871 MEM_READONLY_P (mem) = 1;
872 MEM_NOTRAP_P (mem) = 1;
873 return mem;
874 }
875
876 /* Generate a MEM referring to fixed portions of the frame, e.g., register
877 save areas. */
878
879 rtx
880 gen_frame_mem (machine_mode mode, rtx addr)
881 {
882 rtx mem = gen_rtx_MEM (mode, addr);
883 MEM_NOTRAP_P (mem) = 1;
884 set_mem_alias_set (mem, get_frame_alias_set ());
885 return mem;
886 }
887
888 /* Generate a MEM referring to a temporary use of the stack, not part
889 of the fixed stack frame. For example, something which is pushed
890 by a target splitter. */
891 rtx
892 gen_tmp_stack_mem (machine_mode mode, rtx addr)
893 {
894 rtx mem = gen_rtx_MEM (mode, addr);
895 MEM_NOTRAP_P (mem) = 1;
896 if (!cfun->calls_alloca)
897 set_mem_alias_set (mem, get_frame_alias_set ());
898 return mem;
899 }
900
901 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
902 this construct would be valid, and false otherwise. */
903
904 bool
905 validate_subreg (machine_mode omode, machine_mode imode,
906 const_rtx reg, poly_uint64 offset)
907 {
908 poly_uint64 isize = GET_MODE_SIZE (imode);
909 poly_uint64 osize = GET_MODE_SIZE (omode);
910
911 /* The sizes must be ordered, so that we know whether the subreg
912 is partial, paradoxical or complete. */
913 if (!ordered_p (isize, osize))
914 return false;
915
916 /* All subregs must be aligned. */
917 if (!multiple_p (offset, osize))
918 return false;
919
920 /* The subreg offset cannot be outside the inner object. */
921 if (maybe_ge (offset, isize))
922 return false;
923
924 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
925
926 /* ??? This should not be here. Temporarily continue to allow word_mode
927 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
928 Generally, backends are doing something sketchy but it'll take time to
929 fix them all. */
930 if (omode == word_mode)
931 ;
932 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
933 is the culprit here, and not the backends. */
934 else if (known_ge (osize, regsize) && known_ge (isize, osize))
935 ;
936 /* Allow component subregs of complex and vector. Though given the below
937 extraction rules, it's not always clear what that means. */
938 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
939 && GET_MODE_INNER (imode) == omode)
940 ;
941 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
942 i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0). This
943 surely isn't the cleanest way to represent this. It's questionable
944 if this ought to be represented at all -- why can't this all be hidden
945 in post-reload splitters that make arbitrary mode changes to the
946 registers themselves. */
947 else if (VECTOR_MODE_P (omode)
948 && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
949 ;
950 /* Subregs involving floating point modes are not allowed to
951 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
952 (subreg:SI (reg:DF) 0) isn't. */
953 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
954 {
955 if (! (known_eq (isize, osize)
956 /* LRA can use a subreg to store a floating-point value in
957 an integer mode.  Although the floating-point and the
958 integer modes need the same number of hard registers, the
959 size of the floating-point mode can be less than that of
960 the integer mode.  LRA also uses subregs for a register
961 that should be used in different modes in the same insn. */
962 || lra_in_progress))
963 return false;
964 }
965
966 /* Paradoxical subregs must have offset zero. */
967 if (maybe_gt (osize, isize))
968 return known_eq (offset, 0U);
969
970 /* This is a normal subreg. Verify that the offset is representable. */
971
972 /* For hard registers, we already have most of these rules collected in
973 subreg_offset_representable_p. */
974 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
975 {
976 unsigned int regno = REGNO (reg);
977
978 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
979 && GET_MODE_INNER (imode) == omode)
980 ;
981 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
982 return false;
983
984 return subreg_offset_representable_p (regno, imode, offset, omode);
985 }
986
987 /* The outer size must be ordered wrt the register size, otherwise
988 we wouldn't know at compile time how many registers the outer
989 mode occupies. */
990 if (!ordered_p (osize, regsize))
991 return false;
992
993 /* For pseudo registers, we want most of the same checks. Namely:
994
995 Assume that the pseudo register will be allocated to hard registers
996 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
997 the remainder must correspond to the lowpart of the containing hard
998 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
999 otherwise it is at the lowest offset.
1000
1001 Given that we've already checked the mode and offset alignment,
1002 we only have to check subblock subregs here. */
1003 if (maybe_lt (osize, regsize)
1004 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
1005 {
1006 /* It is invalid for the target to pick a register size for a mode
1007 that isn't ordered wrt the size of that mode. */
1008 poly_uint64 block_size = ordered_min (isize, regsize);
1009 unsigned int start_reg;
1010 poly_uint64 offset_within_reg;
1011 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
1012 || (BYTES_BIG_ENDIAN
1013 ? maybe_ne (offset_within_reg, block_size - osize)
1014 : maybe_ne (offset_within_reg, 0U)))
1015 return false;
1016 }
1017 return true;
1018 }
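/* Two illustrative cases of the rules above: (subreg:SI (reg:DI x) 1) is
   always rejected because the offset is not a multiple of the outer size,
   while an accepted subword subreg must pick the lowpart of the containing
   word, e.g. (subreg:SI (reg:DI x) 0) on little-endian targets and
   (subreg:SI (reg:DI x) 4) on a 64-bit big-endian target.  */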
1019
1020 rtx
1021 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1022 {
1023 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1024 return gen_rtx_raw_SUBREG (mode, reg, offset);
1025 }
1026
1027 /* Generate a SUBREG representing the least-significant part of REG if MODE
1028 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
1029
1030 rtx
1031 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1032 {
1033 machine_mode inmode;
1034
1035 inmode = GET_MODE (reg);
1036 if (inmode == VOIDmode)
1037 inmode = mode;
1038 return gen_rtx_SUBREG (mode, reg,
1039 subreg_lowpart_offset (mode, inmode));
1040 }
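/* Usage sketch: gen_lowpart_SUBREG (SImode, (reg:DI x)) produces
   (subreg:SI (reg:DI x) 0) on a little-endian target and
   (subreg:SI (reg:DI x) 4) on a typical big-endian target, since
   subreg_lowpart_offset supplies the byte offset of the low part.  */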
1041
1042 rtx
1043 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1044 enum var_init_status status)
1045 {
1046 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1047 PAT_VAR_LOCATION_STATUS (x) = status;
1048 return x;
1049 }
1050
1051
1052 /* Create an rtvec and store within it the RTXen passed in the arguments. */
1053
1054 rtvec
1055 gen_rtvec (int n, ...)
1056 {
1057 int i;
1058 rtvec rt_val;
1059 va_list p;
1060
1061 va_start (p, n);
1062
1063 /* Don't allocate an empty rtvec... */
1064 if (n == 0)
1065 {
1066 va_end (p);
1067 return NULL_RTVEC;
1068 }
1069
1070 rt_val = rtvec_alloc (n);
1071
1072 for (i = 0; i < n; i++)
1073 rt_val->elem[i] = va_arg (p, rtx);
1074
1075 va_end (p);
1076 return rt_val;
1077 }
1078
1079 rtvec
1080 gen_rtvec_v (int n, rtx *argp)
1081 {
1082 int i;
1083 rtvec rt_val;
1084
1085 /* Don't allocate an empty rtvec... */
1086 if (n == 0)
1087 return NULL_RTVEC;
1088
1089 rt_val = rtvec_alloc (n);
1090
1091 for (i = 0; i < n; i++)
1092 rt_val->elem[i] = *argp++;
1093
1094 return rt_val;
1095 }
1096
1097 rtvec
1098 gen_rtvec_v (int n, rtx_insn **argp)
1099 {
1100 int i;
1101 rtvec rt_val;
1102
1103 /* Don't allocate an empty rtvec... */
1104 if (n == 0)
1105 return NULL_RTVEC;
1106
1107 rt_val = rtvec_alloc (n);
1108
1109 for (i = 0; i < n; i++)
1110 rt_val->elem[i] = *argp++;
1111
1112 return rt_val;
1113 }
1114
1115
1116 /* Return the number of bytes between the start of an OUTER_MODE
1117 in-memory value and the start of an INNER_MODE in-memory value,
1118 given that the former is a lowpart of the latter. It may be a
1119 paradoxical lowpart, in which case the offset will be negative
1120 on big-endian targets. */
1121
1122 poly_int64
1123 byte_lowpart_offset (machine_mode outer_mode,
1124 machine_mode inner_mode)
1125 {
1126 if (paradoxical_subreg_p (outer_mode, inner_mode))
1127 return -subreg_lowpart_offset (inner_mode, outer_mode);
1128 else
1129 return subreg_lowpart_offset (outer_mode, inner_mode);
1130 }
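/* Illustration: with 4-byte SImode and 8-byte DImode,
   byte_lowpart_offset (SImode, DImode) is 0 on little-endian and 4 on a
   typical big-endian target, while the paradoxical direction
   byte_lowpart_offset (DImode, SImode) is 0 and -4 respectively.  */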
1131
1132 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1133 from address X. For paradoxical big-endian subregs this is a
1134 negative value, otherwise it's the same as OFFSET. */
1135
1136 poly_int64
1137 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1138 poly_uint64 offset)
1139 {
1140 if (paradoxical_subreg_p (outer_mode, inner_mode))
1141 {
1142 gcc_assert (known_eq (offset, 0U));
1143 return -subreg_lowpart_offset (inner_mode, outer_mode);
1144 }
1145 return offset;
1146 }
1147
1148 /* As above, but return the offset that existing subreg X would have
1149 if SUBREG_REG (X) were stored in memory. The only significant thing
1150 about the current SUBREG_REG is its mode. */
1151
1152 poly_int64
1153 subreg_memory_offset (const_rtx x)
1154 {
1155 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1156 SUBREG_BYTE (x));
1157 }
1158
1159 /* Generate a REG rtx for a new pseudo register of mode MODE.
1160 This pseudo is assigned the next sequential register number. */
1161
1162 rtx
1163 gen_reg_rtx (machine_mode mode)
1164 {
1165 rtx val;
1166 unsigned int align = GET_MODE_ALIGNMENT (mode);
1167
1168 gcc_assert (can_create_pseudo_p ());
1169
1170 /* If a virtual register with bigger mode alignment is generated,
1171 increase stack alignment estimation because it might be spilled
1172 to stack later. */
1173 if (SUPPORTS_STACK_ALIGNMENT
1174 && crtl->stack_alignment_estimated < align
1175 && !crtl->stack_realign_processed)
1176 {
1177 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1178 if (crtl->stack_alignment_estimated < min_align)
1179 crtl->stack_alignment_estimated = min_align;
1180 }
1181
1182 if (generating_concat_p
1183 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1184 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1185 {
1186 /* For complex modes, don't make a single pseudo.
1187 Instead, make a CONCAT of two pseudos.
1188 This allows noncontiguous allocation of the real and imaginary parts,
1189 which makes much better code. Besides, allocating DCmode
1190 pseudos overstrains reload on some machines like the 386. */
1191 rtx realpart, imagpart;
1192 machine_mode partmode = GET_MODE_INNER (mode);
1193
1194 realpart = gen_reg_rtx (partmode);
1195 imagpart = gen_reg_rtx (partmode);
1196 return gen_rtx_CONCAT (mode, realpart, imagpart);
1197 }
1198
1199 /* Do not call gen_reg_rtx with uninitialized crtl. */
1200 gcc_assert (crtl->emit.regno_pointer_align_length);
1201
1202 crtl->emit.ensure_regno_capacity ();
1203 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1204
1205 val = gen_raw_REG (mode, reg_rtx_no);
1206 regno_reg_rtx[reg_rtx_no++] = val;
1207 return val;
1208 }
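/* Usage sketch: while generating_concat_p is set, gen_reg_rtx (DCmode)
   does not allocate a single DCmode pseudo; it returns
   (concat:DC (reg:DF a) (reg:DF b)) built from two independently
   allocated DFmode pseudos, as described above.  */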
1209
1210 /* Make sure regno_pointer_align and regno_reg_rtx are large
1211 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1212
1213 void
1214 emit_status::ensure_regno_capacity ()
1215 {
1216 int old_size = regno_pointer_align_length;
1217
1218 if (reg_rtx_no < old_size)
1219 return;
1220
1221 int new_size = old_size * 2;
1222 while (reg_rtx_no >= new_size)
1223 new_size *= 2;
1224
1225 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1226 memset (tmp + old_size, 0, new_size - old_size);
1227 regno_pointer_align = (unsigned char *) tmp;
1228
1229 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1230 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1231 regno_reg_rtx = new1;
1232
1233 crtl->emit.regno_pointer_align_length = new_size;
1234 }
1235
1236 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1237
1238 bool
1239 reg_is_parm_p (rtx reg)
1240 {
1241 tree decl;
1242
1243 gcc_assert (REG_P (reg));
1244 decl = REG_EXPR (reg);
1245 return (decl && TREE_CODE (decl) == PARM_DECL);
1246 }
1247
1248 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1249 to the REG_OFFSET. */
1250
1251 static void
1252 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1253 {
1254 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1255 REG_OFFSET (reg) + offset);
1256 }
1257
1258 /* Generate a register with same attributes as REG, but with OFFSET
1259 added to the REG_OFFSET. */
1260
1261 rtx
1262 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1263 poly_int64 offset)
1264 {
1265 rtx new_rtx = gen_rtx_REG (mode, regno);
1266
1267 update_reg_offset (new_rtx, reg, offset);
1268 return new_rtx;
1269 }
1270
1271 /* Generate a new pseudo-register with the same attributes as REG, but
1272 with OFFSET added to the REG_OFFSET. */
1273
1274 rtx
1275 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1276 {
1277 rtx new_rtx = gen_reg_rtx (mode);
1278
1279 update_reg_offset (new_rtx, reg, offset);
1280 return new_rtx;
1281 }
1282
1283 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1284 new register is a (possibly paradoxical) lowpart of the old one. */
1285
1286 void
1287 adjust_reg_mode (rtx reg, machine_mode mode)
1288 {
1289 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1290 PUT_MODE (reg, mode);
1291 }
1292
1293 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1294 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1295
1296 void
1297 set_reg_attrs_from_value (rtx reg, rtx x)
1298 {
1299 poly_int64 offset;
1300 bool can_be_reg_pointer = true;
1301
1302 /* Don't call mark_reg_pointer for incompatible pointer sign
1303 extension. */
1304 while (GET_CODE (x) == SIGN_EXTEND
1305 || GET_CODE (x) == ZERO_EXTEND
1306 || GET_CODE (x) == TRUNCATE
1307 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1308 {
1309 #if defined(POINTERS_EXTEND_UNSIGNED)
1310 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1311 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1312 || (paradoxical_subreg_p (x)
1313 && ! (SUBREG_PROMOTED_VAR_P (x)
1314 && SUBREG_CHECK_PROMOTED_SIGN (x,
1315 POINTERS_EXTEND_UNSIGNED))))
1316 && !targetm.have_ptr_extend ())
1317 can_be_reg_pointer = false;
1318 #endif
1319 x = XEXP (x, 0);
1320 }
1321
1322 /* Hard registers can be reused for multiple purposes within the same
1323 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1324 on them is wrong. */
1325 if (HARD_REGISTER_P (reg))
1326 return;
1327
1328 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1329 if (MEM_P (x))
1330 {
1331 if (MEM_OFFSET_KNOWN_P (x))
1332 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1333 MEM_OFFSET (x) + offset);
1334 if (can_be_reg_pointer && MEM_POINTER (x))
1335 mark_reg_pointer (reg, 0);
1336 }
1337 else if (REG_P (x))
1338 {
1339 if (REG_ATTRS (x))
1340 update_reg_offset (reg, x, offset);
1341 if (can_be_reg_pointer && REG_POINTER (x))
1342 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1343 }
1344 }
1345
1346 /* Generate a REG rtx for a new pseudo register, copying the mode
1347 and attributes from X. */
1348
1349 rtx
1350 gen_reg_rtx_and_attrs (rtx x)
1351 {
1352 rtx reg = gen_reg_rtx (GET_MODE (x));
1353 set_reg_attrs_from_value (reg, x);
1354 return reg;
1355 }
1356
1357 /* Set the register attributes for registers contained in PARM_RTX.
1358 Use needed values from memory attributes of MEM. */
1359
1360 void
1361 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1362 {
1363 if (REG_P (parm_rtx))
1364 set_reg_attrs_from_value (parm_rtx, mem);
1365 else if (GET_CODE (parm_rtx) == PARALLEL)
1366 {
1367 /* Check for a NULL entry in the first slot, used to indicate that the
1368 parameter goes both on the stack and in registers. */
1369 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1370 for (; i < XVECLEN (parm_rtx, 0); i++)
1371 {
1372 rtx x = XVECEXP (parm_rtx, 0, i);
1373 if (REG_P (XEXP (x, 0)))
1374 REG_ATTRS (XEXP (x, 0))
1375 = get_reg_attrs (MEM_EXPR (mem),
1376 INTVAL (XEXP (x, 1)));
1377 }
1378 }
1379 }
1380
1381 /* Set the REG_ATTRS for registers in value X, given that X represents
1382 decl T. */
1383
1384 void
1385 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1386 {
1387 if (!t)
1388 return;
1389 tree tdecl = t;
1390 if (GET_CODE (x) == SUBREG)
1391 {
1392 gcc_assert (subreg_lowpart_p (x));
1393 x = SUBREG_REG (x);
1394 }
1395 if (REG_P (x))
1396 REG_ATTRS (x)
1397 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1398 DECL_P (tdecl)
1399 ? DECL_MODE (tdecl)
1400 : TYPE_MODE (TREE_TYPE (tdecl))));
1401 if (GET_CODE (x) == CONCAT)
1402 {
1403 if (REG_P (XEXP (x, 0)))
1404 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1405 if (REG_P (XEXP (x, 1)))
1406 REG_ATTRS (XEXP (x, 1))
1407 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1408 }
1409 if (GET_CODE (x) == PARALLEL)
1410 {
1411 int i, start;
1412
1413 /* Check for a NULL entry, used to indicate that the parameter goes
1414 both on the stack and in registers. */
1415 if (XEXP (XVECEXP (x, 0, 0), 0))
1416 start = 0;
1417 else
1418 start = 1;
1419
1420 for (i = start; i < XVECLEN (x, 0); i++)
1421 {
1422 rtx y = XVECEXP (x, 0, i);
1423 if (REG_P (XEXP (y, 0)))
1424 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1425 }
1426 }
1427 }
1428
1429 /* Assign the RTX X to declaration T. */
1430
1431 void
1432 set_decl_rtl (tree t, rtx x)
1433 {
1434 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1435 if (x)
1436 set_reg_attrs_for_decl_rtl (t, x);
1437 }
1438
1439 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1440 if the ABI requires the parameter to be passed by reference. */
1441
1442 void
1443 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1444 {
1445 DECL_INCOMING_RTL (t) = x;
1446 if (x && !by_reference_p)
1447 set_reg_attrs_for_decl_rtl (t, x);
1448 }
1449
1450 /* Identify REG (which may be a CONCAT) as a user register. */
1451
1452 void
1453 mark_user_reg (rtx reg)
1454 {
1455 if (GET_CODE (reg) == CONCAT)
1456 {
1457 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1458 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1459 }
1460 else
1461 {
1462 gcc_assert (REG_P (reg));
1463 REG_USERVAR_P (reg) = 1;
1464 }
1465 }
1466
1467 /* Identify REG as a probable pointer register and show its alignment
1468 as ALIGN, if nonzero. */
1469
1470 void
1471 mark_reg_pointer (rtx reg, int align)
1472 {
1473 if (! REG_POINTER (reg))
1474 {
1475 REG_POINTER (reg) = 1;
1476
1477 if (align)
1478 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1479 }
1480 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1481 /* We can no longer be sure just how aligned this pointer is. */
1482 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1483 }
1484
1485 /* Return 1 plus largest pseudo reg number used in the current function. */
1486
1487 int
1488 max_reg_num (void)
1489 {
1490 return reg_rtx_no;
1491 }
1492
1493 /* Return 1 + the largest label number used so far in the current function. */
1494
1495 int
1496 max_label_num (void)
1497 {
1498 return label_num;
1499 }
1500
1501 /* Return first label number used in this function (if any were used). */
1502
1503 int
1504 get_first_label_num (void)
1505 {
1506 return first_label_num;
1507 }
1508
1509 /* If the rtx for a label was created during the expansion of a nested
1510 function, then first_label_num won't include this label number.
1511 Fix this now so that array indices work later. */
1512
1513 void
1514 maybe_set_first_label_num (rtx_code_label *x)
1515 {
1516 if (CODE_LABEL_NUMBER (x) < first_label_num)
1517 first_label_num = CODE_LABEL_NUMBER (x);
1518 }
1519
1520 /* For use by the RTL function loader, when mingling with normal
1521 functions.
1522 Ensure that label_num is greater than the label num of X, to avoid
1523 duplicate labels in the generated assembler. */
1524
1525 void
1526 maybe_set_max_label_num (rtx_code_label *x)
1527 {
1528 if (CODE_LABEL_NUMBER (x) >= label_num)
1529 label_num = CODE_LABEL_NUMBER (x) + 1;
1530 }
1531
1532
1533 /* Return a value representing some low-order bits of X, where the number
1534 of low-order bits is given by MODE. Note that no conversion is done
1535 between floating-point and fixed-point values; rather, the bit
1536 representation is returned.
1537
1538 This function handles the cases in common between gen_lowpart, below,
1539 and two variants in cse.c and combine.c. These are the cases that can
1540 be safely handled at all points in the compilation.
1541
1542 If this is not a case we can handle, return 0. */
1543
1544 rtx
1545 gen_lowpart_common (machine_mode mode, rtx x)
1546 {
1547 poly_uint64 msize = GET_MODE_SIZE (mode);
1548 machine_mode innermode;
1549
1550 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1551 so we have to make one up. Yuk. */
1552 innermode = GET_MODE (x);
1553 if (CONST_INT_P (x)
1554 && known_le (msize * BITS_PER_UNIT,
1555 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1556 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1557 else if (innermode == VOIDmode)
1558 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1559
1560 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1561
1562 if (innermode == mode)
1563 return x;
1564
1565 /* The size of the outer and inner modes must be ordered. */
1566 poly_uint64 xsize = GET_MODE_SIZE (innermode);
1567 if (!ordered_p (msize, xsize))
1568 return 0;
1569
1570 if (SCALAR_FLOAT_MODE_P (mode))
1571 {
1572 /* Don't allow paradoxical FLOAT_MODE subregs. */
1573 if (maybe_gt (msize, xsize))
1574 return 0;
1575 }
1576 else
1577 {
1578 /* MODE must occupy no more of the underlying registers than X. */
1579 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1580 unsigned int mregs, xregs;
1581 if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1582 || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1583 || mregs > xregs)
1584 return 0;
1585 }
1586
1587 scalar_int_mode int_mode, int_innermode, from_mode;
1588 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1589 && is_a <scalar_int_mode> (mode, &int_mode)
1590 && is_a <scalar_int_mode> (innermode, &int_innermode)
1591 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1592 {
1593 /* If we are getting the low-order part of something that has been
1594 sign- or zero-extended, we can either just use the object being
1595 extended or make a narrower extension. If we want an even smaller
1596 piece than the size of the object being extended, call ourselves
1597 recursively.
1598
1599 This case is used mostly by combine and cse. */
1600
1601 if (from_mode == int_mode)
1602 return XEXP (x, 0);
1603 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1604 return gen_lowpart_common (int_mode, XEXP (x, 0));
1605 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1606 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1607 }
1608 else if (GET_CODE (x) == SUBREG || REG_P (x)
1609 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1610 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1611 || CONST_POLY_INT_P (x))
1612 return lowpart_subreg (mode, x, innermode);
1613
1614 /* Otherwise, we can't do this. */
1615 return 0;
1616 }
1617
1618 rtx
1619 gen_highpart (machine_mode mode, rtx x)
1620 {
1621 poly_uint64 msize = GET_MODE_SIZE (mode);
1622 rtx result;
1623
1624 /* This case loses if X is a subreg. To catch bugs early,
1625 complain if an invalid MODE is used even in other cases. */
1626 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1627 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1628
1629 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1630 subreg_highpart_offset (mode, GET_MODE (x)));
1631 gcc_assert (result);
1632
1633 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1634 the target if we have a MEM. gen_highpart must return a valid operand,
1635 emitting code if necessary to do so. */
1636 if (MEM_P (result))
1637 {
1638 result = validize_mem (result);
1639 gcc_assert (result);
1640 }
1641
1642 return result;
1643 }
1644
1645 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1646 can be a VOIDmode constant. */
1647 rtx
1648 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1649 {
1650 if (GET_MODE (exp) != VOIDmode)
1651 {
1652 gcc_assert (GET_MODE (exp) == innermode);
1653 return gen_highpart (outermode, exp);
1654 }
1655 return simplify_gen_subreg (outermode, exp, innermode,
1656 subreg_highpart_offset (outermode, innermode));
1657 }
1658
1659 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1660 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1661
1662 poly_uint64
1663 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1664 {
1665 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1666 if (maybe_gt (outer_bytes, inner_bytes))
1667 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1668 return 0;
1669
1670 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1671 return inner_bytes - outer_bytes;
1672 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1673 return 0;
1674 else
1675 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1676 }
1677
1678 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1679 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1680
1681 poly_uint64
1682 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1683 {
1684 gcc_assert (known_ge (inner_bytes, outer_bytes));
1685
1686 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1687 return 0;
1688 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1689 return inner_bytes - outer_bytes;
1690 else
1691 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1692 (inner_bytes - outer_bytes)
1693 * BITS_PER_UNIT);
1694 }
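/* Illustration: for the 4-byte high part of an 8-byte value,
   subreg_size_highpart_offset (4, 8) is 4 when neither bytes nor words
   are big-endian and 0 when both are; subreg_size_lowpart_offset returns
   the complementary offsets.  */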
1695
1696 /* Return 1 iff X, assumed to be a SUBREG,
1697 refers to the least significant part of its containing reg.
1698 If X is not a SUBREG, always return 1 (it is its own low part!). */
1699
1700 int
1701 subreg_lowpart_p (const_rtx x)
1702 {
1703 if (GET_CODE (x) != SUBREG)
1704 return 1;
1705 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1706 return 0;
1707
1708 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1709 GET_MODE (SUBREG_REG (x))),
1710 SUBREG_BYTE (x));
1711 }
1712
1713 /* Return subword OFFSET of operand OP.
1714 The word number, OFFSET, is interpreted as the word number starting
1715 at the low-order address. OFFSET 0 is the low-order word if not
1716 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1717
1718 If we cannot extract the required word, we return zero. Otherwise,
1719 an rtx corresponding to the requested word will be returned.
1720
1721 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1722 reload has completed, a valid address will always be returned. After
1723 reload, if a valid address cannot be returned, we return zero.
1724
1725 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1726 it is the responsibility of the caller.
1727
1728 MODE is the mode of OP in case it is a CONST_INT.
1729
1730 ??? This is still rather broken for some cases. The problem for the
1731 moment is that none of the callers provide a 'goal mode' telling us
1732 which mode to work with; they were all written in a word-based
1733 SUBREG world.
1734 In most cases, uses of this function can now be replaced by
1735 simplify_subreg.
1736 */
1737
1738 rtx
1739 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1740 machine_mode mode)
1741 {
1742 if (mode == VOIDmode)
1743 mode = GET_MODE (op);
1744
1745 gcc_assert (mode != VOIDmode);
1746
1747 /* If OP is narrower than a word, fail. */
1748 if (mode != BLKmode
1749 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1750 return 0;
1751
1752 /* If we want a word outside OP, return zero. */
1753 if (mode != BLKmode
1754 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1755 return const0_rtx;
1756
1757 /* Form a new MEM at the requested address. */
1758 if (MEM_P (op))
1759 {
1760 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1761
1762 if (! validate_address)
1763 return new_rtx;
1764
1765 else if (reload_completed)
1766 {
1767 if (! strict_memory_address_addr_space_p (word_mode,
1768 XEXP (new_rtx, 0),
1769 MEM_ADDR_SPACE (op)))
1770 return 0;
1771 }
1772 else
1773 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1774 }
1775
1776 /* Rest can be handled by simplify_subreg. */
1777 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1778 }
1779
1780 /* Similar to `operand_subword', but never return 0. If we can't
1781 extract the required subword, put OP into a register and try again.
1782 The second attempt must succeed. We always validate the address in
1783 this case.
1784
1785 MODE is the mode of OP, in case it is CONST_INT. */
1786
1787 rtx
1788 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1789 {
1790 rtx result = operand_subword (op, offset, 1, mode);
1791
1792 if (result)
1793 return result;
1794
1795 if (mode != BLKmode && mode != VOIDmode)
1796 {
1797 /* If this is a register which cannot be accessed by words, copy it
1798 to a pseudo register. */
1799 if (REG_P (op))
1800 op = copy_to_reg (op);
1801 else
1802 op = force_reg (mode, op);
1803 }
1804
1805 result = operand_subword (op, offset, 1, mode);
1806 gcc_assert (result);
1807
1808 return result;
1809 }
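/* A hypothetical sketch of the classic word-at-a-time copy that these
   routines support (multi-word moves are expanded in essentially this
   way); the example_* helper is for illustration only and assumes MODE
   has a constant, whole-word size.  */
#if 0
static void
example_copy_word_by_word (rtx dest, rtx src, machine_mode mode)
{
  unsigned int nwords
    = GET_MODE_SIZE (mode).to_constant () / UNITS_PER_WORD;
  for (unsigned int i = 0; i < nwords; i++)
    /* For simplicity, assume operand_subword succeeds on DEST.  */
    emit_move_insn (operand_subword (dest, i, 1, mode),
		    operand_subword_force (src, i, mode));
}
#endif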
1810
1811 mem_attrs::mem_attrs ()
1812 : expr (NULL_TREE),
1813 offset (0),
1814 size (0),
1815 alias (0),
1816 align (0),
1817 addrspace (ADDR_SPACE_GENERIC),
1818 offset_known_p (false),
1819 size_known_p (false)
1820 {}
1821
1822 /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1823 equal, and 0 otherwise. */
1824
1825 int
1826 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1827 {
1828 if (expr1 == expr2)
1829 return 1;
1830
1831 if (! expr1 || ! expr2)
1832 return 0;
1833
1834 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1835 return 0;
1836
1837 return operand_equal_p (expr1, expr2, 0);
1838 }
1839
1840 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1841 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1842 -1 if not known. */
1843
1844 int
1845 get_mem_align_offset (rtx mem, unsigned int align)
1846 {
1847 tree expr;
1848 poly_uint64 offset;
1849
1850 /* This function can't use
1851 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1852 || (MAX (MEM_ALIGN (mem),
1853 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1854 < align))
1855 return -1;
1856 else
1857 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1858 for two reasons:
1859 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1860 for <variable>. get_inner_reference doesn't handle it and
1861 even if it did, the alignment in that case needs to be determined
1862 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1863 - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1864 isn't sufficiently aligned, the object it is in might be. */
1865 gcc_assert (MEM_P (mem));
1866 expr = MEM_EXPR (mem);
1867 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1868 return -1;
1869
1870 offset = MEM_OFFSET (mem);
1871 if (DECL_P (expr))
1872 {
1873 if (DECL_ALIGN (expr) < align)
1874 return -1;
1875 }
1876 else if (INDIRECT_REF_P (expr))
1877 {
1878 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1879 return -1;
1880 }
1881 else if (TREE_CODE (expr) == COMPONENT_REF)
1882 {
1883 while (1)
1884 {
1885 tree inner = TREE_OPERAND (expr, 0);
1886 tree field = TREE_OPERAND (expr, 1);
1887 tree byte_offset = component_ref_field_offset (expr);
1888 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1889
1890 poly_uint64 suboffset;
1891 if (!byte_offset
1892 || !poly_int_tree_p (byte_offset, &suboffset)
1893 || !tree_fits_uhwi_p (bit_offset))
1894 return -1;
1895
1896 offset += suboffset;
1897 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1898
1899 if (inner == NULL_TREE)
1900 {
1901 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1902 < (unsigned int) align)
1903 return -1;
1904 break;
1905 }
1906 else if (DECL_P (inner))
1907 {
1908 if (DECL_ALIGN (inner) < align)
1909 return -1;
1910 break;
1911 }
1912 else if (TREE_CODE (inner) != COMPONENT_REF)
1913 return -1;
1914 expr = inner;
1915 }
1916 }
1917 else
1918 return -1;
1919
1920 HOST_WIDE_INT misalign;
1921 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1922 return -1;
1923 return misalign;
1924 }
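/* A worked example: if MEM refers to a decl whose DECL_ALIGN is at least
   64 bits and MEM_OFFSET (mem) is 12, then get_mem_align_offset (mem, 64)
   returns 12 % 8 == 4, meaning that XEXP (mem, 0) minus 4 bytes is known
   to be 64-bit aligned.  */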
1925
1926 /* Given REF (a MEM) and T, either the type of X or the expression
1927 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1928 if we are making a new object of this type. BITPOS is nonzero if
1929 there is an offset outstanding on T that will be applied later. */
1930
1931 void
1932 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1933 poly_int64 bitpos)
1934 {
1935 poly_int64 apply_bitpos = 0;
1936 tree type;
1937 class mem_attrs attrs, *defattrs, *refattrs;
1938 addr_space_t as;
1939
1940 /* It can happen that type_for_mode was given a mode for which there
1941 is no language-level type, in which case it returns NULL and we
1942 can see that here. */
1943 if (t == NULL_TREE)
1944 return;
1945
1946 type = TYPE_P (t) ? t : TREE_TYPE (t);
1947 if (type == error_mark_node)
1948 return;
1949
1950 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1951 wrong answer, as it assumes that DECL_RTL already has the right alias
1952 info. Callers should not set DECL_RTL until after the call to
1953 set_mem_attributes. */
1954 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1955
1956 /* Get the alias set from the expression or type (perhaps using a
1957 front-end routine) and use it. */
1958 attrs.alias = get_alias_set (t);
1959
1960 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1961 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1962
1963 /* Default values from pre-existing memory attributes if present. */
1964 refattrs = MEM_ATTRS (ref);
1965 if (refattrs)
1966 {
1967 /* ??? Can this ever happen? Calling this routine on a MEM that
1968 already carries memory attributes should probably be invalid. */
1969 attrs.expr = refattrs->expr;
1970 attrs.offset_known_p = refattrs->offset_known_p;
1971 attrs.offset = refattrs->offset;
1972 attrs.size_known_p = refattrs->size_known_p;
1973 attrs.size = refattrs->size;
1974 attrs.align = refattrs->align;
1975 }
1976
1977 /* Otherwise, default values from the mode of the MEM reference. */
1978 else
1979 {
1980 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1981 gcc_assert (!defattrs->expr);
1982 gcc_assert (!defattrs->offset_known_p);
1983
1984 /* Respect mode size. */
1985 attrs.size_known_p = defattrs->size_known_p;
1986 attrs.size = defattrs->size;
1987 /* ??? Is this really necessary? We probably should always get
1988 the size from the type below. */
1989
1990 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1991 if T is an object, always compute the object alignment below. */
1992 if (TYPE_P (t))
1993 attrs.align = defattrs->align;
1994 else
1995 attrs.align = BITS_PER_UNIT;
1996 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1997 e.g. if the type carries an alignment attribute. Should we be
1998 able to simply always use TYPE_ALIGN? */
1999 }
2000
2001 /* We can set the alignment from the type if we are making an object or if
2002 this is an INDIRECT_REF. */
2003 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2004 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2005
2006 /* If the size is known, we can set that. */
2007 tree new_size = TYPE_SIZE_UNIT (type);
2008
2009 /* The address-space is that of the type. */
2010 as = TYPE_ADDR_SPACE (type);
2011
2012 /* If T is not a type, we may be able to deduce some more information about
2013 the expression. */
2014 if (! TYPE_P (t))
2015 {
2016 tree base;
2017
2018 if (TREE_THIS_VOLATILE (t))
2019 MEM_VOLATILE_P (ref) = 1;
2020
2021 /* Now remove any conversions: they don't change what the underlying
2022 object is. Likewise for SAVE_EXPR. */
2023 while (CONVERT_EXPR_P (t)
2024 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2025 || TREE_CODE (t) == SAVE_EXPR)
2026 t = TREE_OPERAND (t, 0);
2027
2028 /* Note whether this expression can trap. */
2029 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2030
2031 base = get_base_address (t);
2032 if (base)
2033 {
2034 if (DECL_P (base)
2035 && TREE_READONLY (base)
2036 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2037 && !TREE_THIS_VOLATILE (base))
2038 MEM_READONLY_P (ref) = 1;
2039
2040 /* Mark static const strings readonly as well. */
2041 if (TREE_CODE (base) == STRING_CST
2042 && TREE_READONLY (base)
2043 && TREE_STATIC (base))
2044 MEM_READONLY_P (ref) = 1;
2045
2046 /* Address-space information is on the base object. */
2047 if (TREE_CODE (base) == MEM_REF
2048 || TREE_CODE (base) == TARGET_MEM_REF)
2049 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2050 0))));
2051 else
2052 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2053 }
2054
2055 /* If this expression uses its parent's alias set, mark it such
2056 that we won't change it. */
2057 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2058 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2059
2060 /* If this is a decl, set the attributes of the MEM from it. */
2061 if (DECL_P (t))
2062 {
2063 attrs.expr = t;
2064 attrs.offset_known_p = true;
2065 attrs.offset = 0;
2066 apply_bitpos = bitpos;
2067 new_size = DECL_SIZE_UNIT (t);
2068 }
2069
2070 /* ??? If we end up with a constant or a descriptor do not
2071 record a MEM_EXPR. */
2072 else if (CONSTANT_CLASS_P (t)
2073 || TREE_CODE (t) == CONSTRUCTOR)
2074 ;
2075
2076 /* If this is a field reference, record it. */
2077 else if (TREE_CODE (t) == COMPONENT_REF)
2078 {
2079 attrs.expr = t;
2080 attrs.offset_known_p = true;
2081 attrs.offset = 0;
2082 apply_bitpos = bitpos;
2083 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2084 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2085 }
2086
2087 /* Else record it. */
2088 else
2089 {
2090 gcc_assert (handled_component_p (t)
2091 || TREE_CODE (t) == MEM_REF
2092 || TREE_CODE (t) == TARGET_MEM_REF);
2093 attrs.expr = t;
2094 attrs.offset_known_p = true;
2095 attrs.offset = 0;
2096 apply_bitpos = bitpos;
2097 }
2098
2099 /* If this is a reference based on a partitioned decl replace the
2100 base with a MEM_REF of the pointer representative we created
2101 during stack slot partitioning. */
2102 if (attrs.expr
2103 && VAR_P (base)
2104 && ! is_global_var (base)
2105 && cfun->gimple_df->decls_to_pointers != NULL)
2106 {
2107 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2108 if (namep)
2109 {
2110 attrs.expr = unshare_expr (attrs.expr);
2111 tree *orig_base = &attrs.expr;
2112 while (handled_component_p (*orig_base))
2113 orig_base = &TREE_OPERAND (*orig_base, 0);
2114 tree aptrt = reference_alias_ptr_type (*orig_base);
2115 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2116 build_int_cst (aptrt, 0));
2117 }
2118 }
2119
2120 /* Compute the alignment. */
2121 unsigned int obj_align;
2122 unsigned HOST_WIDE_INT obj_bitpos;
2123 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2124 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2125 if (diff_align != 0)
2126 obj_align = MIN (obj_align, diff_align);
2127 attrs.align = MAX (attrs.align, obj_align);
2128 }
2129
2130 poly_uint64 const_size;
2131 if (poly_int_tree_p (new_size, &const_size))
2132 {
2133 attrs.size_known_p = true;
2134 attrs.size = const_size;
2135 }
2136
2137 /* If we modified OFFSET based on T, then subtract the outstanding
2138 bit position offset. Similarly, increase the size of the accessed
2139 object to contain the negative offset. */
2140 if (maybe_ne (apply_bitpos, 0))
2141 {
2142 gcc_assert (attrs.offset_known_p);
2143 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2144 attrs.offset -= bytepos;
2145 if (attrs.size_known_p)
2146 attrs.size += bytepos;
2147 }
2148
2149 /* Now set the attributes we computed above. */
2150 attrs.addrspace = as;
2151 set_mem_attrs (ref, &attrs);
2152 }
2153
2154 void
2155 set_mem_attributes (rtx ref, tree t, int objectp)
2156 {
2157 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2158 }
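/* A rough, hypothetical sketch of the usual pattern when a tree reference
   EXP has already been expanded to an address: build the MEM first, then
   let set_mem_attributes fill in the alias set, alignment, size and
   MEM_EXPR from the tree.  The example_* helper is for illustration
   only.  */
#if 0
static rtx
example_mem_for_tree (tree exp, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
  set_mem_attributes (mem, exp, 0);
  return mem;
}
#endif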
2159
2160 /* Set the alias set of MEM to SET. */
2161
2162 void
2163 set_mem_alias_set (rtx mem, alias_set_type set)
2164 {
2165 /* If the new and old alias sets don't conflict, something is wrong. */
2166 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2167 mem_attrs attrs (*get_mem_attrs (mem));
2168 attrs.alias = set;
2169 set_mem_attrs (mem, &attrs);
2170 }
2171
2172 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2173
2174 void
2175 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2176 {
2177 mem_attrs attrs (*get_mem_attrs (mem));
2178 attrs.addrspace = addrspace;
2179 set_mem_attrs (mem, &attrs);
2180 }
2181
2182 /* Set the alignment of MEM to ALIGN bits. */
2183
2184 void
2185 set_mem_align (rtx mem, unsigned int align)
2186 {
2187 mem_attrs attrs (*get_mem_attrs (mem));
2188 attrs.align = align;
2189 set_mem_attrs (mem, &attrs);
2190 }
2191
2192 /* Set the expr for MEM to EXPR. */
2193
2194 void
2195 set_mem_expr (rtx mem, tree expr)
2196 {
2197 mem_attrs attrs (*get_mem_attrs (mem));
2198 attrs.expr = expr;
2199 set_mem_attrs (mem, &attrs);
2200 }
2201
2202 /* Set the offset of MEM to OFFSET. */
2203
2204 void
2205 set_mem_offset (rtx mem, poly_int64 offset)
2206 {
2207 mem_attrs attrs (*get_mem_attrs (mem));
2208 attrs.offset_known_p = true;
2209 attrs.offset = offset;
2210 set_mem_attrs (mem, &attrs);
2211 }
2212
2213 /* Clear the offset of MEM. */
2214
2215 void
2216 clear_mem_offset (rtx mem)
2217 {
2218 mem_attrs attrs (*get_mem_attrs (mem));
2219 attrs.offset_known_p = false;
2220 set_mem_attrs (mem, &attrs);
2221 }
2222
2223 /* Set the size of MEM to SIZE. */
2224
2225 void
2226 set_mem_size (rtx mem, poly_int64 size)
2227 {
2228 mem_attrs attrs (*get_mem_attrs (mem));
2229 attrs.size_known_p = true;
2230 attrs.size = size;
2231 set_mem_attrs (mem, &attrs);
2232 }
2233
2234 /* Clear the size of MEM. */
2235
2236 void
2237 clear_mem_size (rtx mem)
2238 {
2239 mem_attrs attrs (*get_mem_attrs (mem));
2240 attrs.size_known_p = false;
2241 set_mem_attrs (mem, &attrs);
2242 }
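/* A hypothetical sketch of using the individual setters above when a MEM
   is built by hand rather than from a tree, e.g. a block of memory whose
   size and alignment are known; the example_* helper is for illustration
   only.  */
#if 0
static rtx
example_block_mem (rtx addr, poly_int64 size, unsigned int align_in_bits)
{
  rtx mem = gen_rtx_MEM (BLKmode, addr);
  set_mem_size (mem, size);
  set_mem_align (mem, align_in_bits);
  return mem;
}
#endif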
2243
2244 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2245 and its address changed to ADDR. (VOIDmode means don't change the mode.
2246 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2247 returned memory location is required to be valid. INPLACE is true if any
2248 changes can be made directly to MEMREF or false if MEMREF must be treated
2249 as immutable.
2250
2251 The memory attributes are not changed. */
2252
2253 static rtx
2254 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2255 bool inplace)
2256 {
2257 addr_space_t as;
2258 rtx new_rtx;
2259
2260 gcc_assert (MEM_P (memref));
2261 as = MEM_ADDR_SPACE (memref);
2262 if (mode == VOIDmode)
2263 mode = GET_MODE (memref);
2264 if (addr == 0)
2265 addr = XEXP (memref, 0);
2266 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2267 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2268 return memref;
2269
2270 /* Don't validate the address for LRA. LRA can make the address
2271 valid by itself in the most efficient way. */
2272 if (validate && !lra_in_progress)
2273 {
2274 if (reload_in_progress || reload_completed)
2275 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2276 else
2277 addr = memory_address_addr_space (mode, addr, as);
2278 }
2279
2280 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2281 return memref;
2282
2283 if (inplace)
2284 {
2285 XEXP (memref, 0) = addr;
2286 return memref;
2287 }
2288
2289 new_rtx = gen_rtx_MEM (mode, addr);
2290 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2291 return new_rtx;
2292 }
2293
2294 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2295 way we are changing MEMREF, so we only preserve the alias set. */
2296
2297 rtx
2298 change_address (rtx memref, machine_mode mode, rtx addr)
2299 {
2300 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2301 machine_mode mmode = GET_MODE (new_rtx);
2302 class mem_attrs *defattrs;
2303
2304 mem_attrs attrs (*get_mem_attrs (memref));
2305 defattrs = mode_mem_attrs[(int) mmode];
2306 attrs.expr = NULL_TREE;
2307 attrs.offset_known_p = false;
2308 attrs.size_known_p = defattrs->size_known_p;
2309 attrs.size = defattrs->size;
2310 attrs.align = defattrs->align;
2311
2312 /* If there are no changes, just return the original memory reference. */
2313 if (new_rtx == memref)
2314 {
2315 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2316 return new_rtx;
2317
2318 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2319 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2320 }
2321
2322 set_mem_attrs (new_rtx, &attrs);
2323 return new_rtx;
2324 }
2325
2326 /* Return a memory reference like MEMREF, but with its mode changed
2327 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2328 nonzero, the memory address is forced to be valid.
2329 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2330 and the caller is responsible for adjusting the MEMREF base register.
2331 If ADJUST_OBJECT is zero, the underlying object associated with the
2332 memory reference is left unchanged and the caller is responsible for
2333 dealing with it. Otherwise, if the new memory reference is outside
2334 the underlying object, even partially, then the object is dropped.
2335 SIZE, if nonzero, is the size of an access in cases where MODE
2336 has no inherent size. */
2337
2338 rtx
2339 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2340 int validate, int adjust_address, int adjust_object,
2341 poly_int64 size)
2342 {
2343 rtx addr = XEXP (memref, 0);
2344 rtx new_rtx;
2345 scalar_int_mode address_mode;
2346 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2347 unsigned HOST_WIDE_INT max_align;
2348 #ifdef POINTERS_EXTEND_UNSIGNED
2349 scalar_int_mode pointer_mode
2350 = targetm.addr_space.pointer_mode (attrs.addrspace);
2351 #endif
2352
2353 /* VOIDmode means no mode change for change_address_1. */
2354 if (mode == VOIDmode)
2355 mode = GET_MODE (memref);
2356
2357 /* Take the size of non-BLKmode accesses from the mode. */
2358 defattrs = mode_mem_attrs[(int) mode];
2359 if (defattrs->size_known_p)
2360 size = defattrs->size;
2361
2362 /* If there are no changes, just return the original memory reference. */
2363 if (mode == GET_MODE (memref)
2364 && known_eq (offset, 0)
2365 && (known_eq (size, 0)
2366 || (attrs.size_known_p && known_eq (attrs.size, size)))
2367 && (!validate || memory_address_addr_space_p (mode, addr,
2368 attrs.addrspace)))
2369 return memref;
2370
2371 /* ??? Prefer to create garbage instead of creating shared rtl.
2372 This may happen even if offset is nonzero -- consider
2373 (plus (plus reg reg) const_int) -- so do this always. */
2374 addr = copy_rtx (addr);
2375
2376 /* Convert a possibly large offset to a signed value within the
2377 range of the target address space. */
2378 address_mode = get_address_mode (memref);
2379 offset = trunc_int_for_mode (offset, address_mode);
2380
2381 if (adjust_address)
2382 {
2383 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2384 object, we can merge it into the LO_SUM. */
2385 if (GET_MODE (memref) != BLKmode
2386 && GET_CODE (addr) == LO_SUM
2387 && known_in_range_p (offset,
2388 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2389 / BITS_PER_UNIT)))
2390 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2391 plus_constant (address_mode,
2392 XEXP (addr, 1), offset));
2393 #ifdef POINTERS_EXTEND_UNSIGNED
2394 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2395 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2396 the fact that pointers are not allowed to overflow. */
2397 else if (POINTERS_EXTEND_UNSIGNED > 0
2398 && GET_CODE (addr) == ZERO_EXTEND
2399 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2400 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2401 addr = gen_rtx_ZERO_EXTEND (address_mode,
2402 plus_constant (pointer_mode,
2403 XEXP (addr, 0), offset));
2404 #endif
2405 else
2406 addr = plus_constant (address_mode, addr, offset);
2407 }
2408
2409 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2410
2411 /* If the address is a REG, change_address_1 rightfully returns memref,
2412 but this would destroy memref's MEM_ATTRS. */
2413 if (new_rtx == memref && maybe_ne (offset, 0))
2414 new_rtx = copy_rtx (new_rtx);
2415
2416 /* Conservatively drop the object if we don't know where we start from. */
2417 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2418 {
2419 attrs.expr = NULL_TREE;
2420 attrs.alias = 0;
2421 }
2422
2423 /* Compute the new values of the memory attributes due to this adjustment.
2424 We add the offsets and update the alignment. */
2425 if (attrs.offset_known_p)
2426 {
2427 attrs.offset += offset;
2428
2429 /* Drop the object if the new left end is not within its bounds. */
2430 if (adjust_object && maybe_lt (attrs.offset, 0))
2431 {
2432 attrs.expr = NULL_TREE;
2433 attrs.alias = 0;
2434 }
2435 }
2436
2437 /* Compute the new alignment by taking the MIN of the alignment and the
2438 lowest-order set bit in OFFSET, but don't change the alignment if
2439 OFFSET is zero. */
2440 if (maybe_ne (offset, 0))
2441 {
2442 max_align = known_alignment (offset) * BITS_PER_UNIT;
2443 attrs.align = MIN (attrs.align, max_align);
2444 }
2445
2446 if (maybe_ne (size, 0))
2447 {
2448 /* Drop the object if the new right end is not within its bounds. */
2449 if (adjust_object && maybe_gt (offset + size, attrs.size))
2450 {
2451 attrs.expr = NULL_TREE;
2452 attrs.alias = 0;
2453 }
2454 attrs.size_known_p = true;
2455 attrs.size = size;
2456 }
2457 else if (attrs.size_known_p)
2458 {
2459 gcc_assert (!adjust_object);
2460 attrs.size -= offset;
2461 /* ??? The store_by_pieces machinery generates negative sizes,
2462 so don't assert for that here. */
2463 }
2464
2465 set_mem_attrs (new_rtx, &attrs);
2466
2467 return new_rtx;
2468 }
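/* A minimal, hypothetical sketch: callers normally reach this through the
   adjust_address/adjust_address_nv macros.  Assuming a 32-bit word target,
   the two word halves of a DImode MEM are accessed like this, with the
   MEM_ATTRS (offset, size, alignment) updated for each half.  The
   example_* helper is for illustration only.  */
#if 0
static void
example_split_doubleword_mem (rtx dimem)
{
  rtx lo = adjust_address (dimem, SImode, 0);
  rtx hi = adjust_address (dimem, SImode, 4);
  emit_move_insn (gen_reg_rtx (SImode), lo);
  emit_move_insn (gen_reg_rtx (SImode), hi);
}
#endif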
2469
2470 /* Return a memory reference like MEMREF, but with its mode changed
2471 to MODE and its address changed to ADDR, which is assumed to be
2472 MEMREF offset by OFFSET bytes. If VALIDATE is
2473 nonzero, the memory address is forced to be valid. */
2474
2475 rtx
2476 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2477 poly_int64 offset, int validate)
2478 {
2479 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2480 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2481 }
2482
2483 /* Return a memory reference like MEMREF, but whose address is changed by
2484 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2485 known to be in OFFSET (possibly 1). */
2486
2487 rtx
2488 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2489 {
2490 rtx new_rtx, addr = XEXP (memref, 0);
2491 machine_mode address_mode;
2492 class mem_attrs *defattrs;
2493
2494 mem_attrs attrs (*get_mem_attrs (memref));
2495 address_mode = get_address_mode (memref);
2496 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2497
2498 /* At this point we don't know _why_ the address is invalid. It
2499 could have secondary memory references, multiplies or anything.
2500
2501 However, if we did go and rearrange things, we can wind up not
2502 being able to recognize the magic around pic_offset_table_rtx.
2503 This stuff is fragile, and is yet another example of why it is
2504 bad to expose PIC machinery too early. */
2505 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2506 attrs.addrspace)
2507 && GET_CODE (addr) == PLUS
2508 && XEXP (addr, 0) == pic_offset_table_rtx)
2509 {
2510 addr = force_reg (GET_MODE (addr), addr);
2511 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2512 }
2513
2514 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2515 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2516
2517 /* If there are no changes, just return the original memory reference. */
2518 if (new_rtx == memref)
2519 return new_rtx;
2520
2521 /* Update the alignment to reflect the offset. Reset the offset, which
2522 we don't know. */
2523 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2524 attrs.offset_known_p = false;
2525 attrs.size_known_p = defattrs->size_known_p;
2526 attrs.size = defattrs->size;
2527 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2528 set_mem_attrs (new_rtx, &attrs);
2529 return new_rtx;
2530 }
2531
2532 /* Return a memory reference like MEMREF, but with its address changed to
2533 ADDR. The caller is asserting that the actual piece of memory pointed
2534 to is the same, just the form of the address is being changed, such as
2535 by putting something into a register. INPLACE is true if any changes
2536 can be made directly to MEMREF or false if MEMREF must be treated as
2537 immutable. */
2538
2539 rtx
2540 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2541 {
2542 /* change_address_1 copies the memory attribute structure without change
2543 and that's exactly what we want here. */
2544 update_temp_slot_address (XEXP (memref, 0), addr);
2545 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2546 }
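/* A hypothetical sketch of the common legitimization idiom: force a
   too-complex address into a register and substitute it, keeping the
   memory attributes of the original MEM.  The example_* helper is for
   illustration only.  */
#if 0
static rtx
example_force_mem_address (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return replace_equiv_address (mem, addr, false);
}
#endif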
2547
2548 /* Likewise, but the reference is not required to be valid. */
2549
2550 rtx
2551 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2552 {
2553 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2554 }
2555
2556 /* Return a memory reference like MEMREF, but with its mode widened to
2557 MODE and offset by OFFSET. This would be used by targets that e.g.
2558 cannot issue QImode memory operations and have to use SImode memory
2559 operations plus masking logic. */
2560
2561 rtx
2562 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2563 {
2564 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2565 poly_uint64 size = GET_MODE_SIZE (mode);
2566
2567 /* If there are no changes, just return the original memory reference. */
2568 if (new_rtx == memref)
2569 return new_rtx;
2570
2571 mem_attrs attrs (*get_mem_attrs (new_rtx));
2572
2573 /* If we don't know what offset we were at within the expression, then
2574 we can't know if we've overstepped the bounds. */
2575 if (! attrs.offset_known_p)
2576 attrs.expr = NULL_TREE;
2577
2578 while (attrs.expr)
2579 {
2580 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2581 {
2582 tree field = TREE_OPERAND (attrs.expr, 1);
2583 tree offset = component_ref_field_offset (attrs.expr);
2584
2585 if (! DECL_SIZE_UNIT (field))
2586 {
2587 attrs.expr = NULL_TREE;
2588 break;
2589 }
2590
2591 /* Is the field at least as large as the access? If so, ok,
2592 otherwise strip back to the containing structure. */
2593 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2594 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2595 && known_ge (attrs.offset, 0))
2596 break;
2597
2598 poly_uint64 suboffset;
2599 if (!poly_int_tree_p (offset, &suboffset))
2600 {
2601 attrs.expr = NULL_TREE;
2602 break;
2603 }
2604
2605 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2606 attrs.offset += suboffset;
2607 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2608 / BITS_PER_UNIT);
2609 }
2610 /* Similarly for the decl. */
2611 else if (DECL_P (attrs.expr)
2612 && DECL_SIZE_UNIT (attrs.expr)
2613 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2614 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2615 size)
2616 && known_ge (attrs.offset, 0))
2617 break;
2618 else
2619 {
2620 /* The widened memory access overflows the expression, which means
2621 that it could alias another expression. Zap it. */
2622 attrs.expr = NULL_TREE;
2623 break;
2624 }
2625 }
2626
2627 if (! attrs.expr)
2628 attrs.offset_known_p = false;
2629
2630 /* The widened memory may alias other stuff, so zap the alias set. */
2631 /* ??? Maybe use get_alias_set on any remaining expression. */
2632 attrs.alias = 0;
2633 attrs.size_known_p = true;
2634 attrs.size = size;
2635 set_mem_attrs (new_rtx, &attrs);
2636 return new_rtx;
2637 }
2638
2639 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2640 static GTY(()) tree spill_slot_decl;
2641
2642 tree
2643 get_spill_slot_decl (bool force_build_p)
2644 {
2645 tree d = spill_slot_decl;
2646 rtx rd;
2647
2648 if (d || !force_build_p)
2649 return d;
2650
2651 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2652 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2653 DECL_ARTIFICIAL (d) = 1;
2654 DECL_IGNORED_P (d) = 1;
2655 TREE_USED (d) = 1;
2656 spill_slot_decl = d;
2657
2658 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2659 MEM_NOTRAP_P (rd) = 1;
2660 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2661 attrs.alias = new_alias_set ();
2662 attrs.expr = d;
2663 set_mem_attrs (rd, &attrs);
2664 SET_DECL_RTL (d, rd);
2665
2666 return d;
2667 }
2668
2669 /* Given MEM, a result from assign_stack_local, fill in the memory
2670 attributes as appropriate for a register allocator spill slot.
2671 These slots are not aliasable by other memory. We arrange for
2672 them all to use a single MEM_EXPR, so that the aliasing code can
2673 work properly in the case of shared spill slots. */
2674
2675 void
2676 set_mem_attrs_for_spill (rtx mem)
2677 {
2678 rtx addr;
2679
2680 mem_attrs attrs (*get_mem_attrs (mem));
2681 attrs.expr = get_spill_slot_decl (true);
2682 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2683 attrs.addrspace = ADDR_SPACE_GENERIC;
2684
2685 /* We expect the incoming memory to be of the form:
2686 (mem:MODE (plus (reg sfp) (const_int offset)))
2687 with perhaps the plus missing for offset = 0. */
2688 addr = XEXP (mem, 0);
2689 attrs.offset_known_p = true;
2690 strip_offset (addr, &attrs.offset);
2691
2692 set_mem_attrs (mem, &attrs);
2693 MEM_NOTRAP_P (mem) = 1;
2694 }
2695
2696 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2697
2698 rtx_code_label *
2699 gen_label_rtx (void)
2700 {
2701 return as_a <rtx_code_label *> (
2702 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2703 NULL, label_num++, NULL));
2704 }
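/* A minimal, hypothetical sketch of the usual create/branch/define pattern
   for code labels during expansion; the example_* helper is for
   illustration only.  */
#if 0
static void
example_skip_over (void)
{
  rtx_code_label *over = gen_label_rtx ();
  emit_jump (over);
  /* ... insns that are jumped over ... */
  emit_label (over);
}
#endif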
2705
2706 /* For procedure integration. */
2707
2708 /* Install new pointers to the first and last insns in the chain.
2709 Also, set cur_insn_uid to one higher than the last in use.
2710 Used for an inline-procedure after copying the insn chain. */
2711
2712 void
2713 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2714 {
2715 rtx_insn *insn;
2716
2717 set_first_insn (first);
2718 set_last_insn (last);
2719 cur_insn_uid = 0;
2720
2721 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2722 {
2723 int debug_count = 0;
2724
2725 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2726 cur_debug_insn_uid = 0;
2727
2728 for (insn = first; insn; insn = NEXT_INSN (insn))
2729 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2730 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2731 else
2732 {
2733 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2734 if (DEBUG_INSN_P (insn))
2735 debug_count++;
2736 }
2737
2738 if (debug_count)
2739 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2740 else
2741 cur_debug_insn_uid++;
2742 }
2743 else
2744 for (insn = first; insn; insn = NEXT_INSN (insn))
2745 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2746
2747 cur_insn_uid++;
2748 }
2749
2750 /* Go through all the RTL insn bodies and copy any invalid shared
2751 structure. This routine should only be called once. */
2752
2753 static void
2754 unshare_all_rtl_1 (rtx_insn *insn)
2755 {
2756 /* Unshare just about everything else. */
2757 unshare_all_rtl_in_chain (insn);
2758
2759 /* Make sure the addresses of stack slots found outside the insn chain
2760 (such as, in DECL_RTL of a variable) are not shared
2761 with the insn chain.
2762
2763 This special care is necessary when the stack slot MEM does not
2764 actually appear in the insn chain. If it does appear, its address
2765 is unshared from all else at that point. */
2766 unsigned int i;
2767 rtx temp;
2768 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2769 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2770 }
2771
2772 /* Go through all the RTL insn bodies and copy any invalid shared
2773 structure, again. This is a fairly expensive thing to do so it
2774 should be done sparingly. */
2775
2776 void
2777 unshare_all_rtl_again (rtx_insn *insn)
2778 {
2779 rtx_insn *p;
2780 tree decl;
2781
2782 for (p = insn; p; p = NEXT_INSN (p))
2783 if (INSN_P (p))
2784 {
2785 reset_used_flags (PATTERN (p));
2786 reset_used_flags (REG_NOTES (p));
2787 if (CALL_P (p))
2788 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2789 }
2790
2791 /* Make sure that virtual stack slots are not shared. */
2792 set_used_decls (DECL_INITIAL (cfun->decl));
2793
2794 /* Make sure that virtual parameters are not shared. */
2795 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2796 set_used_flags (DECL_RTL (decl));
2797
2798 rtx temp;
2799 unsigned int i;
2800 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2801 reset_used_flags (temp);
2802
2803 unshare_all_rtl_1 (insn);
2804 }
2805
2806 unsigned int
2807 unshare_all_rtl (void)
2808 {
2809 unshare_all_rtl_1 (get_insns ());
2810
2811 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2812 {
2813 if (DECL_RTL_SET_P (decl))
2814 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2815 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2816 }
2817
2818 return 0;
2819 }
2820
2821
2822 /* Check that ORIG is not marked when it should not be, and mark ORIG
2823 as in use. Recursively does the same for subexpressions. */
2824
2825 static void
2826 verify_rtx_sharing (rtx orig, rtx insn)
2827 {
2828 rtx x = orig;
2829 int i;
2830 enum rtx_code code;
2831 const char *format_ptr;
2832
2833 if (x == 0)
2834 return;
2835
2836 code = GET_CODE (x);
2837
2838 /* These types may be freely shared. */
2839
2840 switch (code)
2841 {
2842 case REG:
2843 case DEBUG_EXPR:
2844 case VALUE:
2845 CASE_CONST_ANY:
2846 case SYMBOL_REF:
2847 case LABEL_REF:
2848 case CODE_LABEL:
2849 case PC:
2850 case CC0:
2851 case RETURN:
2852 case SIMPLE_RETURN:
2853 case SCRATCH:
2854 /* SCRATCHes must be shared because each one represents a distinct value. */
2855 return;
2856 case CLOBBER:
2857 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2858 clobbers or clobbers of hard registers that originated as pseudos.
2859 This is needed to allow safe register renaming. */
2860 if (REG_P (XEXP (x, 0))
2861 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2862 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2863 return;
2864 break;
2865
2866 case CONST:
2867 if (shared_const_p (orig))
2868 return;
2869 break;
2870
2871 case MEM:
2872 /* A MEM is allowed to be shared if its address is constant. */
2873 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2874 || reload_completed || reload_in_progress)
2875 return;
2876
2877 break;
2878
2879 default:
2880 break;
2881 }
2882
2883 /* This rtx may not be shared. If it has already been seen,
2884 report invalid sharing. */
2885 if (flag_checking && RTX_FLAG (x, used))
2886 {
2887 error ("invalid rtl sharing found in the insn");
2888 debug_rtx (insn);
2889 error ("shared rtx");
2890 debug_rtx (x);
2891 internal_error ("internal consistency failure");
2892 }
2893 gcc_assert (!RTX_FLAG (x, used));
2894
2895 RTX_FLAG (x, used) = 1;
2896
2897 /* Now scan the subexpressions recursively. */
2898
2899 format_ptr = GET_RTX_FORMAT (code);
2900
2901 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2902 {
2903 switch (*format_ptr++)
2904 {
2905 case 'e':
2906 verify_rtx_sharing (XEXP (x, i), insn);
2907 break;
2908
2909 case 'E':
2910 if (XVEC (x, i) != NULL)
2911 {
2912 int j;
2913 int len = XVECLEN (x, i);
2914
2915 for (j = 0; j < len; j++)
2916 {
2917 /* We allow sharing of ASM_OPERANDS inside a single
2918 instruction. */
2919 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2920 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2921 == ASM_OPERANDS))
2922 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2923 else
2924 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2925 }
2926 }
2927 break;
2928 }
2929 }
2930 return;
2931 }
2932
2933 /* Reset used-flags for INSN. */
2934
2935 static void
2936 reset_insn_used_flags (rtx insn)
2937 {
2938 gcc_assert (INSN_P (insn));
2939 reset_used_flags (PATTERN (insn));
2940 reset_used_flags (REG_NOTES (insn));
2941 if (CALL_P (insn))
2942 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2943 }
2944
2945 /* Go through all the RTL insn bodies and clear all the USED bits. */
2946
2947 static void
2948 reset_all_used_flags (void)
2949 {
2950 rtx_insn *p;
2951
2952 for (p = get_insns (); p; p = NEXT_INSN (p))
2953 if (INSN_P (p))
2954 {
2955 rtx pat = PATTERN (p);
2956 if (GET_CODE (pat) != SEQUENCE)
2957 reset_insn_used_flags (p);
2958 else
2959 {
2960 gcc_assert (REG_NOTES (p) == NULL);
2961 for (int i = 0; i < XVECLEN (pat, 0); i++)
2962 {
2963 rtx insn = XVECEXP (pat, 0, i);
2964 if (INSN_P (insn))
2965 reset_insn_used_flags (insn);
2966 }
2967 }
2968 }
2969 }
2970
2971 /* Verify sharing in INSN. */
2972
2973 static void
2974 verify_insn_sharing (rtx insn)
2975 {
2976 gcc_assert (INSN_P (insn));
2977 verify_rtx_sharing (PATTERN (insn), insn);
2978 verify_rtx_sharing (REG_NOTES (insn), insn);
2979 if (CALL_P (insn))
2980 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2981 }
2982
2983 /* Go through all the RTL insn bodies and check that there is no unexpected
2984 sharing in between the subexpressions. */
2985
2986 DEBUG_FUNCTION void
2987 verify_rtl_sharing (void)
2988 {
2989 rtx_insn *p;
2990
2991 timevar_push (TV_VERIFY_RTL_SHARING);
2992
2993 reset_all_used_flags ();
2994
2995 for (p = get_insns (); p; p = NEXT_INSN (p))
2996 if (INSN_P (p))
2997 {
2998 rtx pat = PATTERN (p);
2999 if (GET_CODE (pat) != SEQUENCE)
3000 verify_insn_sharing (p);
3001 else
3002 for (int i = 0; i < XVECLEN (pat, 0); i++)
3003 {
3004 rtx insn = XVECEXP (pat, 0, i);
3005 if (INSN_P (insn))
3006 verify_insn_sharing (insn);
3007 }
3008 }
3009
3010 reset_all_used_flags ();
3011
3012 timevar_pop (TV_VERIFY_RTL_SHARING);
3013 }
3014
3015 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3016 Assumes the mark bits are cleared at entry. */
3017
3018 void
3019 unshare_all_rtl_in_chain (rtx_insn *insn)
3020 {
3021 for (; insn; insn = NEXT_INSN (insn))
3022 if (INSN_P (insn))
3023 {
3024 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3025 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3026 if (CALL_P (insn))
3027 CALL_INSN_FUNCTION_USAGE (insn)
3028 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3029 }
3030 }
3031
3032 /* Go through all virtual stack slots of a function and mark them as
3033 shared. We never replace the DECL_RTLs themselves with a copy,
3034 but expressions mentioned in a DECL_RTL cannot be shared with
3035 expressions in the instruction stream.
3036
3037 Note that reload may convert pseudo registers into memories in-place.
3038 Pseudo registers are always shared, but MEMs never are. Thus if we
3039 reset the used flags on MEMs in the instruction stream, we must set
3040 them again on MEMs that appear in DECL_RTLs. */
3041
3042 static void
3043 set_used_decls (tree blk)
3044 {
3045 tree t;
3046
3047 /* Mark decls. */
3048 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3049 if (DECL_RTL_SET_P (t))
3050 set_used_flags (DECL_RTL (t));
3051
3052 /* Now process sub-blocks. */
3053 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3054 set_used_decls (t);
3055 }
3056
3057 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3058 Recursively does the same for subexpressions. Uses
3059 copy_rtx_if_shared_1 to reduce stack space. */
3060
3061 rtx
3062 copy_rtx_if_shared (rtx orig)
3063 {
3064 copy_rtx_if_shared_1 (&orig);
3065 return orig;
3066 }
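/* A hypothetical sketch of the used-flag protocol: clear the flags over
   everything that may share structure, then walk it with
   copy_rtx_if_shared, which copies any rtx it meets a second time.  The
   example_* helper is for illustration only.  */
#if 0
static void
example_unshare_one_insn (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}
#endif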
3067
3068 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3069 use. Recursively does the same for subexpressions. */
3070
3071 static void
3072 copy_rtx_if_shared_1 (rtx *orig1)
3073 {
3074 rtx x;
3075 int i;
3076 enum rtx_code code;
3077 rtx *last_ptr;
3078 const char *format_ptr;
3079 int copied = 0;
3080 int length;
3081
3082 /* Repeat is used to turn tail-recursion into iteration. */
3083 repeat:
3084 x = *orig1;
3085
3086 if (x == 0)
3087 return;
3088
3089 code = GET_CODE (x);
3090
3091 /* These types may be freely shared. */
3092
3093 switch (code)
3094 {
3095 case REG:
3096 case DEBUG_EXPR:
3097 case VALUE:
3098 CASE_CONST_ANY:
3099 case SYMBOL_REF:
3100 case LABEL_REF:
3101 case CODE_LABEL:
3102 case PC:
3103 case CC0:
3104 case RETURN:
3105 case SIMPLE_RETURN:
3106 case SCRATCH:
3107 /* SCRATCHes must be shared because each one represents a distinct value. */
3108 return;
3109 case CLOBBER:
3110 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3111 clobbers or clobbers of hard registers that originated as pseudos.
3112 This is needed to allow safe register renaming. */
3113 if (REG_P (XEXP (x, 0))
3114 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3115 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3116 return;
3117 break;
3118
3119 case CONST:
3120 if (shared_const_p (x))
3121 return;
3122 break;
3123
3124 case DEBUG_INSN:
3125 case INSN:
3126 case JUMP_INSN:
3127 case CALL_INSN:
3128 case NOTE:
3129 case BARRIER:
3130 /* The chain of insns is not being copied. */
3131 return;
3132
3133 default:
3134 break;
3135 }
3136
3137 /* This rtx may not be shared. If it has already been seen,
3138 replace it with a copy of itself. */
3139
3140 if (RTX_FLAG (x, used))
3141 {
3142 x = shallow_copy_rtx (x);
3143 copied = 1;
3144 }
3145 RTX_FLAG (x, used) = 1;
3146
3147 /* Now scan the subexpressions recursively.
3148 We can store any replaced subexpressions directly into X
3149 since we know X is not shared! Any vectors in X
3150 must be copied if X was copied. */
3151
3152 format_ptr = GET_RTX_FORMAT (code);
3153 length = GET_RTX_LENGTH (code);
3154 last_ptr = NULL;
3155
3156 for (i = 0; i < length; i++)
3157 {
3158 switch (*format_ptr++)
3159 {
3160 case 'e':
3161 if (last_ptr)
3162 copy_rtx_if_shared_1 (last_ptr);
3163 last_ptr = &XEXP (x, i);
3164 break;
3165
3166 case 'E':
3167 if (XVEC (x, i) != NULL)
3168 {
3169 int j;
3170 int len = XVECLEN (x, i);
3171
3172 /* Copy the vector iff I copied the rtx and the length
3173 is nonzero. */
3174 if (copied && len > 0)
3175 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3176
3177 /* Call recursively on all inside the vector. */
3178 for (j = 0; j < len; j++)
3179 {
3180 if (last_ptr)
3181 copy_rtx_if_shared_1 (last_ptr);
3182 last_ptr = &XVECEXP (x, i, j);
3183 }
3184 }
3185 break;
3186 }
3187 }
3188 *orig1 = x;
3189 if (last_ptr)
3190 {
3191 orig1 = last_ptr;
3192 goto repeat;
3193 }
3194 return;
3195 }
3196
3197 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3198
3199 static void
3200 mark_used_flags (rtx x, int flag)
3201 {
3202 int i, j;
3203 enum rtx_code code;
3204 const char *format_ptr;
3205 int length;
3206
3207 /* Repeat is used to turn tail-recursion into iteration. */
3208 repeat:
3209 if (x == 0)
3210 return;
3211
3212 code = GET_CODE (x);
3213
3214 /* These types may be freely shared so we needn't do any resetting
3215 for them. */
3216
3217 switch (code)
3218 {
3219 case REG:
3220 case DEBUG_EXPR:
3221 case VALUE:
3222 CASE_CONST_ANY:
3223 case SYMBOL_REF:
3224 case CODE_LABEL:
3225 case PC:
3226 case CC0:
3227 case RETURN:
3228 case SIMPLE_RETURN:
3229 return;
3230
3231 case DEBUG_INSN:
3232 case INSN:
3233 case JUMP_INSN:
3234 case CALL_INSN:
3235 case NOTE:
3236 case LABEL_REF:
3237 case BARRIER:
3238 /* The chain of insns is not being copied. */
3239 return;
3240
3241 default:
3242 break;
3243 }
3244
3245 RTX_FLAG (x, used) = flag;
3246
3247 format_ptr = GET_RTX_FORMAT (code);
3248 length = GET_RTX_LENGTH (code);
3249
3250 for (i = 0; i < length; i++)
3251 {
3252 switch (*format_ptr++)
3253 {
3254 case 'e':
3255 if (i == length-1)
3256 {
3257 x = XEXP (x, i);
3258 goto repeat;
3259 }
3260 mark_used_flags (XEXP (x, i), flag);
3261 break;
3262
3263 case 'E':
3264 for (j = 0; j < XVECLEN (x, i); j++)
3265 mark_used_flags (XVECEXP (x, i, j), flag);
3266 break;
3267 }
3268 }
3269 }
3270
3271 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3272 to look for shared sub-parts. */
3273
3274 void
3275 reset_used_flags (rtx x)
3276 {
3277 mark_used_flags (x, 0);
3278 }
3279
3280 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3281 to look for shared sub-parts. */
3282
3283 void
3284 set_used_flags (rtx x)
3285 {
3286 mark_used_flags (x, 1);
3287 }
3288
3289 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3290 Return X or the rtx for the pseudo reg the value of X was copied into.
3291 OTHER must be valid as a SET_DEST. */
3292
3293 rtx
3294 make_safe_from (rtx x, rtx other)
3295 {
3296 while (1)
3297 switch (GET_CODE (other))
3298 {
3299 case SUBREG:
3300 other = SUBREG_REG (other);
3301 break;
3302 case STRICT_LOW_PART:
3303 case SIGN_EXTEND:
3304 case ZERO_EXTEND:
3305 other = XEXP (other, 0);
3306 break;
3307 default:
3308 goto done;
3309 }
3310 done:
3311 if ((MEM_P (other)
3312 && ! CONSTANT_P (x)
3313 && !REG_P (x)
3314 && GET_CODE (x) != SUBREG)
3315 || (REG_P (other)
3316 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3317 || reg_mentioned_p (other, x))))
3318 {
3319 rtx temp = gen_reg_rtx (GET_MODE (x));
3320 emit_move_insn (temp, x);
3321 return temp;
3322 }
3323 return x;
3324 }
3325
3326 /* Emission of insns (adding them to the doubly-linked list). */
3327
3328 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3329
3330 rtx_insn *
3331 get_last_insn_anywhere (void)
3332 {
3333 struct sequence_stack *seq;
3334 for (seq = get_current_sequence (); seq; seq = seq->next)
3335 if (seq->last != 0)
3336 return seq->last;
3337 return 0;
3338 }
3339
3340 /* Return the first nonnote insn emitted in current sequence or current
3341 function. This routine looks inside SEQUENCEs. */
3342
3343 rtx_insn *
3344 get_first_nonnote_insn (void)
3345 {
3346 rtx_insn *insn = get_insns ();
3347
3348 if (insn)
3349 {
3350 if (NOTE_P (insn))
3351 for (insn = next_insn (insn);
3352 insn && NOTE_P (insn);
3353 insn = next_insn (insn))
3354 continue;
3355 else
3356 {
3357 if (NONJUMP_INSN_P (insn)
3358 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3359 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3360 }
3361 }
3362
3363 return insn;
3364 }
3365
3366 /* Return the last nonnote insn emitted in current sequence or current
3367 function. This routine looks inside SEQUENCEs. */
3368
3369 rtx_insn *
3370 get_last_nonnote_insn (void)
3371 {
3372 rtx_insn *insn = get_last_insn ();
3373
3374 if (insn)
3375 {
3376 if (NOTE_P (insn))
3377 for (insn = previous_insn (insn);
3378 insn && NOTE_P (insn);
3379 insn = previous_insn (insn))
3380 continue;
3381 else
3382 {
3383 if (NONJUMP_INSN_P (insn))
3384 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3385 insn = seq->insn (seq->len () - 1);
3386 }
3387 }
3388
3389 return insn;
3390 }
3391
3392 /* Return the number of actual (non-debug) insns emitted in this
3393 function. */
3394
3395 int
3396 get_max_insn_count (void)
3397 {
3398 int n = cur_insn_uid;
3399
3400 /* The table size must be stable across -g, to avoid codegen
3401 differences due to debug insns, and not be affected by
3402 -fmin-insn-uid, to avoid excessive table size and to simplify
3403 debugging of -fcompare-debug failures. */
3404 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3405 n -= cur_debug_insn_uid;
3406 else
3407 n -= param_min_nondebug_insn_uid;
3408
3409 return n;
3410 }
3411
3412
3413 /* Return the next insn. If it is a SEQUENCE, return the first insn
3414 of the sequence. */
3415
3416 rtx_insn *
3417 next_insn (rtx_insn *insn)
3418 {
3419 if (insn)
3420 {
3421 insn = NEXT_INSN (insn);
3422 if (insn && NONJUMP_INSN_P (insn)
3423 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3424 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3425 }
3426
3427 return insn;
3428 }
3429
3430 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3431 of the sequence. */
3432
3433 rtx_insn *
3434 previous_insn (rtx_insn *insn)
3435 {
3436 if (insn)
3437 {
3438 insn = PREV_INSN (insn);
3439 if (insn && NONJUMP_INSN_P (insn))
3440 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3441 insn = seq->insn (seq->len () - 1);
3442 }
3443
3444 return insn;
3445 }
3446
3447 /* Return the next insn after INSN that is not a NOTE. This routine does not
3448 look inside SEQUENCEs. */
3449
3450 rtx_insn *
3451 next_nonnote_insn (rtx_insn *insn)
3452 {
3453 while (insn)
3454 {
3455 insn = NEXT_INSN (insn);
3456 if (insn == 0 || !NOTE_P (insn))
3457 break;
3458 }
3459
3460 return insn;
3461 }
3462
3463 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3464 routine does not look inside SEQUENCEs. */
3465
3466 rtx_insn *
3467 next_nondebug_insn (rtx_insn *insn)
3468 {
3469 while (insn)
3470 {
3471 insn = NEXT_INSN (insn);
3472 if (insn == 0 || !DEBUG_INSN_P (insn))
3473 break;
3474 }
3475
3476 return insn;
3477 }
3478
3479 /* Return the previous insn before INSN that is not a NOTE. This routine does
3480 not look inside SEQUENCEs. */
3481
3482 rtx_insn *
3483 prev_nonnote_insn (rtx_insn *insn)
3484 {
3485 while (insn)
3486 {
3487 insn = PREV_INSN (insn);
3488 if (insn == 0 || !NOTE_P (insn))
3489 break;
3490 }
3491
3492 return insn;
3493 }
3494
3495 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3496 This routine does not look inside SEQUENCEs. */
3497
3498 rtx_insn *
3499 prev_nondebug_insn (rtx_insn *insn)
3500 {
3501 while (insn)
3502 {
3503 insn = PREV_INSN (insn);
3504 if (insn == 0 || !DEBUG_INSN_P (insn))
3505 break;
3506 }
3507
3508 return insn;
3509 }
3510
3511 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3512 This routine does not look inside SEQUENCEs. */
3513
3514 rtx_insn *
3515 next_nonnote_nondebug_insn (rtx_insn *insn)
3516 {
3517 while (insn)
3518 {
3519 insn = NEXT_INSN (insn);
3520 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3521 break;
3522 }
3523
3524 return insn;
3525 }
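/* A minimal, hypothetical sketch of the common scan over the "real"
   contents of a function, using the walkers above to step past notes and
   debug insns; the example_* helper is for illustration only.  */
#if 0
static void
example_walk_real_insns (void)
{
  for (rtx_insn *insn = get_insns (); insn;
       insn = next_nonnote_nondebug_insn (insn))
    if (INSN_P (insn))
      {
	/* ... inspect PATTERN (insn) here ... */
      }
}
#endif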
3526
3527 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3528 but stop the search before we enter another basic block. This
3529 routine does not look inside SEQUENCEs. */
3530
3531 rtx_insn *
3532 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3533 {
3534 while (insn)
3535 {
3536 insn = NEXT_INSN (insn);
3537 if (insn == 0)
3538 break;
3539 if (DEBUG_INSN_P (insn))
3540 continue;
3541 if (!NOTE_P (insn))
3542 break;
3543 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3544 return NULL;
3545 }
3546
3547 return insn;
3548 }
3549
3550 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3551 This routine does not look inside SEQUENCEs. */
3552
3553 rtx_insn *
3554 prev_nonnote_nondebug_insn (rtx_insn *insn)
3555 {
3556 while (insn)
3557 {
3558 insn = PREV_INSN (insn);
3559 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3560 break;
3561 }
3562
3563 return insn;
3564 }
3565
3566 /* Return the previous insn before INSN that is not a NOTE nor
3567 DEBUG_INSN, but stop the search before we enter another basic
3568 block. This routine does not look inside SEQUENCEs. */
3569
3570 rtx_insn *
3571 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3572 {
3573 while (insn)
3574 {
3575 insn = PREV_INSN (insn);
3576 if (insn == 0)
3577 break;
3578 if (DEBUG_INSN_P (insn))
3579 continue;
3580 if (!NOTE_P (insn))
3581 break;
3582 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3583 return NULL;
3584 }
3585
3586 return insn;
3587 }
3588
3589 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3590 or 0, if there is none. This routine does not look inside
3591 SEQUENCEs. */
3592
3593 rtx_insn *
3594 next_real_insn (rtx_insn *insn)
3595 {
3596 while (insn)
3597 {
3598 insn = NEXT_INSN (insn);
3599 if (insn == 0 || INSN_P (insn))
3600 break;
3601 }
3602
3603 return insn;
3604 }
3605
3606 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3607 or 0, if there is none. This routine does not look inside
3608 SEQUENCEs. */
3609
3610 rtx_insn *
3611 prev_real_insn (rtx_insn *insn)
3612 {
3613 while (insn)
3614 {
3615 insn = PREV_INSN (insn);
3616 if (insn == 0 || INSN_P (insn))
3617 break;
3618 }
3619
3620 return insn;
3621 }
3622
3623 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3624 or 0, if there is none. This routine does not look inside
3625 SEQUENCEs. */
3626
3627 rtx_insn *
3628 next_real_nondebug_insn (rtx uncast_insn)
3629 {
3630 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3631
3632 while (insn)
3633 {
3634 insn = NEXT_INSN (insn);
3635 if (insn == 0 || NONDEBUG_INSN_P (insn))
3636 break;
3637 }
3638
3639 return insn;
3640 }
3641
3642 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3643 or 0, if there is none. This routine does not look inside
3644 SEQUENCEs. */
3645
3646 rtx_insn *
3647 prev_real_nondebug_insn (rtx_insn *insn)
3648 {
3649 while (insn)
3650 {
3651 insn = PREV_INSN (insn);
3652 if (insn == 0 || NONDEBUG_INSN_P (insn))
3653 break;
3654 }
3655
3656 return insn;
3657 }
3658
3659 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3660 This routine does not look inside SEQUENCEs. */
3661
3662 rtx_call_insn *
3663 last_call_insn (void)
3664 {
3665 rtx_insn *insn;
3666
3667 for (insn = get_last_insn ();
3668 insn && !CALL_P (insn);
3669 insn = PREV_INSN (insn))
3670 ;
3671
3672 return safe_as_a <rtx_call_insn *> (insn);
3673 }
3674
3675 /* Return nonzero if INSN really does something. After reload, standalone
3676 USE and CLOBBER insns do not qualify. next_active_insn below returns the
3677 next such insn after INSN; neither routine looks inside SEQUENCEs. */
3678
3679 int
3680 active_insn_p (const rtx_insn *insn)
3681 {
3682 return (CALL_P (insn) || JUMP_P (insn)
3683 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3684 || (NONJUMP_INSN_P (insn)
3685 && (! reload_completed
3686 || (GET_CODE (PATTERN (insn)) != USE
3687 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3688 }
3689
3690 rtx_insn *
3691 next_active_insn (rtx_insn *insn)
3692 {
3693 while (insn)
3694 {
3695 insn = NEXT_INSN (insn);
3696 if (insn == 0 || active_insn_p (insn))
3697 break;
3698 }
3699
3700 return insn;
3701 }
3702
3703 /* Find the last insn before INSN that really does something. This routine
3704 does not look inside SEQUENCEs. After reload this also skips over
3705 standalone USE and CLOBBER insns. */
3706
3707 rtx_insn *
3708 prev_active_insn (rtx_insn *insn)
3709 {
3710 while (insn)
3711 {
3712 insn = PREV_INSN (insn);
3713 if (insn == 0 || active_insn_p (insn))
3714 break;
3715 }
3716
3717 return insn;
3718 }
3719
3720 /* Return the next insn that uses CC0 after INSN, which is assumed to
3721 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3722 applied to the result of this function should yield INSN).
3723
3724 Normally, this is simply the next insn. However, if a REG_CC_USER note
3725 is present, it contains the insn that uses CC0.
3726
3727 Return 0 if we can't find the insn. */
3728
3729 rtx_insn *
3730 next_cc0_user (rtx_insn *insn)
3731 {
3732 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3733
3734 if (note)
3735 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3736
3737 insn = next_nonnote_insn (insn);
3738 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3739 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3740
3741 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3742 return insn;
3743
3744 return 0;
3745 }
3746
3747 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3748 note, it is the previous insn. */
3749
3750 rtx_insn *
3751 prev_cc0_setter (rtx_insn *insn)
3752 {
3753 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3754
3755 if (note)
3756 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3757
3758 insn = prev_nonnote_insn (insn);
3759 gcc_assert (sets_cc0_p (PATTERN (insn)));
3760
3761 return insn;
3762 }
3763
3764 /* Return true if X contains an RTX_AUTOINC class rtx whose register operand matches REG. */
3765
3766 static int
3767 find_auto_inc (const_rtx x, const_rtx reg)
3768 {
3769 subrtx_iterator::array_type array;
3770 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3771 {
3772 const_rtx x = *iter;
3773 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3774 && rtx_equal_p (reg, XEXP (x, 0)))
3775 return true;
3776 }
3777 return false;
3778 }
3779
3780 /* Increment LABEL_NUSES for all labels present in X. */
3781
3782 static void
3783 mark_label_nuses (rtx x)
3784 {
3785 enum rtx_code code;
3786 int i, j;
3787 const char *fmt;
3788
3789 code = GET_CODE (x);
3790 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3791 LABEL_NUSES (label_ref_label (x))++;
3792
3793 fmt = GET_RTX_FORMAT (code);
3794 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3795 {
3796 if (fmt[i] == 'e')
3797 mark_label_nuses (XEXP (x, i));
3798 else if (fmt[i] == 'E')
3799 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3800 mark_label_nuses (XVECEXP (x, i, j));
3801 }
3802 }
3803
3804
3805 /* Try splitting insns that can be split for better scheduling.
3806 PAT is the pattern which might split.
3807 TRIAL is the insn providing PAT.
3808 LAST is nonzero if we should return the last insn of the sequence produced.
3809
3810 If this routine succeeds in splitting, it returns the first or last
3811 replacement insn depending on the value of LAST. Otherwise, it
3812 returns TRIAL. If the insn to be returned can be split, it will be. */
3813
3814 rtx_insn *
3815 try_split (rtx pat, rtx_insn *trial, int last)
3816 {
3817 rtx_insn *before, *after;
3818 rtx note;
3819 rtx_insn *seq, *tem;
3820 profile_probability probability;
3821 rtx_insn *insn_last, *insn;
3822 int njumps = 0;
3823 rtx_insn *call_insn = NULL;
3824
3825 if (any_condjump_p (trial)
3826 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3827 split_branch_probability
3828 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3829 else
3830 split_branch_probability = profile_probability::uninitialized ();
3831
3832 probability = split_branch_probability;
3833
3834 seq = split_insns (pat, trial);
3835
3836 split_branch_probability = profile_probability::uninitialized ();
3837
3838 if (!seq)
3839 return trial;
3840
3841 int split_insn_count = 0;
3842 /* Avoid infinite loop if any insn of the result matches
3843 the original pattern. */
3844 insn_last = seq;
3845 while (1)
3846 {
3847 if (INSN_P (insn_last)
3848 && rtx_equal_p (PATTERN (insn_last), pat))
3849 return trial;
3850 split_insn_count++;
3851 if (!NEXT_INSN (insn_last))
3852 break;
3853 insn_last = NEXT_INSN (insn_last);
3854 }
3855
3856 /* We're not good at redistributing frame information if
3857 the split occurs before reload or if it results in more
3858 than one insn. */
3859 if (RTX_FRAME_RELATED_P (trial))
3860 {
3861 if (!reload_completed || split_insn_count != 1)
3862 return trial;
3863
3864 rtx_insn *new_insn = seq;
3865 rtx_insn *old_insn = trial;
3866 copy_frame_info_to_split_insn (old_insn, new_insn);
3867 }
3868
3869 /* We will be adding the new sequence to the function. The splitters
3870 may have introduced invalid RTL sharing, so unshare the sequence now. */
3871 unshare_all_rtl_in_chain (seq);
3872
3873 /* Mark labels and copy flags. */
3874 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3875 {
3876 if (JUMP_P (insn))
3877 {
3878 if (JUMP_P (trial))
3879 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3880 mark_jump_label (PATTERN (insn), insn, 0);
3881 njumps++;
3882 if (probability.initialized_p ()
3883 && any_condjump_p (insn)
3884 && !find_reg_note (insn, REG_BR_PROB, 0))
3885 {
3886 /* We can preserve the REG_BR_PROB notes only if exactly
3887 one jump is created, otherwise the machine description
3888 is responsible for this step using
3889 split_branch_probability variable. */
3890 gcc_assert (njumps == 1);
3891 add_reg_br_prob_note (insn, probability);
3892 }
3893 }
3894 }
3895
3896 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3897 in SEQ and copy any additional information across. */
3898 if (CALL_P (trial))
3899 {
3900 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3901 if (CALL_P (insn))
3902 {
3903 gcc_assert (call_insn == NULL_RTX);
3904 call_insn = insn;
3905
3906 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3907 target may have explicitly specified. */
3908 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3909 while (*p)
3910 p = &XEXP (*p, 1);
3911 *p = CALL_INSN_FUNCTION_USAGE (trial);
3912
3913 /* If the old call was a sibling call, the new one must
3914 be too. */
3915 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3916 }
3917 }
3918
3919 /* Copy notes, particularly those related to the CFG. */
3920 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3921 {
3922 switch (REG_NOTE_KIND (note))
3923 {
3924 case REG_EH_REGION:
3925 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3926 break;
3927
3928 case REG_NORETURN:
3929 case REG_SETJMP:
3930 case REG_TM:
3931 case REG_CALL_NOCF_CHECK:
3932 case REG_CALL_ARG_LOCATION:
3933 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3934 {
3935 if (CALL_P (insn))
3936 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3937 }
3938 break;
3939
3940 case REG_NON_LOCAL_GOTO:
3941 case REG_LABEL_TARGET:
3942 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3943 {
3944 if (JUMP_P (insn))
3945 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3946 }
3947 break;
3948
3949 case REG_INC:
3950 if (!AUTO_INC_DEC)
3951 break;
3952
3953 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3954 {
3955 rtx reg = XEXP (note, 0);
3956 if (!FIND_REG_INC_NOTE (insn, reg)
3957 && find_auto_inc (PATTERN (insn), reg))
3958 add_reg_note (insn, REG_INC, reg);
3959 }
3960 break;
3961
3962 case REG_ARGS_SIZE:
3963 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3964 break;
3965
3966 case REG_CALL_DECL:
3967 case REG_UNTYPED_CALL:
3968 gcc_assert (call_insn != NULL_RTX);
3969 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3970 break;
3971
3972 default:
3973 break;
3974 }
3975 }
3976
3977 /* If there are LABELS inside the split insns, increment the
3978 usage count so we don't delete the label. */
3979 if (INSN_P (trial))
3980 {
3981 insn = insn_last;
3982 while (insn != NULL_RTX)
3983 {
3984 /* JUMP_P insns have already been "marked" above. */
3985 if (NONJUMP_INSN_P (insn))
3986 mark_label_nuses (PATTERN (insn));
3987
3988 insn = PREV_INSN (insn);
3989 }
3990 }
3991
3992 before = PREV_INSN (trial);
3993 after = NEXT_INSN (trial);
3994
3995 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3996
3997 delete_insn (trial);
3998
3999 /* Recursively call try_split for each new insn created; by the
4000 time control returns here that insn will be fully split, so
4001 set LAST and continue from the insn after the one returned.
4002 We can't use next_active_insn here since AFTER may be a note.
4003 Ignore deleted insns, which can occur if not optimizing. */
4004 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4005 if (! tem->deleted () && INSN_P (tem))
4006 tem = try_split (PATTERN (tem), tem, 1);
4007
4008 /* Return either the first or the last insn, depending on which was
4009 requested. */
4010 return last
4011 ? (after ? PREV_INSN (after) : get_last_insn ())
4012 : NEXT_INSN (before);
4013 }
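
/* Illustrative sketch (not part of the original file): a minimal example of
   how a caller might drive try_split over the insn stream.  The real
   splitting passes (e.g. split_all_insns) do considerably more bookkeeping;
   this only shows the calling convention.  */

static void ATTRIBUTE_UNUSED
split_all_insns_sketch (void)
{
  rtx_insn *insn, *next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn) && !insn->deleted ())
	/* Passing LAST == 1 asks for the last insn of the split sequence,
	   so the walk resumes after everything that was generated.  */
	next = NEXT_INSN (try_split (PATTERN (insn), insn, 1));
    }
}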
4014
4015 /* Make and return an INSN rtx, initializing all its slots.
4016 Store PATTERN in the pattern slots. */
4017
4018 rtx_insn *
4019 make_insn_raw (rtx pattern)
4020 {
4021 rtx_insn *insn;
4022
4023 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
4024
4025 INSN_UID (insn) = cur_insn_uid++;
4026 PATTERN (insn) = pattern;
4027 INSN_CODE (insn) = -1;
4028 REG_NOTES (insn) = NULL;
4029 INSN_LOCATION (insn) = curr_insn_location ();
4030 BLOCK_FOR_INSN (insn) = NULL;
4031
4032 #ifdef ENABLE_RTL_CHECKING
4033 if (insn
4034 && INSN_P (insn)
4035 && (returnjump_p (insn)
4036 || (GET_CODE (insn) == SET
4037 && SET_DEST (insn) == pc_rtx)))
4038 {
4039 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4040 debug_rtx (insn);
4041 }
4042 #endif
4043
4044 return insn;
4045 }
4046
4047 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4048
4049 static rtx_insn *
4050 make_debug_insn_raw (rtx pattern)
4051 {
4052 rtx_debug_insn *insn;
4053
4054 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4055 INSN_UID (insn) = cur_debug_insn_uid++;
4056 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4057 INSN_UID (insn) = cur_insn_uid++;
4058
4059 PATTERN (insn) = pattern;
4060 INSN_CODE (insn) = -1;
4061 REG_NOTES (insn) = NULL;
4062 INSN_LOCATION (insn) = curr_insn_location ();
4063 BLOCK_FOR_INSN (insn) = NULL;
4064
4065 return insn;
4066 }
4067
4068 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4069
4070 static rtx_insn *
4071 make_jump_insn_raw (rtx pattern)
4072 {
4073 rtx_jump_insn *insn;
4074
4075 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4076 INSN_UID (insn) = cur_insn_uid++;
4077
4078 PATTERN (insn) = pattern;
4079 INSN_CODE (insn) = -1;
4080 REG_NOTES (insn) = NULL;
4081 JUMP_LABEL (insn) = NULL;
4082 INSN_LOCATION (insn) = curr_insn_location ();
4083 BLOCK_FOR_INSN (insn) = NULL;
4084
4085 return insn;
4086 }
4087
4088 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4089
4090 static rtx_insn *
4091 make_call_insn_raw (rtx pattern)
4092 {
4093 rtx_call_insn *insn;
4094
4095 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4096 INSN_UID (insn) = cur_insn_uid++;
4097
4098 PATTERN (insn) = pattern;
4099 INSN_CODE (insn) = -1;
4100 REG_NOTES (insn) = NULL;
4101 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4102 INSN_LOCATION (insn) = curr_insn_location ();
4103 BLOCK_FOR_INSN (insn) = NULL;
4104
4105 return insn;
4106 }
4107
4108 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4109
4110 static rtx_note *
4111 make_note_raw (enum insn_note subtype)
4112 {
4113 /* Some notes are never created this way at all. These notes are
4114 only created by patching out insns. */
4115 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4116 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4117
4118 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4119 INSN_UID (note) = cur_insn_uid++;
4120 NOTE_KIND (note) = subtype;
4121 BLOCK_FOR_INSN (note) = NULL;
4122 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4123 return note;
4124 }
4125
4126 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
4127 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4128 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4129
4130 static inline void
4131 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4132 {
4133 SET_PREV_INSN (insn) = prev;
4134 SET_NEXT_INSN (insn) = next;
4135 if (prev != NULL)
4136 {
4137 SET_NEXT_INSN (prev) = insn;
4138 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4139 {
4140 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4141 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4142 }
4143 }
4144 if (next != NULL)
4145 {
4146 SET_PREV_INSN (next) = insn;
4147 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4148 {
4149 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4150 SET_PREV_INSN (sequence->insn (0)) = insn;
4151 }
4152 }
4153
4154 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4155 {
4156 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4157 SET_PREV_INSN (sequence->insn (0)) = prev;
4158 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4159 }
4160 }
4161
4162 /* Add INSN to the end of the doubly-linked list.
4163 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4164
4165 void
4166 add_insn (rtx_insn *insn)
4167 {
4168 rtx_insn *prev = get_last_insn ();
4169 link_insn_into_chain (insn, prev, NULL);
4170 if (get_insns () == NULL)
4171 set_first_insn (insn);
4172 set_last_insn (insn);
4173 }
4174
4175 /* Add INSN into the doubly-linked list after insn AFTER. */
4176
4177 static void
4178 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4179 {
4180 rtx_insn *next = NEXT_INSN (after);
4181
4182 gcc_assert (!optimize || !after->deleted ());
4183
4184 link_insn_into_chain (insn, after, next);
4185
4186 if (next == NULL)
4187 {
4188 struct sequence_stack *seq;
4189
4190 for (seq = get_current_sequence (); seq; seq = seq->next)
4191 if (after == seq->last)
4192 {
4193 seq->last = insn;
4194 break;
4195 }
4196 }
4197 }
4198
4199 /* Add INSN into the doubly-linked list before insn BEFORE. */
4200
4201 static void
4202 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4203 {
4204 rtx_insn *prev = PREV_INSN (before);
4205
4206 gcc_assert (!optimize || !before->deleted ());
4207
4208 link_insn_into_chain (insn, prev, before);
4209
4210 if (prev == NULL)
4211 {
4212 struct sequence_stack *seq;
4213
4214 for (seq = get_current_sequence (); seq; seq = seq->next)
4215 if (before == seq->first)
4216 {
4217 seq->first = insn;
4218 break;
4219 }
4220
4221 gcc_assert (seq);
4222 }
4223 }
4224
4225 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4226 If BB is NULL, an attempt is made to infer the bb from AFTER.
4227
4228 This and the next function should be the only functions called
4229 to insert an insn once delay slots have been filled since only
4230 they know how to update a SEQUENCE. */
4231
4232 void
4233 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4234 {
4235 add_insn_after_nobb (insn, after);
4236 if (!BARRIER_P (after)
4237 && !BARRIER_P (insn)
4238 && (bb = BLOCK_FOR_INSN (after)))
4239 {
4240 set_block_for_insn (insn, bb);
4241 if (INSN_P (insn))
4242 df_insn_rescan (insn);
4243 /* Should not happen as first in the BB is always
4244 either NOTE or LABEL. */
4245 if (BB_END (bb) == after
4246 /* Avoid clobbering of structure when creating new BB. */
4247 && !BARRIER_P (insn)
4248 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4249 BB_END (bb) = insn;
4250 }
4251 }
4252
4253 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4254 If BB is NULL, an attempt is made to infer the bb from before.
4255
4256 This and the previous function should be the only functions called
4257 to insert an insn once delay slots have been filled since only
4258 they know how to update a SEQUENCE. */
4259
4260 void
4261 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4262 {
4263 add_insn_before_nobb (insn, before);
4264
4265 if (!bb
4266 && !BARRIER_P (before)
4267 && !BARRIER_P (insn))
4268 bb = BLOCK_FOR_INSN (before);
4269
4270 if (bb)
4271 {
4272 set_block_for_insn (insn, bb);
4273 if (INSN_P (insn))
4274 df_insn_rescan (insn);
4275 /* Should not happen as first in the BB is always either NOTE or
4276 LABEL. */
4277 gcc_assert (BB_HEAD (bb) != insn
4278 /* Avoid clobbering of structure when creating new BB. */
4279 || BARRIER_P (insn)
4280 || NOTE_INSN_BASIC_BLOCK_P (insn));
4281 }
4282 }
4283
4284 /* Replace INSN with a deleted instruction note. */
4285
4286 void
4287 set_insn_deleted (rtx_insn *insn)
4288 {
4289 if (INSN_P (insn))
4290 df_insn_delete (insn);
4291 PUT_CODE (insn, NOTE);
4292 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4293 }
4294
4295
4296 /* Unlink INSN from the insn chain.
4297
4298 This function knows how to handle sequences.
4299
4300 This function does not invalidate data flow information associated with
4301 INSN (i.e. does not call df_insn_delete). That makes this function
4302 usable for merely disconnecting an insn from the chain so that it can
4303 be re-emitted elsewhere later.
4304
4305 To later insert INSN elsewhere in the insn chain via add_insn and
4306 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4307 the caller. Nullifying them here breaks many insn chain walks.
4308
4309 To really delete an insn and related DF information, use delete_insn. */
4310
4311 void
4312 remove_insn (rtx_insn *insn)
4313 {
4314 rtx_insn *next = NEXT_INSN (insn);
4315 rtx_insn *prev = PREV_INSN (insn);
4316 basic_block bb;
4317
4318 if (prev)
4319 {
4320 SET_NEXT_INSN (prev) = next;
4321 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4322 {
4323 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4324 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4325 }
4326 }
4327 else
4328 {
4329 struct sequence_stack *seq;
4330
4331 for (seq = get_current_sequence (); seq; seq = seq->next)
4332 if (insn == seq->first)
4333 {
4334 seq->first = next;
4335 break;
4336 }
4337
4338 gcc_assert (seq);
4339 }
4340
4341 if (next)
4342 {
4343 SET_PREV_INSN (next) = prev;
4344 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4345 {
4346 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4347 SET_PREV_INSN (sequence->insn (0)) = prev;
4348 }
4349 }
4350 else
4351 {
4352 struct sequence_stack *seq;
4353
4354 for (seq = get_current_sequence (); seq; seq = seq->next)
4355 if (insn == seq->last)
4356 {
4357 seq->last = prev;
4358 break;
4359 }
4360
4361 gcc_assert (seq);
4362 }
4363
4364 /* Fix up basic block boundaries, if necessary. */
4365 if (!BARRIER_P (insn)
4366 && (bb = BLOCK_FOR_INSN (insn)))
4367 {
4368 if (BB_HEAD (bb) == insn)
4369 {
4370 /* Never ever delete the basic block note without deleting whole
4371 basic block. */
4372 gcc_assert (!NOTE_P (insn));
4373 BB_HEAD (bb) = next;
4374 }
4375 if (BB_END (bb) == insn)
4376 BB_END (bb) = prev;
4377 }
4378 }
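
/* Illustrative sketch (not part of the original file): the workflow the
   comment above describes for moving a single insn.  Detach it with
   remove_insn, clear its chain pointers, then link it back in elsewhere.
   The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
move_insn_after_sketch (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);

  /* The caller must nullify the links before re-adding the insn.  */
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;

  add_insn_after (insn, after, NULL);
}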
4379
4380 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4381
4382 void
4383 add_function_usage_to (rtx call_insn, rtx call_fusage)
4384 {
4385 gcc_assert (call_insn && CALL_P (call_insn));
4386
4387 /* Put the register usage information on the CALL. If there is already
4388 some usage information, put ours at the end. */
4389 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4390 {
4391 rtx link;
4392
4393 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4394 link = XEXP (link, 1))
4395 ;
4396
4397 XEXP (link, 1) = call_fusage;
4398 }
4399 else
4400 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4401 }
4402
4403 /* Delete all insns made since FROM.
4404 FROM becomes the new last instruction. */
4405
4406 void
4407 delete_insns_since (rtx_insn *from)
4408 {
4409 if (from == 0)
4410 set_first_insn (0);
4411 else
4412 SET_NEXT_INSN (from) = 0;
4413 set_last_insn (from);
4414 }
4415
4416 /* This function is deprecated, please use sequences instead.
4417
4418 Move a consecutive bunch of insns to a different place in the chain.
4419 The insns to be moved are those between FROM and TO.
4420 They are moved to a new position after the insn AFTER.
4421 AFTER must not be FROM or TO or any insn in between.
4422
4423 This function does not know about SEQUENCEs and hence should not be
4424 called after delay-slot filling has been done. */
4425
4426 void
4427 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4428 {
4429 if (flag_checking)
4430 {
4431 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4432 gcc_assert (after != x);
4433 gcc_assert (after != to);
4434 }
4435
4436 /* Splice this bunch out of where it is now. */
4437 if (PREV_INSN (from))
4438 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4439 if (NEXT_INSN (to))
4440 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4441 if (get_last_insn () == to)
4442 set_last_insn (PREV_INSN (from));
4443 if (get_insns () == from)
4444 set_first_insn (NEXT_INSN (to));
4445
4446 /* Make the new neighbors point to it and it to them. */
4447 if (NEXT_INSN (after))
4448 SET_PREV_INSN (NEXT_INSN (after)) = to;
4449
4450 SET_NEXT_INSN (to) = NEXT_INSN (after);
4451 SET_PREV_INSN (from) = after;
4452 SET_NEXT_INSN (after) = from;
4453 if (after == get_last_insn ())
4454 set_last_insn (to);
4455 }
4456
4457 /* Same as function above, but take care to update BB boundaries. */
4458 void
4459 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4460 {
4461 rtx_insn *prev = PREV_INSN (from);
4462 basic_block bb, bb2;
4463
4464 reorder_insns_nobb (from, to, after);
4465
4466 if (!BARRIER_P (after)
4467 && (bb = BLOCK_FOR_INSN (after)))
4468 {
4469 rtx_insn *x;
4470 df_set_bb_dirty (bb);
4471
4472 if (!BARRIER_P (from)
4473 && (bb2 = BLOCK_FOR_INSN (from)))
4474 {
4475 if (BB_END (bb2) == to)
4476 BB_END (bb2) = prev;
4477 df_set_bb_dirty (bb2);
4478 }
4479
4480 if (BB_END (bb) == after)
4481 BB_END (bb) = to;
4482
4483 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4484 if (!BARRIER_P (x))
4485 df_insn_change_bb (x, bb);
4486 }
4487 }
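
/* Illustrative sketch (not part of the original file): reorder_insns moves
   the whole run FROM..TO (inclusive) so that it follows AFTER, keeping basic
   block boundaries and DF information up to date.  For example, to move a
   single insn right after another one:  */

static void ATTRIBUTE_UNUSED
move_single_insn_sketch (rtx_insn *insn, rtx_insn *after)
{
  /* A one-insn run has FROM == TO.  AFTER must not lie inside the run.  */
  if (insn != after)
    reorder_insns (insn, insn, after);
}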
4488
4489
4490 /* Emit insn(s) of given code and pattern
4491 at a specified place within the doubly-linked list.
4492
4493 All of the emit_foo global entry points accept an object
4494 X which is either an insn list or a PATTERN of a single
4495 instruction.
4496
4497 There are thus a few canonical ways to generate code and
4498 emit it at a specific place in the instruction stream. For
4499 example, consider the instruction named SPOT and the fact that
4500 we would like to emit some instructions before SPOT. We might
4501 do it like this:
4502
4503 start_sequence ();
4504 ... emit the new instructions ...
4505 insns_head = get_insns ();
4506 end_sequence ();
4507
4508 emit_insn_before (insns_head, SPOT);
4509
4510 It used to be common to generate SEQUENCE rtl instead, but that
4511 is a relic of the past which no longer occurs. The reason is that
4512 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4513 generated would almost certainly die right after it was created. */
4514
4515 static rtx_insn *
4516 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4517 basic_block bb,
4518 rtx_insn *(*make_raw) (rtx))
4519 {
4520 rtx_insn *insn;
4521
4522 gcc_assert (before);
4523
4524 if (x == NULL_RTX)
4525 return last;
4526
4527 switch (GET_CODE (x))
4528 {
4529 case DEBUG_INSN:
4530 case INSN:
4531 case JUMP_INSN:
4532 case CALL_INSN:
4533 case CODE_LABEL:
4534 case BARRIER:
4535 case NOTE:
4536 insn = as_a <rtx_insn *> (x);
4537 while (insn)
4538 {
4539 rtx_insn *next = NEXT_INSN (insn);
4540 add_insn_before (insn, before, bb);
4541 last = insn;
4542 insn = next;
4543 }
4544 break;
4545
4546 #ifdef ENABLE_RTL_CHECKING
4547 case SEQUENCE:
4548 gcc_unreachable ();
4549 break;
4550 #endif
4551
4552 default:
4553 last = (*make_raw) (x);
4554 add_insn_before (last, before, bb);
4555 break;
4556 }
4557
4558 return last;
4559 }
4560
4561 /* Make X be output before the instruction BEFORE. */
4562
4563 rtx_insn *
4564 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4565 {
4566 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4567 }
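
/* Illustrative sketch (not part of the original file): a concrete version of
   the start_sequence / emit_insn_before pattern described in the comment
   above.  Emit a couple of moves into a detached sequence and then splice
   them in front of SPOT.  The register and constant choices are arbitrary
   and only for illustration.  */

static void ATTRIBUTE_UNUSED
emit_zero_pair_before_sketch (rtx_insn *spot)
{
  rtx tmp1 = gen_reg_rtx (word_mode);
  rtx tmp2 = gen_reg_rtx (word_mode);
  rtx_insn *insns_head;

  start_sequence ();
  emit_move_insn (tmp1, const0_rtx);
  emit_move_insn (tmp2, const1_rtx);
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}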
4568
4569 /* Make an instruction with body X and code JUMP_INSN
4570 and output it before the instruction BEFORE. */
4571
4572 rtx_jump_insn *
4573 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4574 {
4575 return as_a <rtx_jump_insn *> (
4576 emit_pattern_before_noloc (x, before, NULL, NULL,
4577 make_jump_insn_raw));
4578 }
4579
4580 /* Make an instruction with body X and code CALL_INSN
4581 and output it before the instruction BEFORE. */
4582
4583 rtx_insn *
4584 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4585 {
4586 return emit_pattern_before_noloc (x, before, NULL, NULL,
4587 make_call_insn_raw);
4588 }
4589
4590 /* Make an instruction with body X and code DEBUG_INSN
4591 and output it before the instruction BEFORE. */
4592
4593 rtx_insn *
4594 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4595 {
4596 return emit_pattern_before_noloc (x, before, NULL, NULL,
4597 make_debug_insn_raw);
4598 }
4599
4600 /* Make an insn of code BARRIER
4601 and output it before the insn BEFORE. */
4602
4603 rtx_barrier *
4604 emit_barrier_before (rtx_insn *before)
4605 {
4606 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4607
4608 INSN_UID (insn) = cur_insn_uid++;
4609
4610 add_insn_before (insn, before, NULL);
4611 return insn;
4612 }
4613
4614 /* Emit the label LABEL before the insn BEFORE. */
4615
4616 rtx_code_label *
4617 emit_label_before (rtx_code_label *label, rtx_insn *before)
4618 {
4619 gcc_checking_assert (INSN_UID (label) == 0);
4620 INSN_UID (label) = cur_insn_uid++;
4621 add_insn_before (label, before, NULL);
4622 return label;
4623 }
4624
4625 /* Helper for emit_insn_after, handles lists of instructions
4626 efficiently. */
4627
4628 static rtx_insn *
4629 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4630 {
4631 rtx_insn *last;
4632 rtx_insn *after_after;
4633 if (!bb && !BARRIER_P (after))
4634 bb = BLOCK_FOR_INSN (after);
4635
4636 if (bb)
4637 {
4638 df_set_bb_dirty (bb);
4639 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4640 if (!BARRIER_P (last))
4641 {
4642 set_block_for_insn (last, bb);
4643 df_insn_rescan (last);
4644 }
4645 if (!BARRIER_P (last))
4646 {
4647 set_block_for_insn (last, bb);
4648 df_insn_rescan (last);
4649 }
4650 if (BB_END (bb) == after)
4651 BB_END (bb) = last;
4652 }
4653 else
4654 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4655 continue;
4656
4657 after_after = NEXT_INSN (after);
4658
4659 SET_NEXT_INSN (after) = first;
4660 SET_PREV_INSN (first) = after;
4661 SET_NEXT_INSN (last) = after_after;
4662 if (after_after)
4663 SET_PREV_INSN (after_after) = last;
4664
4665 if (after == get_last_insn ())
4666 set_last_insn (last);
4667
4668 return last;
4669 }
4670
4671 static rtx_insn *
4672 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4673 rtx_insn *(*make_raw)(rtx))
4674 {
4675 rtx_insn *last = after;
4676
4677 gcc_assert (after);
4678
4679 if (x == NULL_RTX)
4680 return last;
4681
4682 switch (GET_CODE (x))
4683 {
4684 case DEBUG_INSN:
4685 case INSN:
4686 case JUMP_INSN:
4687 case CALL_INSN:
4688 case CODE_LABEL:
4689 case BARRIER:
4690 case NOTE:
4691 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4692 break;
4693
4694 #ifdef ENABLE_RTL_CHECKING
4695 case SEQUENCE:
4696 gcc_unreachable ();
4697 break;
4698 #endif
4699
4700 default:
4701 last = (*make_raw) (x);
4702 add_insn_after (last, after, bb);
4703 break;
4704 }
4705
4706 return last;
4707 }
4708
4709 /* Make X be output after the insn AFTER and set the BB of insn. If
4710 BB is NULL, an attempt is made to infer the BB from AFTER. */
4711
4712 rtx_insn *
4713 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4714 {
4715 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4716 }
4717
4718
4719 /* Make an insn of code JUMP_INSN with body X
4720 and output it after the insn AFTER. */
4721
4722 rtx_jump_insn *
4723 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4724 {
4725 return as_a <rtx_jump_insn *> (
4726 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4727 }
4728
4729 /* Make an instruction with body X and code CALL_INSN
4730 and output it after the instruction AFTER. */
4731
4732 rtx_insn *
4733 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4734 {
4735 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4736 }
4737
4738 /* Make an instruction with body X and code DEBUG_INSN
4739 and output it after the instruction AFTER. */
4740
4741 rtx_insn *
4742 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4743 {
4744 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4745 }
4746
4747 /* Make an insn of code BARRIER
4748 and output it after the insn AFTER. */
4749
4750 rtx_barrier *
4751 emit_barrier_after (rtx_insn *after)
4752 {
4753 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4754
4755 INSN_UID (insn) = cur_insn_uid++;
4756
4757 add_insn_after (insn, after, NULL);
4758 return insn;
4759 }
4760
4761 /* Emit the label LABEL after the insn AFTER. */
4762
4763 rtx_insn *
4764 emit_label_after (rtx_insn *label, rtx_insn *after)
4765 {
4766 gcc_checking_assert (INSN_UID (label) == 0);
4767 INSN_UID (label) = cur_insn_uid++;
4768 add_insn_after (label, after, NULL);
4769 return label;
4770 }
4771
4772 /* Notes require a bit of special handling: Some notes need to have their
4773 BLOCK_FOR_INSN set, others should never have it set, and some should
4774 have it set or clear depending on the context. */
4775
4776 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4777 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4778 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4779
4780 static bool
4781 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4782 {
4783 switch (subtype)
4784 {
4785 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4786 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4787 return true;
4788
4789 /* Notes for var tracking and EH region markers can appear between or
4790 inside basic blocks. If the caller is emitting on the basic block
4791 boundary, do not set BLOCK_FOR_INSN on the new note. */
4792 case NOTE_INSN_VAR_LOCATION:
4793 case NOTE_INSN_EH_REGION_BEG:
4794 case NOTE_INSN_EH_REGION_END:
4795 return on_bb_boundary_p;
4796
4797 /* Otherwise, BLOCK_FOR_INSN must be set. */
4798 default:
4799 return false;
4800 }
4801 }
4802
4803 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4804
4805 rtx_note *
4806 emit_note_after (enum insn_note subtype, rtx_insn *after)
4807 {
4808 rtx_note *note = make_note_raw (subtype);
4809 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4810 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4811
4812 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4813 add_insn_after_nobb (note, after);
4814 else
4815 add_insn_after (note, after, bb);
4816 return note;
4817 }
4818
4819 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4820
4821 rtx_note *
4822 emit_note_before (enum insn_note subtype, rtx_insn *before)
4823 {
4824 rtx_note *note = make_note_raw (subtype);
4825 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4826 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4827
4828 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4829 add_insn_before_nobb (note, before);
4830 else
4831 add_insn_before (note, before, bb);
4832 return note;
4833 }
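
/* Illustrative sketch (not part of the original file): emitting a matched
   pair of EH region notes around an existing span of insns with the two
   routines above.  Real callers also record the region number on each note
   (via NOTE_EH_HANDLER); that detail is omitted here.  */

static void ATTRIBUTE_UNUSED
bracket_with_eh_notes_sketch (rtx_insn *first, rtx_insn *last)
{
  emit_note_before (NOTE_INSN_EH_REGION_BEG, first);
  emit_note_after (NOTE_INSN_EH_REGION_END, last);
}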
4834
4835 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4836 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4837
4838 static rtx_insn *
4839 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4840 rtx_insn *(*make_raw) (rtx))
4841 {
4842 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4843
4844 if (pattern == NULL_RTX || !loc)
4845 return last;
4846
4847 after = NEXT_INSN (after);
4848 while (1)
4849 {
4850 if (active_insn_p (after)
4851 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4852 && !INSN_LOCATION (after))
4853 INSN_LOCATION (after) = loc;
4854 if (after == last)
4855 break;
4856 after = NEXT_INSN (after);
4857 }
4858 return last;
4859 }
4860
4861 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4862 into a real insn. If SKIP_DEBUG_INSNS, DEBUG_INSNs are skipped when
4863 scanning backwards from AFTER for the insn whose location to reuse. */
4864
4865 static rtx_insn *
4866 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4867 rtx_insn *(*make_raw) (rtx))
4868 {
4869 rtx_insn *prev = after;
4870
4871 if (skip_debug_insns)
4872 while (DEBUG_INSN_P (prev))
4873 prev = PREV_INSN (prev);
4874
4875 if (INSN_P (prev))
4876 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4877 make_raw);
4878 else
4879 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4880 }
4881
4882 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4883 rtx_insn *
4884 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4885 {
4886 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4887 }
4888
4889 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4890 rtx_insn *
4891 emit_insn_after (rtx pattern, rtx_insn *after)
4892 {
4893 return emit_pattern_after (pattern, after, true, make_insn_raw);
4894 }
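
/* Illustrative sketch (not part of the original file): when new code is
   emitted on behalf of an existing insn, the _setloc variants let the new
   insns inherit that insn's source location so debug information stays
   sensible.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
emit_pattern_with_location_sketch (rtx pat, rtx_insn *origin, rtx_insn *after)
{
  /* PAT is emitted after AFTER but carries ORIGIN's source location.  */
  emit_insn_after_setloc (pat, after, INSN_LOCATION (origin));
}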
4895
4896 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4897 rtx_jump_insn *
4898 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4899 {
4900 return as_a <rtx_jump_insn *> (
4901 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4902 }
4903
4904 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4905 rtx_jump_insn *
4906 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4907 {
4908 return as_a <rtx_jump_insn *> (
4909 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4910 }
4911
4912 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4913 rtx_insn *
4914 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4915 {
4916 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4917 }
4918
4919 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4920 rtx_insn *
4921 emit_call_insn_after (rtx pattern, rtx_insn *after)
4922 {
4923 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4924 }
4925
4926 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4927 rtx_insn *
4928 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4929 {
4930 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4931 }
4932
4933 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4934 rtx_insn *
4935 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4936 {
4937 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4938 }
4939
4940 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4941 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4942 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4943 CALL_INSN, etc. */
4944
4945 static rtx_insn *
4946 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4947 bool insnp, rtx_insn *(*make_raw) (rtx))
4948 {
4949 rtx_insn *first = PREV_INSN (before);
4950 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4951 insnp ? before : NULL,
4952 NULL, make_raw);
4953
4954 if (pattern == NULL_RTX || !loc)
4955 return last;
4956
4957 if (!first)
4958 first = get_insns ();
4959 else
4960 first = NEXT_INSN (first);
4961 while (1)
4962 {
4963 if (active_insn_p (first)
4964 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4965 && !INSN_LOCATION (first))
4966 INSN_LOCATION (first) = loc;
4967 if (first == last)
4968 break;
4969 first = NEXT_INSN (first);
4970 }
4971 return last;
4972 }
4973
4974 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4975 into a real insn. If SKIP_DEBUG_INSNS, DEBUG_INSNs are skipped when
4976 scanning backwards from BEFORE for the location to reuse. INSNP indicates if PATTERN is meant for an
4977 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4978
4979 static rtx_insn *
4980 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4981 bool insnp, rtx_insn *(*make_raw) (rtx))
4982 {
4983 rtx_insn *next = before;
4984
4985 if (skip_debug_insns)
4986 while (DEBUG_INSN_P (next))
4987 next = PREV_INSN (next);
4988
4989 if (INSN_P (next))
4990 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4991 insnp, make_raw);
4992 else
4993 return emit_pattern_before_noloc (pattern, before,
4994 insnp ? before : NULL,
4995 NULL, make_raw);
4996 }
4997
4998 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4999 rtx_insn *
5000 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5001 {
5002 return emit_pattern_before_setloc (pattern, before, loc, true,
5003 make_insn_raw);
5004 }
5005
5006 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5007 rtx_insn *
5008 emit_insn_before (rtx pattern, rtx_insn *before)
5009 {
5010 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
5011 }
5012
5013 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5014 rtx_jump_insn *
5015 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5016 {
5017 return as_a <rtx_jump_insn *> (
5018 emit_pattern_before_setloc (pattern, before, loc, false,
5019 make_jump_insn_raw));
5020 }
5021
5022 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5023 rtx_jump_insn *
5024 emit_jump_insn_before (rtx pattern, rtx_insn *before)
5025 {
5026 return as_a <rtx_jump_insn *> (
5027 emit_pattern_before (pattern, before, true, false,
5028 make_jump_insn_raw));
5029 }
5030
5031 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5032 rtx_insn *
5033 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5034 {
5035 return emit_pattern_before_setloc (pattern, before, loc, false,
5036 make_call_insn_raw);
5037 }
5038
5039 /* Like emit_call_insn_before_noloc,
5040 but set INSN_LOCATION according to BEFORE. */
5041 rtx_insn *
5042 emit_call_insn_before (rtx pattern, rtx_insn *before)
5043 {
5044 return emit_pattern_before (pattern, before, true, false,
5045 make_call_insn_raw);
5046 }
5047
5048 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5049 rtx_insn *
5050 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5051 {
5052 return emit_pattern_before_setloc (pattern, before, loc, false,
5053 make_debug_insn_raw);
5054 }
5055
5056 /* Like emit_debug_insn_before_noloc,
5057 but set INSN_LOCATION according to BEFORE. */
5058 rtx_insn *
5059 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5060 {
5061 return emit_pattern_before (pattern, before, false, false,
5062 make_debug_insn_raw);
5063 }
5064
5065 /* Take X and emit it at the end of the doubly-linked
5066 INSN list.
5067
5068 Returns the last insn emitted. */
5069
5070 rtx_insn *
5071 emit_insn (rtx x)
5072 {
5073 rtx_insn *last = get_last_insn ();
5074 rtx_insn *insn;
5075
5076 if (x == NULL_RTX)
5077 return last;
5078
5079 switch (GET_CODE (x))
5080 {
5081 case DEBUG_INSN:
5082 case INSN:
5083 case JUMP_INSN:
5084 case CALL_INSN:
5085 case CODE_LABEL:
5086 case BARRIER:
5087 case NOTE:
5088 insn = as_a <rtx_insn *> (x);
5089 while (insn)
5090 {
5091 rtx_insn *next = NEXT_INSN (insn);
5092 add_insn (insn);
5093 last = insn;
5094 insn = next;
5095 }
5096 break;
5097
5098 #ifdef ENABLE_RTL_CHECKING
5099 case JUMP_TABLE_DATA:
5100 case SEQUENCE:
5101 gcc_unreachable ();
5102 break;
5103 #endif
5104
5105 default:
5106 last = make_insn_raw (x);
5107 add_insn (last);
5108 break;
5109 }
5110
5111 return last;
5112 }
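
/* Illustrative sketch (not part of the original file): the usual way a
   single new instruction reaches the chain.  Build a pattern, hand it to
   emit_insn, and it is wrapped in an INSN and appended to the current
   sequence.  The register and constant are arbitrary.  */

static void ATTRIBUTE_UNUSED
emit_simple_set_sketch (void)
{
  rtx reg = gen_reg_rtx (word_mode);

  /* emit_insn wraps the bare SET in an INSN via make_insn_raw and links
     it at the end of the doubly-linked list.  */
  emit_insn (gen_rtx_SET (reg, const0_rtx));
}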
5113
5114 /* Make an insn of code DEBUG_INSN with pattern X
5115 and add it to the end of the doubly-linked list. */
5116
5117 rtx_insn *
5118 emit_debug_insn (rtx x)
5119 {
5120 rtx_insn *last = get_last_insn ();
5121 rtx_insn *insn;
5122
5123 if (x == NULL_RTX)
5124 return last;
5125
5126 switch (GET_CODE (x))
5127 {
5128 case DEBUG_INSN:
5129 case INSN:
5130 case JUMP_INSN:
5131 case CALL_INSN:
5132 case CODE_LABEL:
5133 case BARRIER:
5134 case NOTE:
5135 insn = as_a <rtx_insn *> (x);
5136 while (insn)
5137 {
5138 rtx_insn *next = NEXT_INSN (insn);
5139 add_insn (insn);
5140 last = insn;
5141 insn = next;
5142 }
5143 break;
5144
5145 #ifdef ENABLE_RTL_CHECKING
5146 case JUMP_TABLE_DATA:
5147 case SEQUENCE:
5148 gcc_unreachable ();
5149 break;
5150 #endif
5151
5152 default:
5153 last = make_debug_insn_raw (x);
5154 add_insn (last);
5155 break;
5156 }
5157
5158 return last;
5159 }
5160
5161 /* Make an insn of code JUMP_INSN with pattern X
5162 and add it to the end of the doubly-linked list. */
5163
5164 rtx_insn *
5165 emit_jump_insn (rtx x)
5166 {
5167 rtx_insn *last = NULL;
5168 rtx_insn *insn;
5169
5170 switch (GET_CODE (x))
5171 {
5172 case DEBUG_INSN:
5173 case INSN:
5174 case JUMP_INSN:
5175 case CALL_INSN:
5176 case CODE_LABEL:
5177 case BARRIER:
5178 case NOTE:
5179 insn = as_a <rtx_insn *> (x);
5180 while (insn)
5181 {
5182 rtx_insn *next = NEXT_INSN (insn);
5183 add_insn (insn);
5184 last = insn;
5185 insn = next;
5186 }
5187 break;
5188
5189 #ifdef ENABLE_RTL_CHECKING
5190 case JUMP_TABLE_DATA:
5191 case SEQUENCE:
5192 gcc_unreachable ();
5193 break;
5194 #endif
5195
5196 default:
5197 last = make_jump_insn_raw (x);
5198 add_insn (last);
5199 break;
5200 }
5201
5202 return last;
5203 }
5204
5205 /* Make an insn of code CALL_INSN with pattern X
5206 and add it to the end of the doubly-linked list. */
5207
5208 rtx_insn *
5209 emit_call_insn (rtx x)
5210 {
5211 rtx_insn *insn;
5212
5213 switch (GET_CODE (x))
5214 {
5215 case DEBUG_INSN:
5216 case INSN:
5217 case JUMP_INSN:
5218 case CALL_INSN:
5219 case CODE_LABEL:
5220 case BARRIER:
5221 case NOTE:
5222 insn = emit_insn (x);
5223 break;
5224
5225 #ifdef ENABLE_RTL_CHECKING
5226 case SEQUENCE:
5227 case JUMP_TABLE_DATA:
5228 gcc_unreachable ();
5229 break;
5230 #endif
5231
5232 default:
5233 insn = make_call_insn_raw (x);
5234 add_insn (insn);
5235 break;
5236 }
5237
5238 return insn;
5239 }
5240
5241 /* Add the label LABEL to the end of the doubly-linked list. */
5242
5243 rtx_code_label *
5244 emit_label (rtx uncast_label)
5245 {
5246 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5247
5248 gcc_checking_assert (INSN_UID (label) == 0);
5249 INSN_UID (label) = cur_insn_uid++;
5250 add_insn (label);
5251 return label;
5252 }
5253
5254 /* Make an insn of code JUMP_TABLE_DATA
5255 and add it to the end of the doubly-linked list. */
5256
5257 rtx_jump_table_data *
5258 emit_jump_table_data (rtx table)
5259 {
5260 rtx_jump_table_data *jump_table_data =
5261 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5262 INSN_UID (jump_table_data) = cur_insn_uid++;
5263 PATTERN (jump_table_data) = table;
5264 BLOCK_FOR_INSN (jump_table_data) = NULL;
5265 add_insn (jump_table_data);
5266 return jump_table_data;
5267 }
5268
5269 /* Make an insn of code BARRIER
5270 and add it to the end of the doubly-linked list. */
5271
5272 rtx_barrier *
5273 emit_barrier (void)
5274 {
5275 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5276 INSN_UID (barrier) = cur_insn_uid++;
5277 add_insn (barrier);
5278 return barrier;
5279 }
5280
5281 /* Emit a copy of note ORIG. */
5282
5283 rtx_note *
5284 emit_note_copy (rtx_note *orig)
5285 {
5286 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5287 rtx_note *note = make_note_raw (kind);
5288 NOTE_DATA (note) = NOTE_DATA (orig);
5289 add_insn (note);
5290 return note;
5291 }
5292
5293 /* Make an insn of code NOTE with kind KIND
5294 and add it to the end of the doubly-linked list. */
5295
5296 rtx_note *
5297 emit_note (enum insn_note kind)
5298 {
5299 rtx_note *note = make_note_raw (kind);
5300 add_insn (note);
5301 return note;
5302 }
5303
5304 /* Emit a clobber of lvalue X. */
5305
5306 rtx_insn *
5307 emit_clobber (rtx x)
5308 {
5309 /* CONCATs should not appear in the insn stream. */
5310 if (GET_CODE (x) == CONCAT)
5311 {
5312 emit_clobber (XEXP (x, 0));
5313 return emit_clobber (XEXP (x, 1));
5314 }
5315 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5316 }
5317
5318 /* Return a sequence of insns to clobber lvalue X. */
5319
5320 rtx_insn *
5321 gen_clobber (rtx x)
5322 {
5323 rtx_insn *seq;
5324
5325 start_sequence ();
5326 emit_clobber (x);
5327 seq = get_insns ();
5328 end_sequence ();
5329 return seq;
5330 }
5331
5332 /* Emit a use of rvalue X. */
5333
5334 rtx_insn *
5335 emit_use (rtx x)
5336 {
5337 /* CONCATs should not appear in the insn stream. */
5338 if (GET_CODE (x) == CONCAT)
5339 {
5340 emit_use (XEXP (x, 0));
5341 return emit_use (XEXP (x, 1));
5342 }
5343 return emit_insn (gen_rtx_USE (VOIDmode, x));
5344 }
5345
5346 /* Return a sequence of insns to use rvalue X. */
5347
5348 rtx_insn *
5349 gen_use (rtx x)
5350 {
5351 rtx_insn *seq;
5352
5353 start_sequence ();
5354 emit_use (x);
5355 seq = get_insns ();
5356 end_sequence ();
5357 return seq;
5358 }
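
/* Illustrative sketch (not part of the original file): USE and CLOBBER
   markers are typically emitted around code that reads or overwrites a
   register in a way the surrounding RTL does not describe.  */

static void ATTRIBUTE_UNUSED
mark_reg_live_range_sketch (rtx reg)
{
  /* Mark REG as written by something the RTL does not spell out.  */
  emit_clobber (reg);

  /* Later, mark REG as read so it is not considered dead in between.  */
  emit_use (reg);
}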
5359
5360 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5361 Return the set in INSN that such notes describe, or NULL if the notes
5362 have no meaning for INSN. */
5363
5364 rtx
5365 set_for_reg_notes (rtx insn)
5366 {
5367 rtx pat, reg;
5368
5369 if (!INSN_P (insn))
5370 return NULL_RTX;
5371
5372 pat = PATTERN (insn);
5373 if (GET_CODE (pat) == PARALLEL)
5374 {
5375 /* We do not use single_set because that ignores SETs of unused
5376 registers. REG_EQUAL and REG_EQUIV notes really do require the
5377 PARALLEL to have a single SET. */
5378 if (multiple_sets (insn))
5379 return NULL_RTX;
5380 pat = XVECEXP (pat, 0, 0);
5381 }
5382
5383 if (GET_CODE (pat) != SET)
5384 return NULL_RTX;
5385
5386 reg = SET_DEST (pat);
5387
5388 /* Notes apply to the contents of a STRICT_LOW_PART. */
5389 if (GET_CODE (reg) == STRICT_LOW_PART
5390 || GET_CODE (reg) == ZERO_EXTRACT)
5391 reg = XEXP (reg, 0);
5392
5393 /* Check that we have a register. */
5394 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5395 return NULL_RTX;
5396
5397 return pat;
5398 }
5399
5400 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5401 note of this type already exists, remove it first. */
5402
5403 rtx
5404 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5405 {
5406 rtx note = find_reg_note (insn, kind, NULL_RTX);
5407
5408 switch (kind)
5409 {
5410 case REG_EQUAL:
5411 case REG_EQUIV:
5412 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5413 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5414 return NULL_RTX;
5415
5416 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5417 It serves no useful purpose and breaks eliminate_regs. */
5418 if (GET_CODE (datum) == ASM_OPERANDS)
5419 return NULL_RTX;
5420
5421 /* Notes with side effects are dangerous. Even if the side-effect
5422 initially mirrors one in PATTERN (INSN), later optimizations
5423 might alter the way that the final register value is calculated
5424 and so move or alter the side-effect in some way. The note would
5425 then no longer be a valid substitution for SET_SRC. */
5426 if (side_effects_p (datum))
5427 return NULL_RTX;
5428 break;
5429
5430 default:
5431 break;
5432 }
5433
5434 if (note)
5435 XEXP (note, 0) = datum;
5436 else
5437 {
5438 add_reg_note (insn, kind, datum);
5439 note = REG_NOTES (insn);
5440 }
5441
5442 switch (kind)
5443 {
5444 case REG_EQUAL:
5445 case REG_EQUIV:
5446 df_notes_rescan (as_a <rtx_insn *> (insn));
5447 break;
5448 default:
5449 break;
5450 }
5451
5452 return note;
5453 }
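
/* Illustrative sketch (not part of the original file): after expanding an
   operation into one or more insns, a REG_EQUAL note on the final insn
   records the overall value so later passes (CSE, combine) can simplify it.
   The helper name and the constant are only for illustration.  */

static void ATTRIBUTE_UNUSED
note_expansion_result_sketch (rtx target)
{
  /* Pretend TARGET was computed by some multi-insn expansion of 42.  */
  rtx_insn *last = emit_move_insn (target, GEN_INT (42));
  set_unique_reg_note (last, REG_EQUAL, GEN_INT (42));
}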
5454
5455 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5456 rtx
5457 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5458 {
5459 rtx set = set_for_reg_notes (insn);
5460
5461 if (set && SET_DEST (set) == dst)
5462 return set_unique_reg_note (insn, kind, datum);
5463 return NULL_RTX;
5464 }
5465
5466 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5467 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5468 is true.
5469
5470 If X is a label, it is simply added into the insn chain. */
5471
5472 rtx_insn *
5473 emit (rtx x, bool allow_barrier_p)
5474 {
5475 enum rtx_code code = classify_insn (x);
5476
5477 switch (code)
5478 {
5479 case CODE_LABEL:
5480 return emit_label (x);
5481 case INSN:
5482 return emit_insn (x);
5483 case JUMP_INSN:
5484 {
5485 rtx_insn *insn = emit_jump_insn (x);
5486 if (allow_barrier_p
5487 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5488 return emit_barrier ();
5489 return insn;
5490 }
5491 case CALL_INSN:
5492 return emit_call_insn (x);
5493 case DEBUG_INSN:
5494 return emit_debug_insn (x);
5495 default:
5496 gcc_unreachable ();
5497 }
5498 }
5499
5500 /* Space for free sequence stack entries. */
5501 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5502
5503 /* Begin emitting insns to a sequence. If this sequence will contain
5504 something that might cause the compiler to pop arguments to function
5505 calls (because those pops have previously been deferred; see
5506 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5507 before calling this function. That will ensure that the deferred
5508 pops are not accidentally emitted in the middle of this sequence. */
5509
5510 void
5511 start_sequence (void)
5512 {
5513 struct sequence_stack *tem;
5514
5515 if (free_sequence_stack != NULL)
5516 {
5517 tem = free_sequence_stack;
5518 free_sequence_stack = tem->next;
5519 }
5520 else
5521 tem = ggc_alloc<sequence_stack> ();
5522
5523 tem->next = get_current_sequence ()->next;
5524 tem->first = get_insns ();
5525 tem->last = get_last_insn ();
5526 get_current_sequence ()->next = tem;
5527
5528 set_first_insn (0);
5529 set_last_insn (0);
5530 }
5531
5532 /* Set up the insn chain starting with FIRST as the current sequence,
5533 saving the previously current one. See the documentation for
5534 start_sequence for more information about how to use this function. */
5535
5536 void
5537 push_to_sequence (rtx_insn *first)
5538 {
5539 rtx_insn *last;
5540
5541 start_sequence ();
5542
5543 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5544 ;
5545
5546 set_first_insn (first);
5547 set_last_insn (last);
5548 }
5549
5550 /* Like push_to_sequence, but take the last insn as an argument to avoid
5551 looping through the list. */
5552
5553 void
5554 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5555 {
5556 start_sequence ();
5557
5558 set_first_insn (first);
5559 set_last_insn (last);
5560 }
5561
5562 /* Set up the outer-level insn chain
5563 as the current sequence, saving the previously current one. */
5564
5565 void
5566 push_topmost_sequence (void)
5567 {
5568 struct sequence_stack *top;
5569
5570 start_sequence ();
5571
5572 top = get_topmost_sequence ();
5573 set_first_insn (top->first);
5574 set_last_insn (top->last);
5575 }
5576
5577 /* After emitting to the outer-level insn chain, update the outer-level
5578 insn chain, and restore the previous saved state. */
5579
5580 void
5581 pop_topmost_sequence (void)
5582 {
5583 struct sequence_stack *top;
5584
5585 top = get_topmost_sequence ();
5586 top->first = get_insns ();
5587 top->last = get_last_insn ();
5588
5589 end_sequence ();
5590 }
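
/* Usage sketch (illustrative): even while emitting into a nested
   sequence, the function's outer-level chain can be examined by
   switching to it temporarily:

     push_topmost_sequence ();
     rtx_insn *first = get_insns ();	(the function's first insn)
     pop_topmost_sequence ();
*/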
5591
5592 /* After emitting to a sequence, restore previous saved state.
5593
5594 To get the contents of the sequence just made, you must call
5595 `get_insns' *before* calling here.
5596
5597 If the compiler might have deferred popping arguments while
5598 generating this sequence, and this sequence will not be immediately
5599 inserted into the instruction stream, use do_pending_stack_adjust
5600 before calling get_insns. That will ensure that the deferred
5601 pops are inserted into this sequence, and not into some random
5602 location in the instruction stream. See INHIBIT_DEFER_POP for more
5603 information about deferred popping of arguments. */
5604
5605 void
5606 end_sequence (void)
5607 {
5608 struct sequence_stack *tem = get_current_sequence ()->next;
5609
5610 set_first_insn (tem->first);
5611 set_last_insn (tem->last);
5612 get_current_sequence ()->next = tem->next;
5613
5614 memset (tem, 0, sizeof (*tem));
5615 tem->next = free_sequence_stack;
5616 free_sequence_stack = tem;
5617 }
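
/* Usage sketch of the sequence idiom described above (illustrative;
   DEST, SRC and WHERE are placeholder names):

     start_sequence ();
     emit_move_insn (dest, src);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, where);

   Note that get_insns must be called before end_sequence, as the
   comment above explains.  */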
5618
5619 /* Return 1 if currently emitting into a sequence. */
5620
5621 int
5622 in_sequence_p (void)
5623 {
5624 return get_current_sequence ()->next != 0;
5625 }
5626
5627 /* Put the various virtual registers into REGNO_REG_RTX. */
5628
5629 static void
5630 init_virtual_regs (void)
5631 {
5632 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5633 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5634 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5635 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5636 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5637 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5638 = virtual_preferred_stack_boundary_rtx;
5639 }
5640
5641
5642 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5643 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5644 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5645 static int copy_insn_n_scratches;
5646
5647 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5648 copied an ASM_OPERANDS.
5649 In that case, it is the original input-operand vector. */
5650 static rtvec orig_asm_operands_vector;
5651
5652 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5653 copied an ASM_OPERANDS.
5654 In that case, it is the copied input-operand vector. */
5655 static rtvec copy_asm_operands_vector;
5656
5657 /* Likewise for the constraints vector. */
5658 static rtvec orig_asm_constraints_vector;
5659 static rtvec copy_asm_constraints_vector;
5660
5661 /* Recursively create a new copy of an rtx for copy_insn.
5662 This function differs from copy_rtx in that it handles SCRATCHes and
5663 ASM_OPERANDs properly.
5664 Normally, this function is not used directly; use copy_insn as front end.
5665 However, you could first copy an insn pattern with copy_insn and then use
5666 this function afterwards to properly copy any REG_NOTEs containing
5667 SCRATCHes. */
5668
5669 rtx
5670 copy_insn_1 (rtx orig)
5671 {
5672 rtx copy;
5673 int i, j;
5674 RTX_CODE code;
5675 const char *format_ptr;
5676
5677 if (orig == NULL)
5678 return NULL;
5679
5680 code = GET_CODE (orig);
5681
5682 switch (code)
5683 {
5684 case REG:
5685 case DEBUG_EXPR:
5686 CASE_CONST_ANY:
5687 case SYMBOL_REF:
5688 case CODE_LABEL:
5689 case PC:
5690 case CC0:
5691 case RETURN:
5692 case SIMPLE_RETURN:
5693 return orig;
5694 case CLOBBER:
5695 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5696 clobbers or clobbers of hard registers that originated as pseudos.
5697 This is needed to allow safe register renaming. */
5698 if (REG_P (XEXP (orig, 0))
5699 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5700 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5701 return orig;
5702 break;
5703
5704 case SCRATCH:
5705 for (i = 0; i < copy_insn_n_scratches; i++)
5706 if (copy_insn_scratch_in[i] == orig)
5707 return copy_insn_scratch_out[i];
5708 break;
5709
5710 case CONST:
5711 if (shared_const_p (orig))
5712 return orig;
5713 break;
5714
5715 /* A MEM with a constant address is not sharable. The problem is that
5716 the constant address may need to be reloaded. If the mem is shared,
5717 then reloading one copy of this mem will cause all copies to appear
5718 to have been reloaded. */
5719
5720 default:
5721 break;
5722 }
5723
5724 /* Copy the various flags, fields, and other information. We assume
5725 that all fields need copying, and then clear the fields that should
5726 not be copied. That is the sensible default behavior, and forces
5727 us to explicitly document why we are *not* copying a flag. */
5728 copy = shallow_copy_rtx (orig);
5729
5730 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5731 if (INSN_P (orig))
5732 {
5733 RTX_FLAG (copy, jump) = 0;
5734 RTX_FLAG (copy, call) = 0;
5735 RTX_FLAG (copy, frame_related) = 0;
5736 }
5737
5738 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5739
5740 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5741 switch (*format_ptr++)
5742 {
5743 case 'e':
5744 if (XEXP (orig, i) != NULL)
5745 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5746 break;
5747
5748 case 'E':
5749 case 'V':
5750 if (XVEC (orig, i) == orig_asm_constraints_vector)
5751 XVEC (copy, i) = copy_asm_constraints_vector;
5752 else if (XVEC (orig, i) == orig_asm_operands_vector)
5753 XVEC (copy, i) = copy_asm_operands_vector;
5754 else if (XVEC (orig, i) != NULL)
5755 {
5756 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5757 for (j = 0; j < XVECLEN (copy, i); j++)
5758 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5759 }
5760 break;
5761
5762 case 't':
5763 case 'w':
5764 case 'i':
5765 case 'p':
5766 case 's':
5767 case 'S':
5768 case 'u':
5769 case '0':
5770 /* These are left unchanged. */
5771 break;
5772
5773 default:
5774 gcc_unreachable ();
5775 }
5776
5777 if (code == SCRATCH)
5778 {
5779 i = copy_insn_n_scratches++;
5780 gcc_assert (i < MAX_RECOG_OPERANDS);
5781 copy_insn_scratch_in[i] = orig;
5782 copy_insn_scratch_out[i] = copy;
5783 }
5784 else if (code == ASM_OPERANDS)
5785 {
5786 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5787 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5788 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5789 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5790 }
5791
5792 return copy;
5793 }
5794
5795 /* Create a new copy of an rtx.
5796 This function differs from copy_rtx in that it handles SCRATCHes and
5797 ASM_OPERANDs properly.
5798 INSN doesn't really have to be a full INSN; it could be just the
5799 pattern. */
5800 rtx
5801 copy_insn (rtx insn)
5802 {
5803 copy_insn_n_scratches = 0;
5804 orig_asm_operands_vector = 0;
5805 orig_asm_constraints_vector = 0;
5806 copy_asm_operands_vector = 0;
5807 copy_asm_constraints_vector = 0;
5808 return copy_insn_1 (insn);
5809 }
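
/* Usage sketch (illustrative), following the comment above copy_insn_1:
   copy the pattern first, then copy any notes that may mention the same
   SCRATCHes so that they map onto the same new rtxes:

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));
*/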
5810
5811 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5812    on the assumption that INSN itself remains in its original place. */
5813
5814 rtx_insn *
5815 copy_delay_slot_insn (rtx_insn *insn)
5816 {
5817 /* Copy INSN with its rtx_code, all its notes, location etc. */
5818 insn = as_a <rtx_insn *> (copy_rtx (insn));
5819 INSN_UID (insn) = cur_insn_uid++;
5820 return insn;
5821 }
5822
5823 /* Initialize data structures and variables in this file
5824 before generating rtl for each function. */
5825
5826 void
5827 init_emit (void)
5828 {
5829 set_first_insn (NULL);
5830 set_last_insn (NULL);
5831 if (param_min_nondebug_insn_uid)
5832 cur_insn_uid = param_min_nondebug_insn_uid;
5833 else
5834 cur_insn_uid = 1;
5835 cur_debug_insn_uid = 1;
5836 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5837 first_label_num = label_num;
5838 get_current_sequence ()->next = NULL;
5839
5840 /* Init the tables that describe all the pseudo regs. */
5841
5842 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5843
5844 crtl->emit.regno_pointer_align
5845 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5846
5847 regno_reg_rtx
5848 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5849
5850 /* Put copies of all the hard registers into regno_reg_rtx. */
5851 memcpy (regno_reg_rtx,
5852 initial_regno_reg_rtx,
5853 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5854
5855 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5856 init_virtual_regs ();
5857
5858 /* Indicate that the virtual registers and stack locations are
5859 all pointers. */
5860 REG_POINTER (stack_pointer_rtx) = 1;
5861 REG_POINTER (frame_pointer_rtx) = 1;
5862 REG_POINTER (hard_frame_pointer_rtx) = 1;
5863 REG_POINTER (arg_pointer_rtx) = 1;
5864
5865 REG_POINTER (virtual_incoming_args_rtx) = 1;
5866 REG_POINTER (virtual_stack_vars_rtx) = 1;
5867 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5868 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5869 REG_POINTER (virtual_cfa_rtx) = 1;
5870
5871 #ifdef STACK_BOUNDARY
5872 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5873 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5874 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5875 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5876
5877 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5878 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5879 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5880 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5881
5882 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5883 #endif
5884
5885 #ifdef INIT_EXPANDERS
5886 INIT_EXPANDERS;
5887 #endif
5888 }
5889
5890 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5891
5892 wide_int
5893 const_vector_int_elt (const_rtx x, unsigned int i)
5894 {
5895 /* First handle elements that are directly encoded. */
5896 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5897 if (i < (unsigned int) XVECLEN (x, 0))
5898 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5899
5900 /* Identify the pattern that contains element I and work out the index of
5901 the last encoded element for that pattern. */
5902 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5903 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5904 unsigned int count = i / npatterns;
5905 unsigned int pattern = i % npatterns;
5906 unsigned int final_i = encoded_nelts - npatterns + pattern;
5907
5908 /* If there are no steps, the final encoded value is the right one. */
5909 if (!CONST_VECTOR_STEPPED_P (x))
5910 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5911
5912 /* Otherwise work out the value from the last two encoded elements. */
5913 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5914 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5915 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5916 rtx_mode_t (v1, elt_mode));
5917 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5918 }
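
/* Worked example for the stepped case above (illustrative).  Suppose X
   interleaves the patterns { 0, 1, 2, ... } and { 10, 11, 12, ... }, so
   NPATTERNS == 2, three elements are encoded per pattern, and the
   encoded elements are { 0, 10, 1, 11, 2, 12 }.  For I == 7:

     count   = 7 / 2 = 3, pattern = 7 % 2 = 1
     final_i = 6 - 2 + 1 = 5	   (encoded value 12)
     diff    = 12 - 11 = 1
     result  = 12 + (3 - 2) * 1 = 13

   which is indeed element 7 of { 0, 10, 1, 11, 2, 12, 3, 13, ... }.  */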
5919
5920 /* Return the value of element I of CONST_VECTOR X. */
5921
5922 rtx
5923 const_vector_elt (const_rtx x, unsigned int i)
5924 {
5925 /* First handle elements that are directly encoded. */
5926 if (i < (unsigned int) XVECLEN (x, 0))
5927 return CONST_VECTOR_ENCODED_ELT (x, i);
5928
5929 /* If there are no steps, the final encoded value is the right one. */
5930 if (!CONST_VECTOR_STEPPED_P (x))
5931 {
5932 /* Identify the pattern that contains element I and work out the index of
5933 the last encoded element for that pattern. */
5934 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5935 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5936 unsigned int pattern = i % npatterns;
5937 unsigned int final_i = encoded_nelts - npatterns + pattern;
5938 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5939 }
5940
5941 /* Otherwise work out the value from the last two encoded elements. */
5942 return immed_wide_int_const (const_vector_int_elt (x, i),
5943 GET_MODE_INNER (GET_MODE (x)));
5944 }
5945
5946 /* Return true if X is a valid element for a CONST_VECTOR of the given
5947 mode. */
5948
5949 bool
5950 valid_for_const_vector_p (machine_mode, rtx x)
5951 {
5952 return (CONST_SCALAR_INT_P (x)
5953 || CONST_POLY_INT_P (x)
5954 || CONST_DOUBLE_AS_FLOAT_P (x)
5955 || CONST_FIXED_P (x));
5956 }
5957
5958 /* Generate a vector constant of mode MODE in which every element has
5959 value ELT. */
5960
5961 rtx
5962 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5963 {
5964 rtx_vector_builder builder (mode, 1, 1);
5965 builder.quick_push (elt);
5966 return builder.build ();
5967 }
5968
5969 /* Return a vector rtx of mode MODE in which every element has value X.
5970 The result will be a constant if X is constant. */
5971
5972 rtx
5973 gen_vec_duplicate (machine_mode mode, rtx x)
5974 {
5975 if (valid_for_const_vector_p (mode, x))
5976 return gen_const_vec_duplicate (mode, x);
5977 return gen_rtx_VEC_DUPLICATE (mode, x);
5978 }
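
/* Usage sketch (illustrative, assuming the target provides V4SImode):

     rtx v = gen_const_vec_duplicate (V4SImode, const1_rtx);

   yields the CONST_VECTOR { 1, 1, 1, 1 }, whereas gen_vec_duplicate
   with a non-constant element returns a VEC_DUPLICATE expression.  */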
5979
5980 /* A subroutine of const_vec_series_p that handles the case in which:
5981
5982 (GET_CODE (X) == CONST_VECTOR
5983 && CONST_VECTOR_NPATTERNS (X) == 1
5984 && !CONST_VECTOR_DUPLICATE_P (X))
5985
5986 is known to hold. */
5987
5988 bool
5989 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5990 {
5991 /* Stepped sequences are only defined for integers, to avoid specifying
5992 rounding behavior. */
5993 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5994 return false;
5995
5996 /* A non-duplicated vector with two elements can always be seen as a
5997 series with a nonzero step. Longer vectors must have a stepped
5998 encoding. */
5999 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
6000 && !CONST_VECTOR_STEPPED_P (x))
6001 return false;
6002
6003 /* Calculate the step between the first and second elements. */
6004 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
6005 rtx base = CONST_VECTOR_ELT (x, 0);
6006 rtx step = simplify_binary_operation (MINUS, inner,
6007 CONST_VECTOR_ENCODED_ELT (x, 1), base);
6008 if (rtx_equal_p (step, CONST0_RTX (inner)))
6009 return false;
6010
6011 /* If we have a stepped encoding, check that the step between the
6012 second and third elements is the same as STEP. */
6013 if (CONST_VECTOR_STEPPED_P (x))
6014 {
6015 rtx diff = simplify_binary_operation (MINUS, inner,
6016 CONST_VECTOR_ENCODED_ELT (x, 2),
6017 CONST_VECTOR_ENCODED_ELT (x, 1));
6018 if (!rtx_equal_p (step, diff))
6019 return false;
6020 }
6021
6022 *base_out = base;
6023 *step_out = step;
6024 return true;
6025 }
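
/* For example (illustrative), a single-pattern stepped CONST_VECTOR
   encoding { 1, 3, 5, 7, ... } is accepted here with *BASE_OUT == 1 and
   *STEP_OUT == 2, because the step between the first two elements (2)
   matches the step between the second and third (2).  */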
6026
6027 /* Generate a vector constant of mode MODE in which element I has
6028 the value BASE + I * STEP. */
6029
6030 rtx
6031 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6032 {
6033 gcc_assert (valid_for_const_vector_p (mode, base)
6034 && valid_for_const_vector_p (mode, step));
6035
6036 rtx_vector_builder builder (mode, 1, 3);
6037 builder.quick_push (base);
6038 for (int i = 1; i < 3; ++i)
6039 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6040 builder[i - 1], step));
6041 return builder.build ();
6042 }
6043
6044 /* Generate a vector of mode MODE in which element I has the value
6045 BASE + I * STEP. The result will be a constant if BASE and STEP
6046 are both constants. */
6047
6048 rtx
6049 gen_vec_series (machine_mode mode, rtx base, rtx step)
6050 {
6051 if (step == const0_rtx)
6052 return gen_vec_duplicate (mode, base);
6053 if (valid_for_const_vector_p (mode, base)
6054 && valid_for_const_vector_p (mode, step))
6055 return gen_const_vec_series (mode, base, step);
6056 return gen_rtx_VEC_SERIES (mode, base, step);
6057 }
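
/* Usage sketch (illustrative, assuming the target provides V4SImode):

     rtx v = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);

   builds the CONST_VECTOR { 0, 1, 2, 3 }, while gen_vec_series falls
   back to a VEC_SERIES rtx when BASE or STEP is not constant.  */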
6058
6059 /* Generate a new vector constant for mode MODE and constant value
6060 CONSTANT. */
6061
6062 static rtx
6063 gen_const_vector (machine_mode mode, int constant)
6064 {
6065 machine_mode inner = GET_MODE_INNER (mode);
6066
6067 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6068
6069 rtx el = const_tiny_rtx[constant][(int) inner];
6070 gcc_assert (el);
6071
6072 return gen_const_vec_duplicate (mode, el);
6073 }
6074
6075 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6076 all elements are zero, and the one vector when all elements are one. */
6077 rtx
6078 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6079 {
6080 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6081
6082 /* If the values are all the same, check to see if we can use one of the
6083 standard constant vectors. */
6084 if (rtvec_all_equal_p (v))
6085 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6086
6087 unsigned int nunits = GET_NUM_ELEM (v);
6088 rtx_vector_builder builder (mode, nunits, 1);
6089 for (unsigned int i = 0; i < nunits; ++i)
6090 builder.quick_push (RTVEC_ELT (v, i));
6091 return builder.build (v);
6092 }
6093
6094 /* Initialise global register information required by all functions. */
6095
6096 void
6097 init_emit_regs (void)
6098 {
6099 int i;
6100 machine_mode mode;
6101 mem_attrs *attrs;
6102
6103 /* Reset register attributes */
6104 reg_attrs_htab->empty ();
6105
6106 /* We need reg_raw_mode, so initialize the modes now. */
6107 init_reg_modes_target ();
6108
6109 /* Assign register numbers to the globally defined register rtx. */
6110 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6111 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6112 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6113 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6114 virtual_incoming_args_rtx =
6115 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6116 virtual_stack_vars_rtx =
6117 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6118 virtual_stack_dynamic_rtx =
6119 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6120 virtual_outgoing_args_rtx =
6121 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6122 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6123 virtual_preferred_stack_boundary_rtx =
6124 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6125
6126 /* Initialize RTL for commonly used hard registers. These are
6127 copied into regno_reg_rtx as we begin to compile each function. */
6128 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6129 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6130
6131 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6132 return_address_pointer_rtx
6133 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6134 #endif
6135
6136 pic_offset_table_rtx = NULL_RTX;
6137 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6138 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6139
6140 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6141 {
6142 mode = (machine_mode) i;
6143 attrs = ggc_cleared_alloc<mem_attrs> ();
6144 attrs->align = BITS_PER_UNIT;
6145 attrs->addrspace = ADDR_SPACE_GENERIC;
6146 if (mode != BLKmode && mode != VOIDmode)
6147 {
6148 attrs->size_known_p = true;
6149 attrs->size = GET_MODE_SIZE (mode);
6150 if (STRICT_ALIGNMENT)
6151 attrs->align = GET_MODE_ALIGNMENT (mode);
6152 }
6153 mode_mem_attrs[i] = attrs;
6154 }
6155
6156 split_branch_probability = profile_probability::uninitialized ();
6157 }
6158
6159 /* Initialize global machine_mode variables. */
6160
6161 void
6162 init_derived_machine_modes (void)
6163 {
6164 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6165 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6166 {
6167 scalar_int_mode mode = mode_iter.require ();
6168
6169 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6170 && !opt_byte_mode.exists ())
6171 opt_byte_mode = mode;
6172
6173 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6174 && !opt_word_mode.exists ())
6175 opt_word_mode = mode;
6176 }
6177
6178 byte_mode = opt_byte_mode.require ();
6179 word_mode = opt_word_mode.require ();
6180 ptr_mode = as_a <scalar_int_mode>
6181 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6182 }
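
/* For instance (illustrative), on a target with BITS_PER_UNIT == 8 and
   BITS_PER_WORD == 64 this selects QImode for byte_mode and DImode for
   word_mode, while ptr_mode becomes the integer mode whose width is
   POINTER_SIZE, which may differ from Pmode.  */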
6183
6184 /* Create some permanent unique rtl objects shared between all functions. */
6185
6186 void
6187 init_emit_once (void)
6188 {
6189 int i;
6190 machine_mode mode;
6191 scalar_float_mode double_mode;
6192 opt_scalar_mode smode_iter;
6193
6194 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6195 CONST_FIXED, and memory attribute hash tables. */
6196 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6197
6198 #if TARGET_SUPPORTS_WIDE_INT
6199 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6200 #endif
6201 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6202
6203 if (NUM_POLY_INT_COEFFS > 1)
6204 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6205
6206 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6207
6208 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6209
6210 #ifdef INIT_EXPANDERS
6211 /* This is to initialize {init|mark|free}_machine_status before the first
6212 call to push_function_context_to. This is needed by the Chill front
6213 end which calls push_function_context_to before the first call to
6214 init_function_start. */
6215 INIT_EXPANDERS;
6216 #endif
6217
6218 /* Create the unique rtx's for certain rtx codes and operand values. */
6219
6220 /* Process stack-limiting command-line options. */
6221 if (opt_fstack_limit_symbol_arg != NULL)
6222 stack_limit_rtx
6223 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6224 if (opt_fstack_limit_register_no >= 0)
6225 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6226
6227 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6228 tries to use these variables. */
6229 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6230 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6231 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6232
6233 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6234 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6235 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6236 else
6237 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6238
6239 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6240
6241 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6242 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6243 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6244
6245 dconstm1 = dconst1;
6246 dconstm1.sign = 1;
6247
6248 dconsthalf = dconst1;
6249 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6250
6251 for (i = 0; i < 3; i++)
6252 {
6253 const REAL_VALUE_TYPE *const r =
6254 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6255
6256 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6257 const_tiny_rtx[i][(int) mode] =
6258 const_double_from_real_value (*r, mode);
6259
6260 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6261 const_tiny_rtx[i][(int) mode] =
6262 const_double_from_real_value (*r, mode);
6263
6264 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6265
6266 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6267 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6268
6269 for (mode = MIN_MODE_PARTIAL_INT;
6270 mode <= MAX_MODE_PARTIAL_INT;
6271 mode = (machine_mode)((int)(mode) + 1))
6272 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6273 }
6274
6275 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6276
6277 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6278 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6279
6280 /* For BImode, 1 and -1 are unsigned and signed interpretations
6281 of the same value. */
6282 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6283 const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6284 const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6285
6286 for (mode = MIN_MODE_PARTIAL_INT;
6287 mode <= MAX_MODE_PARTIAL_INT;
6288 mode = (machine_mode)((int)(mode) + 1))
6289 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6290
6291 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6292 {
6293 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6294 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6295 }
6296
6297 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6298 {
6299 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6300 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6301 }
6302
6303 /* As for BImode, "all 1" and "all -1" are unsigned and signed
6304 interpretations of the same value. */
6305 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6306 {
6307 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6308 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6309 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6310 }
6311
6312 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6313 {
6314 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6315 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6316 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6317 }
6318
6319 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6320 {
6321 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6322 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6323 }
6324
6325 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6326 {
6327 scalar_mode smode = smode_iter.require ();
6328 FCONST0 (smode).data.high = 0;
6329 FCONST0 (smode).data.low = 0;
6330 FCONST0 (smode).mode = smode;
6331 const_tiny_rtx[0][(int) smode]
6332 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6333 }
6334
6335 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6336 {
6337 scalar_mode smode = smode_iter.require ();
6338 FCONST0 (smode).data.high = 0;
6339 FCONST0 (smode).data.low = 0;
6340 FCONST0 (smode).mode = smode;
6341 const_tiny_rtx[0][(int) smode]
6342 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6343 }
6344
6345 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6346 {
6347 scalar_mode smode = smode_iter.require ();
6348 FCONST0 (smode).data.high = 0;
6349 FCONST0 (smode).data.low = 0;
6350 FCONST0 (smode).mode = smode;
6351 const_tiny_rtx[0][(int) smode]
6352 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6353
6354 /* We store the value 1. */
6355 FCONST1 (smode).data.high = 0;
6356 FCONST1 (smode).data.low = 0;
6357 FCONST1 (smode).mode = smode;
6358 FCONST1 (smode).data
6359 = double_int_one.lshift (GET_MODE_FBIT (smode),
6360 HOST_BITS_PER_DOUBLE_INT,
6361 SIGNED_FIXED_POINT_MODE_P (smode));
6362 const_tiny_rtx[1][(int) smode]
6363 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6364 }
6365
6366 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6367 {
6368 scalar_mode smode = smode_iter.require ();
6369 FCONST0 (smode).data.high = 0;
6370 FCONST0 (smode).data.low = 0;
6371 FCONST0 (smode).mode = smode;
6372 const_tiny_rtx[0][(int) smode]
6373 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6374
6375 /* We store the value 1. */
6376 FCONST1 (smode).data.high = 0;
6377 FCONST1 (smode).data.low = 0;
6378 FCONST1 (smode).mode = smode;
6379 FCONST1 (smode).data
6380 = double_int_one.lshift (GET_MODE_FBIT (smode),
6381 HOST_BITS_PER_DOUBLE_INT,
6382 SIGNED_FIXED_POINT_MODE_P (smode));
6383 const_tiny_rtx[1][(int) smode]
6384 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6385 }
6386
6387 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6388 {
6389 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6390 }
6391
6392 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6393 {
6394 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6395 }
6396
6397 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6398 {
6399 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6400 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6401 }
6402
6403 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6404 {
6405 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6406 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6407 }
6408
6409 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6410 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6411 const_tiny_rtx[0][i] = const0_rtx;
6412
6413 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6414 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6415 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6416 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6417 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6418 /*prev_insn=*/NULL,
6419 /*next_insn=*/NULL,
6420 /*bb=*/NULL,
6421 /*pattern=*/NULL_RTX,
6422 /*location=*/-1,
6423 CODE_FOR_nothing,
6424 /*reg_notes=*/NULL_RTX);
6425 }
6426
6427 /* Produce an exact duplicate of insn INSN after AFTER.
6428    Take care to update libcall regions if present.  */
6429
6430 rtx_insn *
6431 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6432 {
6433 rtx_insn *new_rtx;
6434 rtx link;
6435
6436 switch (GET_CODE (insn))
6437 {
6438 case INSN:
6439 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6440 break;
6441
6442 case JUMP_INSN:
6443 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6444 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6445 break;
6446
6447 case DEBUG_INSN:
6448 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6449 break;
6450
6451 case CALL_INSN:
6452 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6453 if (CALL_INSN_FUNCTION_USAGE (insn))
6454 CALL_INSN_FUNCTION_USAGE (new_rtx)
6455 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6456 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6457 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6458 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6459 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6460 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6461 break;
6462
6463 default:
6464 gcc_unreachable ();
6465 }
6466
6467 /* Update LABEL_NUSES. */
6468 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6469
6470 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6471
6472 /* If the old insn is frame related, then so is the new one. This is
6473 primarily needed for IA-64 unwind info which marks epilogue insns,
6474 which may be duplicated by the basic block reordering code. */
6475 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6476
6477 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6478   rtx *ptail = &REG_NOTES (new_rtx);
6479 while (*ptail != NULL_RTX)
6480 ptail = &XEXP (*ptail, 1);
6481
6482 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6483 will make them. REG_LABEL_TARGETs are created there too, but are
6484 supposed to be sticky, so we copy them. */
6485 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6486 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6487 {
6488 *ptail = duplicate_reg_note (link);
6489 ptail = &XEXP (*ptail, 1);
6490 }
6491
6492 INSN_CODE (new_rtx) = INSN_CODE (insn);
6493 return new_rtx;
6494 }
6495
6496 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6497 rtx
6498 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6499 {
6500 if (hard_reg_clobbers[mode][regno])
6501 return hard_reg_clobbers[mode][regno];
6502 else
6503 return (hard_reg_clobbers[mode][regno] =
6504 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6505 }
6506
6507 location_t prologue_location;
6508 location_t epilogue_location;
6509
6510 /* Hold current location information and last location information, so the
6511    datastructures are built lazily only when some instructions in a given
6512 place are needed. */
6513 static location_t curr_location;
6514
6515 /* Allocate insn location datastructure. */
6516 void
6517 insn_locations_init (void)
6518 {
6519 prologue_location = epilogue_location = 0;
6520 curr_location = UNKNOWN_LOCATION;
6521 }
6522
6523 /* At the end of emit stage, clear current location. */
6524 void
6525 insn_locations_finalize (void)
6526 {
6527 epilogue_location = curr_location;
6528 curr_location = UNKNOWN_LOCATION;
6529 }
6530
6531 /* Set current location. */
6532 void
6533 set_curr_insn_location (location_t location)
6534 {
6535 curr_location = location;
6536 }
6537
6538 /* Get current location. */
6539 location_t
6540 curr_insn_location (void)
6541 {
6542 return curr_location;
6543 }
6544
6545 /* Set the location of the insn chain starting at INSN to LOC. */
6546 void
6547 set_insn_locations (rtx_insn *insn, location_t loc)
6548 {
6549 while (insn)
6550 {
6551 if (INSN_P (insn))
6552 INSN_LOCATION (insn) = loc;
6553 insn = NEXT_INSN (insn);
6554 }
6555 }
6556
6557 /* Return lexical scope block insn belongs to. */
6558 tree
6559 insn_scope (const rtx_insn *insn)
6560 {
6561 return LOCATION_BLOCK (INSN_LOCATION (insn));
6562 }
6563
6564 /* Return line number of the statement that produced this insn. */
6565 int
6566 insn_line (const rtx_insn *insn)
6567 {
6568 return LOCATION_LINE (INSN_LOCATION (insn));
6569 }
6570
6571 /* Return source file of the statement that produced this insn. */
6572 const char *
6573 insn_file (const rtx_insn *insn)
6574 {
6575 return LOCATION_FILE (INSN_LOCATION (insn));
6576 }
6577
6578 /* Return expanded location of the statement that produced this insn. */
6579 expanded_location
6580 insn_location (const rtx_insn *insn)
6581 {
6582 return expand_location (INSN_LOCATION (insn));
6583 }
6584
6585 /* Return true if memory model MODEL requires a pre-operation (release-style)
6586 barrier or a post-operation (acquire-style) barrier. While not universal,
6587    this function matches the behavior of several targets.  */
6588
6589 bool
6590 need_atomic_barrier_p (enum memmodel model, bool pre)
6591 {
6592 switch (model & MEMMODEL_BASE_MASK)
6593 {
6594 case MEMMODEL_RELAXED:
6595 case MEMMODEL_CONSUME:
6596 return false;
6597 case MEMMODEL_RELEASE:
6598 return pre;
6599 case MEMMODEL_ACQUIRE:
6600 return !pre;
6601 case MEMMODEL_ACQ_REL:
6602 case MEMMODEL_SEQ_CST:
6603 return true;
6604 default:
6605 gcc_unreachable ();
6606 }
6607 }
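
/* Usage sketch (illustrative; emit_target_fence is a placeholder for
   whatever barrier insn a particular target provides):

     if (need_atomic_barrier_p (model, true))
       emit_target_fence ();
     ... emit the atomic operation itself ...
     if (need_atomic_barrier_p (model, false))
       emit_target_fence ();

   The first call asks for a release-style barrier before the operation,
   the second for an acquire-style barrier after it; MEMMODEL_SEQ_CST
   requires both.  */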
6608
6609 /* Return a constant shift amount for shifting a value of mode MODE
6610 by VALUE bits. */
6611
6612 rtx
6613 gen_int_shift_amount (machine_mode, poly_int64 value)
6614 {
6615 /* Use a 64-bit mode, to avoid any truncation.
6616
6617 ??? Perhaps this should be automatically derived from the .md files
6618 instead, or perhaps have a target hook. */
6619 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6620 ? DImode
6621 : int_mode_for_size (64, 0).require ());
6622 return gen_int_mode (value, shift_mode);
6623 }
6624
6625 /* Initialize fields of rtl_data related to stack alignment. */
6626
6627 void
6628 rtl_data::init_stack_alignment ()
6629 {
6630 stack_alignment_needed = STACK_BOUNDARY;
6631 max_used_stack_slot_alignment = STACK_BOUNDARY;
6632 stack_alignment_estimated = 0;
6633 preferred_stack_boundary = STACK_BOUNDARY;
6634 }
6635
6636
6637 #include "gt-emit-rtl.h"
6638