1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63 #include "rtx-vector-builder.h"
64
65 struct target_rtl default_target_rtl;
66 #if SWITCHABLE_TARGET
67 struct target_rtl *this_target_rtl = &default_target_rtl;
68 #endif
69
70 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71
72 /* Commonly used modes. */
73
74 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
75 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
76 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
77
78 /* Data structures maintained for the currently processed function in RTL form. */
79
80 struct rtl_data x_rtl;
81
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84 FIXME: We could put it into the emit_status struct, but gengtype is not able
85 to deal with a length attribute nested in top-level structures. */
86
87 rtx * regno_reg_rtx;
88
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
91
92 static GTY(()) int label_num = 1;
93
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
97 is set only for MODE_INT and MODE_VECTOR_INT modes. */
98
99 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
100
101 rtx const_true_rtx;
102
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
107 REAL_VALUE_TYPE dconsthalf;
108
109 /* Record fixed-point constant 0 and 1. */
110 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112
113 /* We make one copy of (const_int C) where C is in
114 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115 to save space during the compilation and simplify comparisons of
116 integers. */
117
118 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
119
120 /* Standard pieces of rtx, to be substituted directly into things. */
121 rtx pc_rtx;
122 rtx ret_rtx;
123 rtx simple_return_rtx;
124 rtx cc0_rtx;
125
126 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
127 this pointer should normally never be dereferenced), but is required to be
128 distinct from NULL_RTX. Currently used by peephole2 pass. */
129 rtx_insn *invalid_insn_rtx;
130
131 /* A hash table storing CONST_INTs whose absolute value is greater
132 than MAX_SAVED_CONST_INT. */
133
134 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
135 {
136 typedef HOST_WIDE_INT compare_type;
137
138 static hashval_t hash (rtx i);
139 static bool equal (rtx i, HOST_WIDE_INT h);
140 };
141
142 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
143
144 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
145 {
146 static hashval_t hash (rtx x);
147 static bool equal (rtx x, rtx y);
148 };
149
150 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
151
152 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
153 {
154 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
155
156 static hashval_t hash (rtx x);
157 static bool equal (rtx x, const compare_type &y);
158 };
159
160 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
161
162 /* A hash table storing register attribute structures. */
163 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
164 {
165 static hashval_t hash (reg_attrs *x);
166 static bool equal (reg_attrs *a, reg_attrs *b);
167 };
168
169 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
170
171 /* A hash table storing all CONST_DOUBLEs. */
172 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
173 {
174 static hashval_t hash (rtx x);
175 static bool equal (rtx x, rtx y);
176 };
177
178 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
179
180 /* A hash table storing all CONST_FIXEDs. */
181 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
182 {
183 static hashval_t hash (rtx x);
184 static bool equal (rtx x, rtx y);
185 };
186
187 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
188
189 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
190 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
191 #define first_label_num (crtl->emit.x_first_label_num)
192
193 static void set_used_decls (tree);
194 static void mark_label_nuses (rtx);
195 #if TARGET_SUPPORTS_WIDE_INT
196 static rtx lookup_const_wide_int (rtx);
197 #endif
198 static rtx lookup_const_double (rtx);
199 static rtx lookup_const_fixed (rtx);
200 static rtx gen_const_vector (machine_mode, int);
201 static void copy_rtx_if_shared_1 (rtx *orig);
202
203 /* Probability of the conditional branch currently being processed by try_split. */
204 profile_probability split_branch_probability;
205
206 /* Returns a hash code for X (which is really a CONST_INT). */
207
208 hashval_t
209 const_int_hasher::hash (rtx x)
210 {
211 return (hashval_t) INTVAL (x);
212 }
213
214 /* Returns nonzero if the value represented by X (which is really a
215 CONST_INT) is the same as that given by Y (which is really a
216 HOST_WIDE_INT). */
217
218 bool
219 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
220 {
221 return (INTVAL (x) == y);
222 }
223
224 #if TARGET_SUPPORTS_WIDE_INT
225 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
226
227 hashval_t
228 const_wide_int_hasher::hash (rtx x)
229 {
230 int i;
231 unsigned HOST_WIDE_INT hash = 0;
232 const_rtx xr = x;
233
234 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
235 hash += CONST_WIDE_INT_ELT (xr, i);
236
237 return (hashval_t) hash;
238 }
239
240 /* Returns nonzero if the value represented by X (which is really a
241 CONST_WIDE_INT) is the same as that given by Y (which is really a
242 CONST_WIDE_INT). */
243
244 bool
245 const_wide_int_hasher::equal (rtx x, rtx y)
246 {
247 int i;
248 const_rtx xr = x;
249 const_rtx yr = y;
250 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
251 return false;
252
253 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
254 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
255 return false;
256
257 return true;
258 }
259 #endif
260
261 /* Returns a hash code for CONST_POLY_INT X. */
262
263 hashval_t
264 const_poly_int_hasher::hash (rtx x)
265 {
266 inchash::hash h;
267 h.add_int (GET_MODE (x));
268 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
269 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
270 return h.end ();
271 }
272
273 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
274
275 bool
276 const_poly_int_hasher::equal (rtx x, const compare_type &y)
277 {
278 if (GET_MODE (x) != y.first)
279 return false;
280 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
281 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
282 return false;
283 return true;
284 }
285
286 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
287 hashval_t
288 const_double_hasher::hash (rtx x)
289 {
290 const_rtx const value = x;
291 hashval_t h;
292
293 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
294 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
295 else
296 {
297 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
298 /* MODE is used in the comparison, so it should be in the hash. */
299 h ^= GET_MODE (value);
300 }
301 return h;
302 }
303
304 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
305 is the same as that represented by Y (really a CONST_DOUBLE). */
306 bool
307 const_double_hasher::equal (rtx x, rtx y)
308 {
309 const_rtx const a = x, b = y;
310
311 if (GET_MODE (a) != GET_MODE (b))
312 return 0;
313 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
314 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
315 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
316 else
317 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
318 CONST_DOUBLE_REAL_VALUE (b));
319 }
320
321 /* Returns a hash code for X (which is really a CONST_FIXED). */
322
323 hashval_t
324 const_fixed_hasher::hash (rtx x)
325 {
326 const_rtx const value = x;
327 hashval_t h;
328
329 h = fixed_hash (CONST_FIXED_VALUE (value));
330 /* MODE is used in the comparison, so it should be in the hash. */
331 h ^= GET_MODE (value);
332 return h;
333 }
334
335 /* Returns nonzero if the value represented by X is the same as that
336 represented by Y. */
337
338 bool
339 const_fixed_hasher::equal (rtx x, rtx y)
340 {
341 const_rtx const a = x, b = y;
342
343 if (GET_MODE (a) != GET_MODE (b))
344 return 0;
345 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
346 }
347
348 /* Return true if the given memory attributes are equal. */
349
350 bool
351 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
352 {
353 if (p == q)
354 return true;
355 if (!p || !q)
356 return false;
357 return (p->alias == q->alias
358 && p->offset_known_p == q->offset_known_p
359 && (!p->offset_known_p || known_eq (p->offset, q->offset))
360 && p->size_known_p == q->size_known_p
361 && (!p->size_known_p || known_eq (p->size, q->size))
362 && p->align == q->align
363 && p->addrspace == q->addrspace
364 && (p->expr == q->expr
365 || (p->expr != NULL_TREE && q->expr != NULL_TREE
366 && operand_equal_p (p->expr, q->expr, 0))));
367 }
368
369 /* Set MEM's memory attributes so that they are the same as ATTRS. */
370
371 static void
372 set_mem_attrs (rtx mem, mem_attrs *attrs)
373 {
374 /* If everything is the default, we can just clear the attributes. */
375 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
376 {
377 MEM_ATTRS (mem) = 0;
378 return;
379 }
380
381 if (!MEM_ATTRS (mem)
382 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
383 {
384 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
385 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
386 }
387 }
388
389 /* Returns a hash code for X (which is really a reg_attrs *). */
390
391 hashval_t
392 reg_attr_hasher::hash (reg_attrs *x)
393 {
394 const reg_attrs *const p = x;
395
396 inchash::hash h;
397 h.add_ptr (p->decl);
398 h.add_poly_hwi (p->offset);
399 return h.end ();
400 }
401
402 /* Returns nonzero if the value represented by X is the same as that given by
403 Y. */
404
405 bool
406 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
407 {
408 const reg_attrs *const p = x;
409 const reg_attrs *const q = y;
410
411 return (p->decl == q->decl && known_eq (p->offset, q->offset));
412 }
413 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it into
414 the hash table if one identical to it is not already in the table. */
416
417 static reg_attrs *
418 get_reg_attrs (tree decl, poly_int64 offset)
419 {
420 reg_attrs attrs;
421
422 /* If everything is the default, we can just return zero. */
423 if (decl == 0 && known_eq (offset, 0))
424 return 0;
425
426 attrs.decl = decl;
427 attrs.offset = offset;
428
429 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
430 if (*slot == 0)
431 {
432 *slot = ggc_alloc<reg_attrs> ();
433 memcpy (*slot, &attrs, sizeof (reg_attrs));
434 }
435
436 return *slot;
437 }
438
439
440 #if !HAVE_blockage
441 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
442 across this insn and to prevent register equivalences from being seen across it. */
443
444 rtx
445 gen_blockage (void)
446 {
447 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
448 MEM_VOLATILE_P (x) = true;
449 return x;
450 }
451 #endif
452
453
454 /* Set the mode and register number of X to MODE and REGNO. */
455
456 void
457 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
458 {
459 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
460 ? hard_regno_nregs (regno, mode)
461 : 1);
462 PUT_MODE_RAW (x, mode);
463 set_regno_raw (x, regno, nregs);
464 }
465
466 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
467 don't attempt to share with the various global pieces of rtl (such as
468 frame_pointer_rtx). */
469
470 rtx
471 gen_raw_REG (machine_mode mode, unsigned int regno)
472 {
473 rtx x = rtx_alloc (REG MEM_STAT_INFO);
474 set_mode_and_regno (x, mode, regno);
475 REG_ATTRS (x) = NULL;
476 ORIGINAL_REGNO (x) = regno;
477 return x;
478 }
479
480 /* There are some RTL codes that require special attention; the generation
481 functions do the raw handling. If you add to this list, modify
482 special_rtx in gengenrtl.c as well. */
483
484 rtx_expr_list *
485 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
486 {
487 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
488 expr_list));
489 }
490
491 rtx_insn_list *
492 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
493 {
494 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
495 insn_list));
496 }
497
498 rtx_insn *
499 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
500 basic_block bb, rtx pattern, int location, int code,
501 rtx reg_notes)
502 {
503 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
504 prev_insn, next_insn,
505 bb, pattern, location, code,
506 reg_notes));
507 }
508
509 rtx
510 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
511 {
512 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
513 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
514
515 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
516 if (const_true_rtx && arg == STORE_FLAG_VALUE)
517 return const_true_rtx;
518 #endif
519
520 /* Look up the CONST_INT in the hash table. */
521 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
522 INSERT);
523 if (*slot == 0)
524 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
525
526 return *slot;
527 }
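/* Editorial illustration (not part of the original file): because
   CONST_INTs in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are
   pre-allocated in const_int_rtx[] and larger ones are uniquified through
   const_int_htab, two requests for the same value return the same node:

     rtx a = gen_rtx_CONST_INT (VOIDmode, 7);
     rtx b = GEN_INT (7);
     gcc_checking_assert (a == b);

   which is why CONST_INTs may be compared by pointer throughout GCC.  */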
528
529 rtx
530 gen_int_mode (poly_int64 c, machine_mode mode)
531 {
532 c = trunc_int_for_mode (c, mode);
533 if (c.is_constant ())
534 return GEN_INT (c.coeffs[0]);
535 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
536 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
537 }
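/* Editorial illustration (not part of the original file): gen_int_mode
   truncates the value for MODE before sharing it, so on a target whose
   QImode is 8 bits wide

     gen_int_mode (511, QImode)  returns (const_int -1), i.e. constm1_rtx
     gen_int_mode (-1, SImode)   returns constm1_rtx as well

   whereas a bare GEN_INT (511) would create a constant that is out of
   range for QImode.  */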
538
539 /* CONST_DOUBLEs might be created from pairs of integers, or from
540 REAL_VALUE_TYPEs. Also, their length is known only at run time,
541 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
542
543 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
544 hash table. If so, return its counterpart; otherwise add it
545 to the hash table and return it. */
546 static rtx
547 lookup_const_double (rtx real)
548 {
549 rtx *slot = const_double_htab->find_slot (real, INSERT);
550 if (*slot == 0)
551 *slot = real;
552
553 return *slot;
554 }
555
556 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
557 VALUE in mode MODE. */
558 rtx
559 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
560 {
561 rtx real = rtx_alloc (CONST_DOUBLE);
562 PUT_MODE (real, mode);
563
564 real->u.rv = value;
565
566 return lookup_const_double (real);
567 }
568
569 /* Determine whether FIXED, a CONST_FIXED, already exists in the
570 hash table. If so, return its counterpart; otherwise add it
571 to the hash table and return it. */
572
573 static rtx
574 lookup_const_fixed (rtx fixed)
575 {
576 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
577 if (*slot == 0)
578 *slot = fixed;
579
580 return *slot;
581 }
582
583 /* Return a CONST_FIXED rtx for a fixed-point value specified by
584 VALUE in mode MODE. */
585
586 rtx
587 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
588 {
589 rtx fixed = rtx_alloc (CONST_FIXED);
590 PUT_MODE (fixed, mode);
591
592 fixed->u.fv = value;
593
594 return lookup_const_fixed (fixed);
595 }
596
597 #if TARGET_SUPPORTS_WIDE_INT == 0
598 /* Constructs double_int from rtx CST. */
599
600 double_int
601 rtx_to_double_int (const_rtx cst)
602 {
603 double_int r;
604
605 if (CONST_INT_P (cst))
606 r = double_int::from_shwi (INTVAL (cst));
607 else if (CONST_DOUBLE_AS_INT_P (cst))
608 {
609 r.low = CONST_DOUBLE_LOW (cst);
610 r.high = CONST_DOUBLE_HIGH (cst);
611 }
612 else
613 gcc_unreachable ();
614
615 return r;
616 }
617 #endif
618
619 #if TARGET_SUPPORTS_WIDE_INT
620 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
621 If so, return its counterpart; otherwise add it to the hash table and
622 return it. */
623
624 static rtx
625 lookup_const_wide_int (rtx wint)
626 {
627 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
628 if (*slot == 0)
629 *slot = wint;
630
631 return *slot;
632 }
633 #endif
634
635 /* Return an rtx constant for V, given that the constant has mode MODE.
636 The returned rtx will be a CONST_INT if V fits, otherwise it will be
637 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
638 (if TARGET_SUPPORTS_WIDE_INT). */
639
640 static rtx
641 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
642 {
643 unsigned int len = v.get_len ();
644 /* Not scalar_int_mode because we also allow pointer bound modes. */
645 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
646
647 /* Allow truncation but not extension since we do not know if the
648 number is signed or unsigned. */
649 gcc_assert (prec <= v.get_precision ());
650
651 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
652 return gen_int_mode (v.elt (0), mode);
653
654 #if TARGET_SUPPORTS_WIDE_INT
655 {
656 unsigned int i;
657 rtx value;
658 unsigned int blocks_needed
659 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
660
661 if (len > blocks_needed)
662 len = blocks_needed;
663
664 value = const_wide_int_alloc (len);
665
666 /* It is so tempting to just put the mode in here. Must control
667 myself ... */
668 PUT_MODE (value, VOIDmode);
669 CWI_PUT_NUM_ELEM (value, len);
670
671 for (i = 0; i < len; i++)
672 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
673
674 return lookup_const_wide_int (value);
675 }
676 #else
677 return immed_double_const (v.elt (0), v.elt (1), mode);
678 #endif
679 }
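/* Editorial note (not part of the original file): with
   TARGET_SUPPORTS_WIDE_INT, a 128-bit constant whose value needs more
   than one HOST_WIDE_INT comes back from immed_wide_int_const_1 as a
   CONST_WIDE_INT with two elements, while any value representable in a
   single element is still returned as a shared CONST_INT by the early
   exit above (len < 2 || prec <= HOST_BITS_PER_WIDE_INT).  */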
680
681 #if TARGET_SUPPORTS_WIDE_INT == 0
682 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
683 of ints: I0 is the low-order word and I1 is the high-order word.
684 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
685 implied upper bits are copies of the high bit of i1. The value
686 itself is neither signed nor unsigned. Do not use this routine for
687 non-integer modes; convert to REAL_VALUE_TYPE and use
688 const_double_from_real_value. */
689
690 rtx
691 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
692 {
693 rtx value;
694 unsigned int i;
695
696 /* There are the following cases (note that there are no modes with
697 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
698
699 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
700 gen_int_mode.
701 2) If the value of the integer fits into HOST_WIDE_INT anyway
702 (i.e., i1 consists only of copies of the sign bit, and the signs
703 of i0 and i1 are the same), then we return a CONST_INT for i0.
704 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
705 scalar_mode smode;
706 if (is_a <scalar_mode> (mode, &smode)
707 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
708 return gen_int_mode (i0, mode);
709
710 /* If this integer fits in one word, return a CONST_INT. */
711 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
712 return GEN_INT (i0);
713
714 /* We use VOIDmode for integers. */
715 value = rtx_alloc (CONST_DOUBLE);
716 PUT_MODE (value, VOIDmode);
717
718 CONST_DOUBLE_LOW (value) = i0;
719 CONST_DOUBLE_HIGH (value) = i1;
720
721 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
722 XWINT (value, i) = 0;
723
724 return lookup_const_double (value);
725 }
726 #endif
727
728 /* Return an rtx representation of C in mode MODE. */
729
730 rtx
731 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
732 {
733 if (c.is_constant ())
734 return immed_wide_int_const_1 (c.coeffs[0], mode);
735
736 /* Not scalar_int_mode because we also allow pointer bound modes. */
737 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
738
739 /* Allow truncation but not extension since we do not know if the
740 number is signed or unsigned. */
741 gcc_assert (prec <= c.coeffs[0].get_precision ());
742 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
743
744 /* See whether we already have an rtx for this constant. */
745 inchash::hash h;
746 h.add_int (mode);
747 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
748 h.add_wide_int (newc.coeffs[i]);
749 const_poly_int_hasher::compare_type typed_value (mode, newc);
750 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
751 h.end (), INSERT);
752 rtx x = *slot;
753 if (x)
754 return x;
755
756 /* Create a new rtx. There's a choice to be made here between installing
757 the actual mode of the rtx or leaving it as VOIDmode (for consistency
758 with CONST_INT). In practice the handling of the codes is different
759 enough that we get no benefit from using VOIDmode, and various places
760 assume that VOIDmode implies CONST_INT. Using the real mode seems like
761 the right long-term direction anyway. */
762 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
763 size_t extra_size = twi::extra_size (prec);
764 x = rtx_alloc_v (CONST_POLY_INT,
765 sizeof (struct const_poly_int_def) + extra_size);
766 PUT_MODE (x, mode);
767 CONST_POLY_INT_COEFFS (x).set_precision (prec);
768 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
769 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
770
771 *slot = x;
772 return x;
773 }
774
775 rtx
776 gen_rtx_REG (machine_mode mode, unsigned int regno)
777 {
778 /* In case the MD file explicitly references the frame pointer, have
779 all such references point to the same frame pointer. This is
780 used during frame pointer elimination to distinguish the explicit
781 references to these registers from pseudos that happened to be
782 assigned to them.
783
784 If we have eliminated the frame pointer or arg pointer, we will
785 be using it as a normal register, for example as a spill
786 register. In such cases, we might be accessing it in a mode that
787 is not Pmode and therefore cannot use the pre-allocated rtx.
788
789 Also don't do this when we are making new REGs in reload, since
790 we don't want to get confused with the real pointers. */
791
792 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
793 {
794 if (regno == FRAME_POINTER_REGNUM
795 && (!reload_completed || frame_pointer_needed))
796 return frame_pointer_rtx;
797
798 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
799 && regno == HARD_FRAME_POINTER_REGNUM
800 && (!reload_completed || frame_pointer_needed))
801 return hard_frame_pointer_rtx;
802 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
803 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
804 && regno == ARG_POINTER_REGNUM)
805 return arg_pointer_rtx;
806 #endif
807 #ifdef RETURN_ADDRESS_POINTER_REGNUM
808 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
809 return return_address_pointer_rtx;
810 #endif
811 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
812 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
813 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
814 return pic_offset_table_rtx;
815 if (regno == STACK_POINTER_REGNUM)
816 return stack_pointer_rtx;
817 }
818
819 #if 0
820 /* If the per-function register table has been set up, try to re-use
821 an existing entry in that table to avoid useless generation of RTL.
822
823 This code is disabled for now until we can fix the various backends
824 which depend on having non-shared hard registers in some cases. Long
825 term we want to re-enable this code as it can significantly cut down
826 on the amount of useless RTL that gets generated.
827
828 We'll also need to fix some code that runs after reload that wants to
829 set ORIGINAL_REGNO. */
830
831 if (cfun
832 && cfun->emit
833 && regno_reg_rtx
834 && regno < FIRST_PSEUDO_REGISTER
835 && reg_raw_mode[regno] == mode)
836 return regno_reg_rtx[regno];
837 #endif
838
839 return gen_raw_REG (mode, regno);
840 }
841
842 rtx
843 gen_rtx_MEM (machine_mode mode, rtx addr)
844 {
845 rtx rt = gen_rtx_raw_MEM (mode, addr);
846
847 /* This field is not cleared by the mere allocation of the rtx, so
848 we clear it here. */
849 MEM_ATTRS (rt) = 0;
850
851 return rt;
852 }
853
854 /* Generate a MEM referring to non-trapping constant memory. */
855
856 rtx
857 gen_const_mem (machine_mode mode, rtx addr)
858 {
859 rtx mem = gen_rtx_MEM (mode, addr);
860 MEM_READONLY_P (mem) = 1;
861 MEM_NOTRAP_P (mem) = 1;
862 return mem;
863 }
864
865 /* Generate a MEM referring to fixed portions of the frame, e.g., register
866 save areas. */
867
868 rtx
869 gen_frame_mem (machine_mode mode, rtx addr)
870 {
871 rtx mem = gen_rtx_MEM (mode, addr);
872 MEM_NOTRAP_P (mem) = 1;
873 set_mem_alias_set (mem, get_frame_alias_set ());
874 return mem;
875 }
876
877 /* Generate a MEM referring to a temporary use of the stack, not part
878 of the fixed stack frame. For example, something which is pushed
879 by a target splitter. */
880 rtx
881 gen_tmp_stack_mem (machine_mode mode, rtx addr)
882 {
883 rtx mem = gen_rtx_MEM (mode, addr);
884 MEM_NOTRAP_P (mem) = 1;
885 if (!cfun->calls_alloca)
886 set_mem_alias_set (mem, get_frame_alias_set ());
887 return mem;
888 }
889
890 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
891 this construct would be valid, and false otherwise. */
892
893 bool
894 validate_subreg (machine_mode omode, machine_mode imode,
895 const_rtx reg, poly_uint64 offset)
896 {
897 poly_uint64 isize = GET_MODE_SIZE (imode);
898 poly_uint64 osize = GET_MODE_SIZE (omode);
899
900 /* The sizes must be ordered, so that we know whether the subreg
901 is partial, paradoxical or complete. */
902 if (!ordered_p (isize, osize))
903 return false;
904
905 /* All subregs must be aligned. */
906 if (!multiple_p (offset, osize))
907 return false;
908
909 /* The subreg offset cannot be outside the inner object. */
910 if (maybe_ge (offset, isize))
911 return false;
912
913 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
914
915 /* ??? This should not be here. Temporarily continue to allow word_mode
916 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
917 Generally, backends are doing something sketchy but it'll take time to
918 fix them all. */
919 if (omode == word_mode)
920 ;
921 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
922 is the culprit here, and not the backends. */
923 else if (known_ge (osize, regsize) && known_ge (isize, osize))
924 ;
925 /* Allow component subregs of complex and vector. Though given the below
926 extraction rules, it's not always clear what that means. */
927 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
928 && GET_MODE_INNER (imode) == omode)
929 ;
930 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
931 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
932 represent this. It's questionable if this ought to be represented at
933 all -- why can't this all be hidden in post-reload splitters that make
934 arbitrary mode changes to the registers themselves. */
935 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
936 ;
937 /* Subregs involving floating point modes are not allowed to
938 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
939 (subreg:SI (reg:DF) 0) isn't. */
940 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
941 {
942 if (! (known_eq (isize, osize)
943 /* LRA can use a subreg to store a floating point value in
944 an integer mode. Although the floating point and the
945 integer modes need the same number of hard registers,
946 the size of the floating point mode can be less than that
947 of the integer mode. LRA also uses subregs when a register
948 must be used in a different mode in an insn. */
949 || lra_in_progress))
950 return false;
951 }
952
953 /* Paradoxical subregs must have offset zero. */
954 if (maybe_gt (osize, isize))
955 return known_eq (offset, 0U);
956
957 /* This is a normal subreg. Verify that the offset is representable. */
958
959 /* For hard registers, we already have most of these rules collected in
960 subreg_offset_representable_p. */
961 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
962 {
963 unsigned int regno = REGNO (reg);
964
965 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
966 && GET_MODE_INNER (imode) == omode)
967 ;
968 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
969 return false;
970
971 return subreg_offset_representable_p (regno, imode, offset, omode);
972 }
973
974 /* The outer size must be ordered wrt the register size, otherwise
975 we wouldn't know at compile time how many registers the outer
976 mode occupies. */
977 if (!ordered_p (osize, regsize))
978 return false;
979
980 /* For pseudo registers, we want most of the same checks. Namely:
981
982 Assume that the pseudo register will be allocated to hard registers
983 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
984 the remainder must correspond to the lowpart of the containing hard
985 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
986 otherwise it is at the lowest offset.
987
988 Given that we've already checked the mode and offset alignment,
989 we only have to check subblock subregs here. */
990 if (maybe_lt (osize, regsize)
991 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
992 {
993 /* It is invalid for the target to pick a register size for a mode
994 that isn't ordered wrt the size of that mode. */
995 poly_uint64 block_size = ordered_min (isize, regsize);
996 unsigned int start_reg;
997 poly_uint64 offset_within_reg;
998 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
999 || (BYTES_BIG_ENDIAN
1000 ? maybe_ne (offset_within_reg, block_size - osize)
1001 : maybe_ne (offset_within_reg, 0U)))
1002 return false;
1003 }
1004 return true;
1005 }
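/* Editorial illustration (not part of the original file): on a typical
   32-bit little-endian target, the rules above give, for a DImode pseudo X,

     validate_subreg (SImode, DImode, X, 0)  -> true   (low word)
     validate_subreg (SImode, DImode, X, 2)  -> false  (offset not a
                                                multiple of the outer size)
     validate_subreg (SImode, DImode, X, 8)  -> false  (offset outside the
                                                inner object)

   Answers for other mode pairs and offsets are target-dependent.  */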
1006
1007 rtx
1008 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1009 {
1010 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1011 return gen_rtx_raw_SUBREG (mode, reg, offset);
1012 }
1013
1014 /* Generate a SUBREG representing the least-significant part of REG if MODE
1015 is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
1016
1017 rtx
1018 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1019 {
1020 machine_mode inmode;
1021
1022 inmode = GET_MODE (reg);
1023 if (inmode == VOIDmode)
1024 inmode = mode;
1025 return gen_rtx_SUBREG (mode, reg,
1026 subreg_lowpart_offset (mode, inmode));
1027 }
1028
1029 rtx
1030 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1031 enum var_init_status status)
1032 {
1033 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1034 PAT_VAR_LOCATION_STATUS (x) = status;
1035 return x;
1036 }
1037
1038
1039 /* Create an rtvec and store within it the RTXen passed as arguments. */
1040
1041 rtvec
1042 gen_rtvec (int n, ...)
1043 {
1044 int i;
1045 rtvec rt_val;
1046 va_list p;
1047
1048 va_start (p, n);
1049
1050 /* Don't allocate an empty rtvec... */
1051 if (n == 0)
1052 {
1053 va_end (p);
1054 return NULL_RTVEC;
1055 }
1056
1057 rt_val = rtvec_alloc (n);
1058
1059 for (i = 0; i < n; i++)
1060 rt_val->elem[i] = va_arg (p, rtx);
1061
1062 va_end (p);
1063 return rt_val;
1064 }
1065
1066 rtvec
1067 gen_rtvec_v (int n, rtx *argp)
1068 {
1069 int i;
1070 rtvec rt_val;
1071
1072 /* Don't allocate an empty rtvec... */
1073 if (n == 0)
1074 return NULL_RTVEC;
1075
1076 rt_val = rtvec_alloc (n);
1077
1078 for (i = 0; i < n; i++)
1079 rt_val->elem[i] = *argp++;
1080
1081 return rt_val;
1082 }
1083
1084 rtvec
1085 gen_rtvec_v (int n, rtx_insn **argp)
1086 {
1087 int i;
1088 rtvec rt_val;
1089
1090 /* Don't allocate an empty rtvec... */
1091 if (n == 0)
1092 return NULL_RTVEC;
1093
1094 rt_val = rtvec_alloc (n);
1095
1096 for (i = 0; i < n; i++)
1097 rt_val->elem[i] = *argp++;
1098
1099 return rt_val;
1100 }
1101
1102
1103 /* Return the number of bytes between the start of an OUTER_MODE
1104 in-memory value and the start of an INNER_MODE in-memory value,
1105 given that the former is a lowpart of the latter. It may be a
1106 paradoxical lowpart, in which case the offset will be negative
1107 on big-endian targets. */
1108
1109 poly_int64
1110 byte_lowpart_offset (machine_mode outer_mode,
1111 machine_mode inner_mode)
1112 {
1113 if (paradoxical_subreg_p (outer_mode, inner_mode))
1114 return -subreg_lowpart_offset (inner_mode, outer_mode);
1115 else
1116 return subreg_lowpart_offset (outer_mode, inner_mode);
1117 }
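/* Editorial illustration (not part of the original file): for the common
   SImode-within-DImode case, assuming bytes and words have the same
   endianness,

     byte_lowpart_offset (SImode, DImode) ==  0 on little-endian targets,
                                              4 on big-endian targets;
     byte_lowpart_offset (DImode, SImode) ==  0 on little-endian targets,
                                             -4 on big-endian targets,

   the second pair being the paradoxical case described above.  */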
1118
1119 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1120 from address X. For paradoxical big-endian subregs this is a
1121 negative value, otherwise it's the same as OFFSET. */
1122
1123 poly_int64
1124 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1125 poly_uint64 offset)
1126 {
1127 if (paradoxical_subreg_p (outer_mode, inner_mode))
1128 {
1129 gcc_assert (known_eq (offset, 0U));
1130 return -subreg_lowpart_offset (inner_mode, outer_mode);
1131 }
1132 return offset;
1133 }
1134
1135 /* As above, but return the offset that existing subreg X would have
1136 if SUBREG_REG (X) were stored in memory. The only significant thing
1137 about the current SUBREG_REG is its mode. */
1138
1139 poly_int64
1140 subreg_memory_offset (const_rtx x)
1141 {
1142 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1143 SUBREG_BYTE (x));
1144 }
1145
1146 /* Generate a REG rtx for a new pseudo register of mode MODE.
1147 This pseudo is assigned the next sequential register number. */
1148
1149 rtx
1150 gen_reg_rtx (machine_mode mode)
1151 {
1152 rtx val;
1153 unsigned int align = GET_MODE_ALIGNMENT (mode);
1154
1155 gcc_assert (can_create_pseudo_p ());
1156
1157 /* If a virtual register with bigger mode alignment is generated,
1158 increase the stack alignment estimate because it might be spilled
1159 to the stack later. */
1160 if (SUPPORTS_STACK_ALIGNMENT
1161 && crtl->stack_alignment_estimated < align
1162 && !crtl->stack_realign_processed)
1163 {
1164 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1165 if (crtl->stack_alignment_estimated < min_align)
1166 crtl->stack_alignment_estimated = min_align;
1167 }
1168
1169 if (generating_concat_p
1170 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1171 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1172 {
1173 /* For complex modes, don't make a single pseudo.
1174 Instead, make a CONCAT of two pseudos.
1175 This allows noncontiguous allocation of the real and imaginary parts,
1176 which makes much better code. Besides, allocating DCmode
1177 pseudos overstrains reload on some machines like the 386. */
1178 rtx realpart, imagpart;
1179 machine_mode partmode = GET_MODE_INNER (mode);
1180
1181 realpart = gen_reg_rtx (partmode);
1182 imagpart = gen_reg_rtx (partmode);
1183 return gen_rtx_CONCAT (mode, realpart, imagpart);
1184 }
1185
1186 /* Do not call gen_reg_rtx with uninitialized crtl. */
1187 gcc_assert (crtl->emit.regno_pointer_align_length);
1188
1189 crtl->emit.ensure_regno_capacity ();
1190 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1191
1192 val = gen_raw_REG (mode, reg_rtx_no);
1193 regno_reg_rtx[reg_rtx_no++] = val;
1194 return val;
1195 }
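/* Editorial note (not part of the original file): during expansion, when
   generating_concat_p is set, gen_reg_rtx (SImode) returns a fresh
   (reg:SI N) pseudo, whereas gen_reg_rtx (DCmode) returns a CONCAT of two
   independent DFmode pseudos as described above, so the real and
   imaginary parts can be allocated separately.  */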
1196
1197 /* Make sure m_regno_pointer_align and regno_reg_rtx are large
1198 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1199
1200 void
1201 emit_status::ensure_regno_capacity ()
1202 {
1203 int old_size = regno_pointer_align_length;
1204
1205 if (reg_rtx_no < old_size)
1206 return;
1207
1208 int new_size = old_size * 2;
1209 while (reg_rtx_no >= new_size)
1210 new_size *= 2;
1211
1212 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1213 memset (tmp + old_size, 0, new_size - old_size);
1214 regno_pointer_align = (unsigned char *) tmp;
1215
1216 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1217 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1218 regno_reg_rtx = new1;
1219
1220 crtl->emit.regno_pointer_align_length = new_size;
1221 }
1222
1223 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1224
1225 bool
1226 reg_is_parm_p (rtx reg)
1227 {
1228 tree decl;
1229
1230 gcc_assert (REG_P (reg));
1231 decl = REG_EXPR (reg);
1232 return (decl && TREE_CODE (decl) == PARM_DECL);
1233 }
1234
1235 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1236 to the REG_OFFSET. */
1237
1238 static void
1239 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1240 {
1241 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1242 REG_OFFSET (reg) + offset);
1243 }
1244
1245 /* Generate a register with same attributes as REG, but with OFFSET
1246 added to the REG_OFFSET. */
1247
1248 rtx
1249 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1250 poly_int64 offset)
1251 {
1252 rtx new_rtx = gen_rtx_REG (mode, regno);
1253
1254 update_reg_offset (new_rtx, reg, offset);
1255 return new_rtx;
1256 }
1257
1258 /* Generate a new pseudo-register with the same attributes as REG, but
1259 with OFFSET added to the REG_OFFSET. */
1260
1261 rtx
1262 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1263 {
1264 rtx new_rtx = gen_reg_rtx (mode);
1265
1266 update_reg_offset (new_rtx, reg, offset);
1267 return new_rtx;
1268 }
1269
1270 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1271 new register is a (possibly paradoxical) lowpart of the old one. */
1272
1273 void
1274 adjust_reg_mode (rtx reg, machine_mode mode)
1275 {
1276 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1277 PUT_MODE (reg, mode);
1278 }
1279
1280 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1281 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1282
1283 void
1284 set_reg_attrs_from_value (rtx reg, rtx x)
1285 {
1286 poly_int64 offset;
1287 bool can_be_reg_pointer = true;
1288
1289 /* Don't call mark_reg_pointer for incompatible pointer sign
1290 extension. */
1291 while (GET_CODE (x) == SIGN_EXTEND
1292 || GET_CODE (x) == ZERO_EXTEND
1293 || GET_CODE (x) == TRUNCATE
1294 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1295 {
1296 #if defined(POINTERS_EXTEND_UNSIGNED)
1297 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1298 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1299 || (paradoxical_subreg_p (x)
1300 && ! (SUBREG_PROMOTED_VAR_P (x)
1301 && SUBREG_CHECK_PROMOTED_SIGN (x,
1302 POINTERS_EXTEND_UNSIGNED))))
1303 && !targetm.have_ptr_extend ())
1304 can_be_reg_pointer = false;
1305 #endif
1306 x = XEXP (x, 0);
1307 }
1308
1309 /* Hard registers can be reused for multiple purposes within the same
1310 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1311 on them is wrong. */
1312 if (HARD_REGISTER_P (reg))
1313 return;
1314
1315 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1316 if (MEM_P (x))
1317 {
1318 if (MEM_OFFSET_KNOWN_P (x))
1319 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1320 MEM_OFFSET (x) + offset);
1321 if (can_be_reg_pointer && MEM_POINTER (x))
1322 mark_reg_pointer (reg, 0);
1323 }
1324 else if (REG_P (x))
1325 {
1326 if (REG_ATTRS (x))
1327 update_reg_offset (reg, x, offset);
1328 if (can_be_reg_pointer && REG_POINTER (x))
1329 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1330 }
1331 }
1332
1333 /* Generate a REG rtx for a new pseudo register, copying the mode
1334 and attributes from X. */
1335
1336 rtx
1337 gen_reg_rtx_and_attrs (rtx x)
1338 {
1339 rtx reg = gen_reg_rtx (GET_MODE (x));
1340 set_reg_attrs_from_value (reg, x);
1341 return reg;
1342 }
1343
1344 /* Set the register attributes for registers contained in PARM_RTX.
1345 Use needed values from memory attributes of MEM. */
1346
1347 void
1348 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1349 {
1350 if (REG_P (parm_rtx))
1351 set_reg_attrs_from_value (parm_rtx, mem);
1352 else if (GET_CODE (parm_rtx) == PARALLEL)
1353 {
1354 /* Check for a NULL entry in the first slot, used to indicate that the
1355 parameter goes both on the stack and in registers. */
1356 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1357 for (; i < XVECLEN (parm_rtx, 0); i++)
1358 {
1359 rtx x = XVECEXP (parm_rtx, 0, i);
1360 if (REG_P (XEXP (x, 0)))
1361 REG_ATTRS (XEXP (x, 0))
1362 = get_reg_attrs (MEM_EXPR (mem),
1363 INTVAL (XEXP (x, 1)));
1364 }
1365 }
1366 }
1367
1368 /* Set the REG_ATTRS for registers in value X, given that X represents
1369 decl T. */
1370
1371 void
1372 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1373 {
1374 if (!t)
1375 return;
1376 tree tdecl = t;
1377 if (GET_CODE (x) == SUBREG)
1378 {
1379 gcc_assert (subreg_lowpart_p (x));
1380 x = SUBREG_REG (x);
1381 }
1382 if (REG_P (x))
1383 REG_ATTRS (x)
1384 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1385 DECL_P (tdecl)
1386 ? DECL_MODE (tdecl)
1387 : TYPE_MODE (TREE_TYPE (tdecl))));
1388 if (GET_CODE (x) == CONCAT)
1389 {
1390 if (REG_P (XEXP (x, 0)))
1391 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1392 if (REG_P (XEXP (x, 1)))
1393 REG_ATTRS (XEXP (x, 1))
1394 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1395 }
1396 if (GET_CODE (x) == PARALLEL)
1397 {
1398 int i, start;
1399
1400 /* Check for a NULL entry, used to indicate that the parameter goes
1401 both on the stack and in registers. */
1402 if (XEXP (XVECEXP (x, 0, 0), 0))
1403 start = 0;
1404 else
1405 start = 1;
1406
1407 for (i = start; i < XVECLEN (x, 0); i++)
1408 {
1409 rtx y = XVECEXP (x, 0, i);
1410 if (REG_P (XEXP (y, 0)))
1411 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1412 }
1413 }
1414 }
1415
1416 /* Assign the RTX X to declaration T. */
1417
1418 void
1419 set_decl_rtl (tree t, rtx x)
1420 {
1421 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1422 if (x)
1423 set_reg_attrs_for_decl_rtl (t, x);
1424 }
1425
1426 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1427 if the ABI requires the parameter to be passed by reference. */
1428
1429 void
1430 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1431 {
1432 DECL_INCOMING_RTL (t) = x;
1433 if (x && !by_reference_p)
1434 set_reg_attrs_for_decl_rtl (t, x);
1435 }
1436
1437 /* Identify REG (which may be a CONCAT) as a user register. */
1438
1439 void
1440 mark_user_reg (rtx reg)
1441 {
1442 if (GET_CODE (reg) == CONCAT)
1443 {
1444 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1445 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1446 }
1447 else
1448 {
1449 gcc_assert (REG_P (reg));
1450 REG_USERVAR_P (reg) = 1;
1451 }
1452 }
1453
1454 /* Identify REG as a probable pointer register and show its alignment
1455 as ALIGN, if nonzero. */
1456
1457 void
1458 mark_reg_pointer (rtx reg, int align)
1459 {
1460 if (! REG_POINTER (reg))
1461 {
1462 REG_POINTER (reg) = 1;
1463
1464 if (align)
1465 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1466 }
1467 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1468 /* We can no longer be sure just how aligned this pointer is. */
1469 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1470 }
1471
1472 /* Return 1 plus largest pseudo reg number used in the current function. */
1473
1474 int
1475 max_reg_num (void)
1476 {
1477 return reg_rtx_no;
1478 }
1479
1480 /* Return 1 + the largest label number used so far in the current function. */
1481
1482 int
1483 max_label_num (void)
1484 {
1485 return label_num;
1486 }
1487
1488 /* Return first label number used in this function (if any were used). */
1489
1490 int
1491 get_first_label_num (void)
1492 {
1493 return first_label_num;
1494 }
1495
1496 /* If the rtx for label was created during the expansion of a nested
1497 function, then first_label_num won't include this label number.
1498 Fix this now so that array indices work later. */
1499
1500 void
1501 maybe_set_first_label_num (rtx_code_label *x)
1502 {
1503 if (CODE_LABEL_NUMBER (x) < first_label_num)
1504 first_label_num = CODE_LABEL_NUMBER (x);
1505 }
1506
1507 /* For use by the RTL function loader, when mingling with normal
1508 functions.
1509 Ensure that label_num is greater than the label num of X, to avoid
1510 duplicate labels in the generated assembler. */
1511
1512 void
1513 maybe_set_max_label_num (rtx_code_label *x)
1514 {
1515 if (CODE_LABEL_NUMBER (x) >= label_num)
1516 label_num = CODE_LABEL_NUMBER (x) + 1;
1517 }
1518
1519
1520 /* Return a value representing some low-order bits of X, where the number
1521 of low-order bits is given by MODE. Note that no conversion is done
1522 between floating-point and fixed-point values, rather, the bit
1523 representation is returned.
1524
1525 This function handles the cases in common between gen_lowpart, below,
1526 and two variants in cse.c and combine.c. These are the cases that can
1527 be safely handled at all points in the compilation.
1528
1529 If this is not a case we can handle, return 0. */
1530
1531 rtx
1532 gen_lowpart_common (machine_mode mode, rtx x)
1533 {
1534 poly_uint64 msize = GET_MODE_SIZE (mode);
1535 machine_mode innermode;
1536
1537 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1538 so we have to make one up. Yuk. */
1539 innermode = GET_MODE (x);
1540 if (CONST_INT_P (x)
1541 && known_le (msize * BITS_PER_UNIT,
1542 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1543 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1544 else if (innermode == VOIDmode)
1545 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1546
1547 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1548
1549 if (innermode == mode)
1550 return x;
1551
1552 /* The size of the outer and inner modes must be ordered. */
1553 poly_uint64 xsize = GET_MODE_SIZE (innermode);
1554 if (!ordered_p (msize, xsize))
1555 return 0;
1556
1557 if (SCALAR_FLOAT_MODE_P (mode))
1558 {
1559 /* Don't allow paradoxical FLOAT_MODE subregs. */
1560 if (maybe_gt (msize, xsize))
1561 return 0;
1562 }
1563 else
1564 {
1565 /* MODE must occupy no more of the underlying registers than X. */
1566 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1567 unsigned int mregs, xregs;
1568 if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1569 || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1570 || mregs > xregs)
1571 return 0;
1572 }
1573
1574 scalar_int_mode int_mode, int_innermode, from_mode;
1575 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1576 && is_a <scalar_int_mode> (mode, &int_mode)
1577 && is_a <scalar_int_mode> (innermode, &int_innermode)
1578 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1579 {
1580 /* If we are getting the low-order part of something that has been
1581 sign- or zero-extended, we can either just use the object being
1582 extended or make a narrower extension. If we want an even smaller
1583 piece than the size of the object being extended, call ourselves
1584 recursively.
1585
1586 This case is used mostly by combine and cse. */
1587
1588 if (from_mode == int_mode)
1589 return XEXP (x, 0);
1590 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1591 return gen_lowpart_common (int_mode, XEXP (x, 0));
1592 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1593 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1594 }
1595 else if (GET_CODE (x) == SUBREG || REG_P (x)
1596 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1597 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1598 || CONST_POLY_INT_P (x))
1599 return lowpart_subreg (mode, x, innermode);
1600
1601 /* Otherwise, we can't do this. */
1602 return 0;
1603 }
1604
1605 rtx
1606 gen_highpart (machine_mode mode, rtx x)
1607 {
1608 poly_uint64 msize = GET_MODE_SIZE (mode);
1609 rtx result;
1610
1611 /* This case loses if X is a subreg. To catch bugs early,
1612 complain if an invalid MODE is used even in other cases. */
1613 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1614 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1615
1616 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1617 subreg_highpart_offset (mode, GET_MODE (x)));
1618 gcc_assert (result);
1619
1620 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1621 the target if we have a MEM. gen_highpart must return a valid operand,
1622 emitting code if necessary to do so. */
1623 if (MEM_P (result))
1624 {
1625 result = validize_mem (result);
1626 gcc_assert (result);
1627 }
1628
1629 return result;
1630 }
1631
1632 /* Like gen_highpart, but accept the mode of EXP as an argument in case
1633 EXP can be a VOIDmode constant. */
1634 rtx
1635 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1636 {
1637 if (GET_MODE (exp) != VOIDmode)
1638 {
1639 gcc_assert (GET_MODE (exp) == innermode);
1640 return gen_highpart (outermode, exp);
1641 }
1642 return simplify_gen_subreg (outermode, exp, innermode,
1643 subreg_highpart_offset (outermode, innermode));
1644 }
1645
1646 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1647 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1648
1649 poly_uint64
1650 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1651 {
1652 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1653 if (maybe_gt (outer_bytes, inner_bytes))
1654 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1655 return 0;
1656
1657 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1658 return inner_bytes - outer_bytes;
1659 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1660 return 0;
1661 else
1662 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1663 }
1664
1665 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1666 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1667
1668 poly_uint64
1669 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1670 {
1671 gcc_assert (known_ge (inner_bytes, outer_bytes));
1672
1673 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1674 return 0;
1675 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1676 return inner_bytes - outer_bytes;
1677 else
1678 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1679 (inner_bytes - outer_bytes)
1680 * BITS_PER_UNIT);
1681 }
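/* Editorial illustration (not part of the original file): for a 4-byte
   value stored inside a 16-byte one, and assuming BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN agree,

     subreg_size_lowpart_offset  (4, 16) ==  0 little-endian, 12 big-endian
     subreg_size_highpart_offset (4, 16) == 12 little-endian,  0 big-endian

   Mixed-endian targets go through subreg_size_offset_from_lsb instead.  */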
1682
1683 /* Return 1 iff X, assumed to be a SUBREG,
1684 refers to the least significant part of its containing reg.
1685 If X is not a SUBREG, always return 1 (it is its own low part!). */
1686
1687 int
1688 subreg_lowpart_p (const_rtx x)
1689 {
1690 if (GET_CODE (x) != SUBREG)
1691 return 1;
1692 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1693 return 0;
1694
1695 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1696 GET_MODE (SUBREG_REG (x))),
1697 SUBREG_BYTE (x));
1698 }
1699
1700 /* Return subword OFFSET of operand OP.
1701 The word number, OFFSET, is interpreted as the word number starting
1702 at the low-order address. OFFSET 0 is the low-order word if not
1703 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1704
1705 If we cannot extract the required word, we return zero. Otherwise,
1706 an rtx corresponding to the requested word will be returned.
1707
1708 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1709 reload has completed, a valid address will always be returned. After
1710 reload, if a valid address cannot be returned, we return zero.
1711
1712 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1713 it is the responsibility of the caller.
1714
1715 MODE is the mode of OP in case it is a CONST_INT.
1716
1717 ??? This is still rather broken for some cases. The problem for the
1718 moment is that all callers of this thing provide no 'goal mode' to
1719 tell us to work with. This exists because all callers were written
1720 in a word-based SUBREG world.
1721 Nowadays, use of this function can be replaced by simplify_subreg in most
1722 cases.
1723 */
1724
1725 rtx
1726 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1727 machine_mode mode)
1728 {
1729 if (mode == VOIDmode)
1730 mode = GET_MODE (op);
1731
1732 gcc_assert (mode != VOIDmode);
1733
1734 /* If OP is narrower than a word, fail. */
1735 if (mode != BLKmode
1736 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1737 return 0;
1738
1739 /* If we want a word outside OP, return zero. */
1740 if (mode != BLKmode
1741 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1742 return const0_rtx;
1743
1744 /* Form a new MEM at the requested address. */
1745 if (MEM_P (op))
1746 {
1747 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1748
1749 if (! validate_address)
1750 return new_rtx;
1751
1752 else if (reload_completed)
1753 {
1754 if (! strict_memory_address_addr_space_p (word_mode,
1755 XEXP (new_rtx, 0),
1756 MEM_ADDR_SPACE (op)))
1757 return 0;
1758 }
1759 else
1760 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1761 }
1762
1763 /* Rest can be handled by simplify_subreg. */
1764 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1765 }
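/* A typical use, assuming UNITS_PER_WORD == 4 and OP a DImode pseudo REG
   (a sketch; REG here is hypothetical):

	rtx lo = operand_subword (reg, 0, 1, DImode);
	rtx hi = operand_subword (reg, 1, 1, DImode);

   Each result is a word_mode value; word 0 is the low-order word unless
   WORDS_BIG_ENDIAN.  Asking for word 2 of a DImode value returns
   const0_rtx, because the requested word lies outside OP.  */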
1766
1767 /* Similar to `operand_subword', but never return 0. If we can't
1768 extract the required subword, put OP into a register and try again.
1769 The second attempt must succeed. We always validate the address in
1770 this case.
1771
1772 MODE is the mode of OP, in case it is CONST_INT. */
1773
1774 rtx
1775 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1776 {
1777 rtx result = operand_subword (op, offset, 1, mode);
1778
1779 if (result)
1780 return result;
1781
1782 if (mode != BLKmode && mode != VOIDmode)
1783 {
1784 /* If this is a register that cannot be accessed by words, copy it
1785 to a pseudo register. */
1786 if (REG_P (op))
1787 op = copy_to_reg (op);
1788 else
1789 op = force_reg (mode, op);
1790 }
1791
1792 result = operand_subword (op, offset, 1, mode);
1793 gcc_assert (result);
1794
1795 return result;
1796 }
1797
1798 mem_attrs::mem_attrs ()
1799 : expr (NULL_TREE),
1800 offset (0),
1801 size (0),
1802 alias (0),
1803 align (0),
1804 addrspace (ADDR_SPACE_GENERIC),
1805 offset_known_p (false),
1806 size_known_p (false)
1807 {}
1808
1809 /* Return 1 if the MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
1810 and 0 otherwise.  */
1811
1812 int
1813 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1814 {
1815 if (expr1 == expr2)
1816 return 1;
1817
1818 if (! expr1 || ! expr2)
1819 return 0;
1820
1821 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1822 return 0;
1823
1824 return operand_equal_p (expr1, expr2, 0);
1825 }
1826
1827 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1828 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1829 -1 if not known. */
1830
1831 int
1832 get_mem_align_offset (rtx mem, unsigned int align)
1833 {
1834 tree expr;
1835 poly_uint64 offset;
1836
1837 /* This function can't use
1838 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1839 || (MAX (MEM_ALIGN (mem),
1840 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1841 < align))
1842 return -1;
1843 else
1844 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1845 for two reasons:
1846 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1847 for <variable>. get_inner_reference doesn't handle it and
1848 even if it did, the alignment in that case needs to be determined
1849 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1850 - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1851 isn't sufficiently aligned, the object it is in might be.  */
1852 gcc_assert (MEM_P (mem));
1853 expr = MEM_EXPR (mem);
1854 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1855 return -1;
1856
1857 offset = MEM_OFFSET (mem);
1858 if (DECL_P (expr))
1859 {
1860 if (DECL_ALIGN (expr) < align)
1861 return -1;
1862 }
1863 else if (INDIRECT_REF_P (expr))
1864 {
1865 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1866 return -1;
1867 }
1868 else if (TREE_CODE (expr) == COMPONENT_REF)
1869 {
1870 while (1)
1871 {
1872 tree inner = TREE_OPERAND (expr, 0);
1873 tree field = TREE_OPERAND (expr, 1);
1874 tree byte_offset = component_ref_field_offset (expr);
1875 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1876
1877 poly_uint64 suboffset;
1878 if (!byte_offset
1879 || !poly_int_tree_p (byte_offset, &suboffset)
1880 || !tree_fits_uhwi_p (bit_offset))
1881 return -1;
1882
1883 offset += suboffset;
1884 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1885
1886 if (inner == NULL_TREE)
1887 {
1888 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1889 < (unsigned int) align)
1890 return -1;
1891 break;
1892 }
1893 else if (DECL_P (inner))
1894 {
1895 if (DECL_ALIGN (inner) < align)
1896 return -1;
1897 break;
1898 }
1899 else if (TREE_CODE (inner) != COMPONENT_REF)
1900 return -1;
1901 expr = inner;
1902 }
1903 }
1904 else
1905 return -1;
1906
1907 HOST_WIDE_INT misalign;
1908 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1909 return -1;
1910 return misalign;
1911 }
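/* For example, given a MEM whose MEM_EXPR is a DECL aligned to 16 bytes
   and whose known MEM_OFFSET is 12, get_mem_align_offset (mem, 128)
   returns 12, i.e. XEXP (mem, 0) minus 12 bytes is 128-bit aligned.  If
   the decl's alignment or the offset is unknown, or the alignment is too
   small, it returns -1.  Worked example only; the numbers are illustrative.  */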
1912
1913 /* Given REF (a MEM) and T, either the type of X or the expression
1914 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1915 if we are making a new object of this type. BITPOS is nonzero if
1916 there is an offset outstanding on T that will be applied later. */
1917
1918 void
1919 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1920 poly_int64 bitpos)
1921 {
1922 poly_int64 apply_bitpos = 0;
1923 tree type;
1924 struct mem_attrs attrs, *defattrs, *refattrs;
1925 addr_space_t as;
1926
1927 /* It can happen that type_for_mode was given a mode for which there
1928 is no language-level type.  In that case it returns NULL, which
1929 we can see here.  */
1930 if (t == NULL_TREE)
1931 return;
1932
1933 type = TYPE_P (t) ? t : TREE_TYPE (t);
1934 if (type == error_mark_node)
1935 return;
1936
1937 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1938 wrong answer, as it assumes that DECL_RTL already has the right alias
1939 info. Callers should not set DECL_RTL until after the call to
1940 set_mem_attributes. */
1941 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1942
1943 /* Get the alias set from the expression or type (perhaps using a
1944 front-end routine) and use it. */
1945 attrs.alias = get_alias_set (t);
1946
1947 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1948 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1949
1950 /* Default values from pre-existing memory attributes if present. */
1951 refattrs = MEM_ATTRS (ref);
1952 if (refattrs)
1953 {
1954 /* ??? Can this ever happen? Calling this routine on a MEM that
1955 already carries memory attributes should probably be invalid. */
1956 attrs.expr = refattrs->expr;
1957 attrs.offset_known_p = refattrs->offset_known_p;
1958 attrs.offset = refattrs->offset;
1959 attrs.size_known_p = refattrs->size_known_p;
1960 attrs.size = refattrs->size;
1961 attrs.align = refattrs->align;
1962 }
1963
1964 /* Otherwise, default values from the mode of the MEM reference. */
1965 else
1966 {
1967 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1968 gcc_assert (!defattrs->expr);
1969 gcc_assert (!defattrs->offset_known_p);
1970
1971 /* Respect mode size. */
1972 attrs.size_known_p = defattrs->size_known_p;
1973 attrs.size = defattrs->size;
1974 /* ??? Is this really necessary? We probably should always get
1975 the size from the type below. */
1976
1977 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1978 if T is an object, always compute the object alignment below. */
1979 if (TYPE_P (t))
1980 attrs.align = defattrs->align;
1981 else
1982 attrs.align = BITS_PER_UNIT;
1983 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1984 e.g. if the type carries an alignment attribute. Should we be
1985 able to simply always use TYPE_ALIGN? */
1986 }
1987
1988 /* We can set the alignment from the type if we are making an object or if
1989 this is an INDIRECT_REF. */
1990 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1991 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1992
1993 /* If the size is known, we can set that. */
1994 tree new_size = TYPE_SIZE_UNIT (type);
1995
1996 /* The address-space is that of the type. */
1997 as = TYPE_ADDR_SPACE (type);
1998
1999 /* If T is not a type, we may be able to deduce some more information about
2000 the expression. */
2001 if (! TYPE_P (t))
2002 {
2003 tree base;
2004
2005 if (TREE_THIS_VOLATILE (t))
2006 MEM_VOLATILE_P (ref) = 1;
2007
2008 /* Now remove any conversions: they don't change what the underlying
2009 object is. Likewise for SAVE_EXPR. */
2010 while (CONVERT_EXPR_P (t)
2011 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2012 || TREE_CODE (t) == SAVE_EXPR)
2013 t = TREE_OPERAND (t, 0);
2014
2015 /* Note whether this expression can trap. */
2016 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2017
2018 base = get_base_address (t);
2019 if (base)
2020 {
2021 if (DECL_P (base)
2022 && TREE_READONLY (base)
2023 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2024 && !TREE_THIS_VOLATILE (base))
2025 MEM_READONLY_P (ref) = 1;
2026
2027 /* Mark static const strings readonly as well. */
2028 if (TREE_CODE (base) == STRING_CST
2029 && TREE_READONLY (base)
2030 && TREE_STATIC (base))
2031 MEM_READONLY_P (ref) = 1;
2032
2033 /* Address-space information is on the base object. */
2034 if (TREE_CODE (base) == MEM_REF
2035 || TREE_CODE (base) == TARGET_MEM_REF)
2036 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2037 0))));
2038 else
2039 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2040 }
2041
2042 /* If this expression uses its parent's alias set, mark it such
2043 that we won't change it. */
2044 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2045 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2046
2047 /* If this is a decl, set the attributes of the MEM from it. */
2048 if (DECL_P (t))
2049 {
2050 attrs.expr = t;
2051 attrs.offset_known_p = true;
2052 attrs.offset = 0;
2053 apply_bitpos = bitpos;
2054 new_size = DECL_SIZE_UNIT (t);
2055 }
2056
2057 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2058 else if (CONSTANT_CLASS_P (t))
2059 ;
2060
2061 /* If this is a field reference, record it. */
2062 else if (TREE_CODE (t) == COMPONENT_REF)
2063 {
2064 attrs.expr = t;
2065 attrs.offset_known_p = true;
2066 attrs.offset = 0;
2067 apply_bitpos = bitpos;
2068 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2069 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2070 }
2071
2072 /* If this is an array reference, look for an outer field reference. */
2073 else if (TREE_CODE (t) == ARRAY_REF)
2074 {
2075 tree off_tree = size_zero_node;
2076 /* We can't modify t, because we use it at the end of the
2077 function. */
2078 tree t2 = t;
2079
2080 do
2081 {
2082 tree index = TREE_OPERAND (t2, 1);
2083 tree low_bound = array_ref_low_bound (t2);
2084 tree unit_size = array_ref_element_size (t2);
2085
2086 /* We assume all arrays have sizes that are a multiple of a byte.
2087 First subtract the lower bound, if any, in the type of the
2088 index, then convert to sizetype and multiply by the size of
2089 the array element. */
2090 if (! integer_zerop (low_bound))
2091 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2092 index, low_bound);
2093
2094 off_tree = size_binop (PLUS_EXPR,
2095 size_binop (MULT_EXPR,
2096 fold_convert (sizetype,
2097 index),
2098 unit_size),
2099 off_tree);
2100 t2 = TREE_OPERAND (t2, 0);
2101 }
2102 while (TREE_CODE (t2) == ARRAY_REF);
2103
2104 if (DECL_P (t2)
2105 || (TREE_CODE (t2) == COMPONENT_REF
2106 /* For trailing arrays t2 doesn't have a size that
2107 covers all valid accesses. */
2108 && ! array_at_struct_end_p (t)))
2109 {
2110 attrs.expr = t2;
2111 attrs.offset_known_p = false;
2112 if (poly_int_tree_p (off_tree, &attrs.offset))
2113 {
2114 attrs.offset_known_p = true;
2115 apply_bitpos = bitpos;
2116 }
2117 }
2118 /* Else do not record a MEM_EXPR. */
2119 }
2120
2121 /* If this is an indirect reference, record it. */
2122 else if (TREE_CODE (t) == MEM_REF
2123 || TREE_CODE (t) == TARGET_MEM_REF)
2124 {
2125 attrs.expr = t;
2126 attrs.offset_known_p = true;
2127 attrs.offset = 0;
2128 apply_bitpos = bitpos;
2129 }
2130
2131 /* Compute the alignment. */
2132 unsigned int obj_align;
2133 unsigned HOST_WIDE_INT obj_bitpos;
2134 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2135 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2136 if (diff_align != 0)
2137 obj_align = MIN (obj_align, diff_align);
2138 attrs.align = MAX (attrs.align, obj_align);
2139 }
2140
2141 poly_uint64 const_size;
2142 if (poly_int_tree_p (new_size, &const_size))
2143 {
2144 attrs.size_known_p = true;
2145 attrs.size = const_size;
2146 }
2147
2148 /* If we modified OFFSET based on T, then subtract the outstanding
2149 bit position offset. Similarly, increase the size of the accessed
2150 object to contain the negative offset. */
2151 if (maybe_ne (apply_bitpos, 0))
2152 {
2153 gcc_assert (attrs.offset_known_p);
2154 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2155 attrs.offset -= bytepos;
2156 if (attrs.size_known_p)
2157 attrs.size += bytepos;
2158 }
2159
2160 /* Now set the attributes we computed above. */
2161 attrs.addrspace = as;
2162 set_mem_attrs (ref, &attrs);
2163 }
2164
2165 void
2166 set_mem_attributes (rtx ref, tree t, int objectp)
2167 {
2168 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2169 }
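/* For instance, when expanding a reference to a declared variable the
   expander typically does something like the following (a sketch; DECL
   and ADDR are hypothetical):

	rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
	set_mem_attributes (mem, decl, 1);

   after which MEM_EXPR, MEM_ALIGN, MEM_SIZE and the alias set of MEM
   describe DECL rather than just its mode.  */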
2170
2171 /* Set the alias set of MEM to SET. */
2172
2173 void
2174 set_mem_alias_set (rtx mem, alias_set_type set)
2175 {
2176 /* If the new and old alias sets don't conflict, something is wrong. */
2177 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2178 mem_attrs attrs (*get_mem_attrs (mem));
2179 attrs.alias = set;
2180 set_mem_attrs (mem, &attrs);
2181 }
2182
2183 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2184
2185 void
2186 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2187 {
2188 mem_attrs attrs (*get_mem_attrs (mem));
2189 attrs.addrspace = addrspace;
2190 set_mem_attrs (mem, &attrs);
2191 }
2192
2193 /* Set the alignment of MEM to ALIGN bits. */
2194
2195 void
2196 set_mem_align (rtx mem, unsigned int align)
2197 {
2198 mem_attrs attrs (*get_mem_attrs (mem));
2199 attrs.align = align;
2200 set_mem_attrs (mem, &attrs);
2201 }
2202
2203 /* Set the expr for MEM to EXPR. */
2204
2205 void
2206 set_mem_expr (rtx mem, tree expr)
2207 {
2208 mem_attrs attrs (*get_mem_attrs (mem));
2209 attrs.expr = expr;
2210 set_mem_attrs (mem, &attrs);
2211 }
2212
2213 /* Set the offset of MEM to OFFSET. */
2214
2215 void
2216 set_mem_offset (rtx mem, poly_int64 offset)
2217 {
2218 mem_attrs attrs (*get_mem_attrs (mem));
2219 attrs.offset_known_p = true;
2220 attrs.offset = offset;
2221 set_mem_attrs (mem, &attrs);
2222 }
2223
2224 /* Clear the offset of MEM. */
2225
2226 void
2227 clear_mem_offset (rtx mem)
2228 {
2229 mem_attrs attrs (*get_mem_attrs (mem));
2230 attrs.offset_known_p = false;
2231 set_mem_attrs (mem, &attrs);
2232 }
2233
2234 /* Set the size of MEM to SIZE. */
2235
2236 void
2237 set_mem_size (rtx mem, poly_int64 size)
2238 {
2239 mem_attrs attrs (*get_mem_attrs (mem));
2240 attrs.size_known_p = true;
2241 attrs.size = size;
2242 set_mem_attrs (mem, &attrs);
2243 }
2244
2245 /* Clear the size of MEM. */
2246
2247 void
2248 clear_mem_size (rtx mem)
2249 {
2250 mem_attrs attrs (*get_mem_attrs (mem));
2251 attrs.size_known_p = false;
2252 set_mem_attrs (mem, &attrs);
2253 }
2254
2255 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2256 and its address changed to ADDR. (VOIDmode means don't change the mode.
2257 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2258 returned memory location is required to be valid. INPLACE is true if any
2259 changes can be made directly to MEMREF or false if MEMREF must be treated
2260 as immutable.
2261
2262 The memory attributes are not changed. */
2263
2264 static rtx
2265 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2266 bool inplace)
2267 {
2268 addr_space_t as;
2269 rtx new_rtx;
2270
2271 gcc_assert (MEM_P (memref));
2272 as = MEM_ADDR_SPACE (memref);
2273 if (mode == VOIDmode)
2274 mode = GET_MODE (memref);
2275 if (addr == 0)
2276 addr = XEXP (memref, 0);
2277 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2278 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2279 return memref;
2280
2281 /* Don't validate address for LRA. LRA can make the address valid
2282 by itself in most efficient way. */
2283 if (validate && !lra_in_progress)
2284 {
2285 if (reload_in_progress || reload_completed)
2286 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2287 else
2288 addr = memory_address_addr_space (mode, addr, as);
2289 }
2290
2291 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2292 return memref;
2293
2294 if (inplace)
2295 {
2296 XEXP (memref, 0) = addr;
2297 return memref;
2298 }
2299
2300 new_rtx = gen_rtx_MEM (mode, addr);
2301 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2302 return new_rtx;
2303 }
2304
2305 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2306 way we are changing MEMREF, so we only preserve the alias set. */
2307
2308 rtx
2309 change_address (rtx memref, machine_mode mode, rtx addr)
2310 {
2311 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2312 machine_mode mmode = GET_MODE (new_rtx);
2313 struct mem_attrs *defattrs;
2314
2315 mem_attrs attrs (*get_mem_attrs (memref));
2316 defattrs = mode_mem_attrs[(int) mmode];
2317 attrs.expr = NULL_TREE;
2318 attrs.offset_known_p = false;
2319 attrs.size_known_p = defattrs->size_known_p;
2320 attrs.size = defattrs->size;
2321 attrs.align = defattrs->align;
2322
2323 /* If there are no changes, just return the original memory reference. */
2324 if (new_rtx == memref)
2325 {
2326 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2327 return new_rtx;
2328
2329 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2330 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2331 }
2332
2333 set_mem_attrs (new_rtx, &attrs);
2334 return new_rtx;
2335 }
2336
2337 /* Return a memory reference like MEMREF, but with its mode changed
2338 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2339 nonzero, the memory address is forced to be valid.
2340 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2341 and the caller is responsible for adjusting MEMREF base register.
2342 If ADJUST_OBJECT is zero, the underlying object associated with the
2343 memory reference is left unchanged and the caller is responsible for
2344 dealing with it. Otherwise, if the new memory reference is outside
2345 the underlying object, even partially, then the object is dropped.
2346 SIZE, if nonzero, is the size of an access in cases where MODE
2347 has no inherent size. */
2348
2349 rtx
2350 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2351 int validate, int adjust_address, int adjust_object,
2352 poly_int64 size)
2353 {
2354 rtx addr = XEXP (memref, 0);
2355 rtx new_rtx;
2356 scalar_int_mode address_mode;
2357 struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2358 unsigned HOST_WIDE_INT max_align;
2359 #ifdef POINTERS_EXTEND_UNSIGNED
2360 scalar_int_mode pointer_mode
2361 = targetm.addr_space.pointer_mode (attrs.addrspace);
2362 #endif
2363
2364 /* VOIDmode means no mode change for change_address_1. */
2365 if (mode == VOIDmode)
2366 mode = GET_MODE (memref);
2367
2368 /* Take the size of non-BLKmode accesses from the mode. */
2369 defattrs = mode_mem_attrs[(int) mode];
2370 if (defattrs->size_known_p)
2371 size = defattrs->size;
2372
2373 /* If there are no changes, just return the original memory reference. */
2374 if (mode == GET_MODE (memref)
2375 && known_eq (offset, 0)
2376 && (known_eq (size, 0)
2377 || (attrs.size_known_p && known_eq (attrs.size, size)))
2378 && (!validate || memory_address_addr_space_p (mode, addr,
2379 attrs.addrspace)))
2380 return memref;
2381
2382 /* ??? Prefer to create garbage instead of creating shared rtl.
2383 This may happen even if offset is nonzero -- consider
2384 (plus (plus reg reg) const_int) -- so do this always. */
2385 addr = copy_rtx (addr);
2386
2387 /* Convert a possibly large offset to a signed value within the
2388 range of the target address space. */
2389 address_mode = get_address_mode (memref);
2390 offset = trunc_int_for_mode (offset, address_mode);
2391
2392 if (adjust_address)
2393 {
2394 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2395 object, we can merge it into the LO_SUM. */
2396 if (GET_MODE (memref) != BLKmode
2397 && GET_CODE (addr) == LO_SUM
2398 && known_in_range_p (offset,
2399 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2400 / BITS_PER_UNIT)))
2401 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2402 plus_constant (address_mode,
2403 XEXP (addr, 1), offset));
2404 #ifdef POINTERS_EXTEND_UNSIGNED
2405 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2406 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2407 the fact that pointers are not allowed to overflow. */
2408 else if (POINTERS_EXTEND_UNSIGNED > 0
2409 && GET_CODE (addr) == ZERO_EXTEND
2410 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2411 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2412 addr = gen_rtx_ZERO_EXTEND (address_mode,
2413 plus_constant (pointer_mode,
2414 XEXP (addr, 0), offset));
2415 #endif
2416 else
2417 addr = plus_constant (address_mode, addr, offset);
2418 }
2419
2420 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2421
2422 /* If the address is a REG, change_address_1 rightfully returns memref,
2423 but this would destroy memref's MEM_ATTRS. */
2424 if (new_rtx == memref && maybe_ne (offset, 0))
2425 new_rtx = copy_rtx (new_rtx);
2426
2427 /* Conservatively drop the object if we don't know where we start from. */
2428 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2429 {
2430 attrs.expr = NULL_TREE;
2431 attrs.alias = 0;
2432 }
2433
2434 /* Compute the new values of the memory attributes due to this adjustment.
2435 We add the offsets and update the alignment. */
2436 if (attrs.offset_known_p)
2437 {
2438 attrs.offset += offset;
2439
2440 /* Drop the object if the new left end is not within its bounds. */
2441 if (adjust_object && maybe_lt (attrs.offset, 0))
2442 {
2443 attrs.expr = NULL_TREE;
2444 attrs.alias = 0;
2445 }
2446 }
2447
2448 /* Compute the new alignment by taking the MIN of the alignment and the
2449 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2450 is zero.  */
2451 if (maybe_ne (offset, 0))
2452 {
2453 max_align = known_alignment (offset) * BITS_PER_UNIT;
2454 attrs.align = MIN (attrs.align, max_align);
2455 }
2456
2457 if (maybe_ne (size, 0))
2458 {
2459 /* Drop the object if the new right end is not within its bounds. */
2460 if (adjust_object && maybe_gt (offset + size, attrs.size))
2461 {
2462 attrs.expr = NULL_TREE;
2463 attrs.alias = 0;
2464 }
2465 attrs.size_known_p = true;
2466 attrs.size = size;
2467 }
2468 else if (attrs.size_known_p)
2469 {
2470 gcc_assert (!adjust_object);
2471 attrs.size -= offset;
2472 /* ??? The store_by_pieces machinery generates negative sizes,
2473 so don't assert for that here. */
2474 }
2475
2476 set_mem_attrs (new_rtx, &attrs);
2477
2478 return new_rtx;
2479 }
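/* Callers normally reach this through the adjust_address and
   adjust_address_nv wrappers, which pass ADJUST_ADDRESS == 1 and validate
   or not, respectively.  For example, to access the second SImode word of
   a DImode MEM (a sketch; X is hypothetical and SImode is assumed to be
   4 bytes):

	rtx hi = adjust_address (x, SImode, 4);

   which offsets the address by 4 bytes, narrows the mode, and updates
   MEM_OFFSET, MEM_SIZE and MEM_ALIGN accordingly.  */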
2480
2481 /* Return a memory reference like MEMREF, but with its mode changed
2482 to MODE and its address changed to ADDR, which is assumed to be
2483 MEMREF offset by OFFSET bytes. If VALIDATE is
2484 nonzero, the memory address is forced to be valid. */
2485
2486 rtx
2487 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2488 poly_int64 offset, int validate)
2489 {
2490 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2491 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2492 }
2493
2494 /* Return a memory reference like MEMREF, but whose address is changed by
2495 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2496 known to be in OFFSET (possibly 1). */
2497
2498 rtx
2499 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2500 {
2501 rtx new_rtx, addr = XEXP (memref, 0);
2502 machine_mode address_mode;
2503 struct mem_attrs *defattrs;
2504
2505 mem_attrs attrs (*get_mem_attrs (memref));
2506 address_mode = get_address_mode (memref);
2507 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2508
2509 /* At this point we don't know _why_ the address is invalid. It
2510 could have secondary memory references, multiplies or anything.
2511
2512 However, if we did go and rearrange things, we can wind up not
2513 being able to recognize the magic around pic_offset_table_rtx.
2514 This stuff is fragile, and is yet another example of why it is
2515 bad to expose PIC machinery too early. */
2516 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2517 attrs.addrspace)
2518 && GET_CODE (addr) == PLUS
2519 && XEXP (addr, 0) == pic_offset_table_rtx)
2520 {
2521 addr = force_reg (GET_MODE (addr), addr);
2522 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2523 }
2524
2525 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2526 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2527
2528 /* If there are no changes, just return the original memory reference. */
2529 if (new_rtx == memref)
2530 return new_rtx;
2531
2532 /* Update the alignment to reflect the offset. Reset the offset, which
2533 we don't know. */
2534 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2535 attrs.offset_known_p = false;
2536 attrs.size_known_p = defattrs->size_known_p;
2537 attrs.size = defattrs->size;
2538 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2539 set_mem_attrs (new_rtx, &attrs);
2540 return new_rtx;
2541 }
2542
2543 /* Return a memory reference like MEMREF, but with its address changed to
2544 ADDR. The caller is asserting that the actual piece of memory pointed
2545 to is the same, just the form of the address is being changed, such as
2546 by putting something into a register. INPLACE is true if any changes
2547 can be made directly to MEMREF or false if MEMREF must be treated as
2548 immutable. */
2549
2550 rtx
2551 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2552 {
2553 /* change_address_1 copies the memory attribute structure without change
2554 and that's exactly what we want here. */
2555 update_temp_slot_address (XEXP (memref, 0), addr);
2556 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2557 }
2558
2559 /* Likewise, but the reference is not required to be valid. */
2560
2561 rtx
2562 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2563 {
2564 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2565 }
2566
2567 /* Return a memory reference like MEMREF, but with its mode widened to
2568 MODE and offset by OFFSET. This would be used by targets that e.g.
2569 cannot issue QImode memory operations and have to use SImode memory
2570 operations plus masking logic. */
2571
2572 rtx
2573 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2574 {
2575 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2576 poly_uint64 size = GET_MODE_SIZE (mode);
2577
2578 /* If there are no changes, just return the original memory reference. */
2579 if (new_rtx == memref)
2580 return new_rtx;
2581
2582 mem_attrs attrs (*get_mem_attrs (new_rtx));
2583
2584 /* If we don't know what offset we were at within the expression, then
2585 we can't know if we've overstepped the bounds. */
2586 if (! attrs.offset_known_p)
2587 attrs.expr = NULL_TREE;
2588
2589 while (attrs.expr)
2590 {
2591 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2592 {
2593 tree field = TREE_OPERAND (attrs.expr, 1);
2594 tree offset = component_ref_field_offset (attrs.expr);
2595
2596 if (! DECL_SIZE_UNIT (field))
2597 {
2598 attrs.expr = NULL_TREE;
2599 break;
2600 }
2601
2602 /* Is the field at least as large as the access? If so, ok,
2603 otherwise strip back to the containing structure. */
2604 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2605 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2606 && known_ge (attrs.offset, 0))
2607 break;
2608
2609 poly_uint64 suboffset;
2610 if (!poly_int_tree_p (offset, &suboffset))
2611 {
2612 attrs.expr = NULL_TREE;
2613 break;
2614 }
2615
2616 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2617 attrs.offset += suboffset;
2618 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2619 / BITS_PER_UNIT);
2620 }
2621 /* Similarly for the decl. */
2622 else if (DECL_P (attrs.expr)
2623 && DECL_SIZE_UNIT (attrs.expr)
2624 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2625 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2626 size)
2627 && known_ge (attrs.offset, 0))
2628 break;
2629 else
2630 {
2631 /* The widened memory access overflows the expression, which means
2632 that it could alias another expression. Zap it. */
2633 attrs.expr = NULL_TREE;
2634 break;
2635 }
2636 }
2637
2638 if (! attrs.expr)
2639 attrs.offset_known_p = false;
2640
2641 /* The widened memory may alias other stuff, so zap the alias set. */
2642 /* ??? Maybe use get_alias_set on any remaining expression. */
2643 attrs.alias = 0;
2644 attrs.size_known_p = true;
2645 attrs.size = size;
2646 set_mem_attrs (new_rtx, &attrs);
2647 return new_rtx;
2648 }
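/* For example, a target that can only issue SImode loads might widen a
   QImode reference like this (a sketch; MEM0 is hypothetical and assumed
   to be a QImode MEM at a suitably aligned address):

	rtx wide = widen_memory_access (mem0, SImode, 0);

   The result covers the full SImode size, its MEM_EXPR is dropped or
   stripped back to a containing object whenever the wider access could
   overrun the original expression, and its alias set is cleared.  */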
2649
2650 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2651 static GTY(()) tree spill_slot_decl;
2652
2653 tree
2654 get_spill_slot_decl (bool force_build_p)
2655 {
2656 tree d = spill_slot_decl;
2657 rtx rd;
2658
2659 if (d || !force_build_p)
2660 return d;
2661
2662 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2663 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2664 DECL_ARTIFICIAL (d) = 1;
2665 DECL_IGNORED_P (d) = 1;
2666 TREE_USED (d) = 1;
2667 spill_slot_decl = d;
2668
2669 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2670 MEM_NOTRAP_P (rd) = 1;
2671 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2672 attrs.alias = new_alias_set ();
2673 attrs.expr = d;
2674 set_mem_attrs (rd, &attrs);
2675 SET_DECL_RTL (d, rd);
2676
2677 return d;
2678 }
2679
2680 /* Given MEM, a result from assign_stack_local, fill in the memory
2681 attributes as appropriate for a register allocator spill slot.
2682 These slots are not aliasable by other memory. We arrange for
2683 them all to use a single MEM_EXPR, so that the aliasing code can
2684 work properly in the case of shared spill slots. */
2685
2686 void
2687 set_mem_attrs_for_spill (rtx mem)
2688 {
2689 rtx addr;
2690
2691 mem_attrs attrs (*get_mem_attrs (mem));
2692 attrs.expr = get_spill_slot_decl (true);
2693 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2694 attrs.addrspace = ADDR_SPACE_GENERIC;
2695
2696 /* We expect the incoming memory to be of the form:
2697 (mem:MODE (plus (reg sfp) (const_int offset)))
2698 with perhaps the plus missing for offset = 0. */
2699 addr = XEXP (mem, 0);
2700 attrs.offset_known_p = true;
2701 strip_offset (addr, &attrs.offset);
2702
2703 set_mem_attrs (mem, &attrs);
2704 MEM_NOTRAP_P (mem) = 1;
2705 }
2706
2707 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2708
2709 rtx_code_label *
2710 gen_label_rtx (void)
2711 {
2712 return as_a <rtx_code_label *> (
2713 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2714 NULL, label_num++, NULL));
2715 }
2716
2717 /* For procedure integration. */
2718
2719 /* Install new pointers to the first and last insns in the chain.
2720 Also, set cur_insn_uid to one higher than the last in use.
2721 Used for an inline-procedure after copying the insn chain. */
2722
2723 void
2724 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2725 {
2726 rtx_insn *insn;
2727
2728 set_first_insn (first);
2729 set_last_insn (last);
2730 cur_insn_uid = 0;
2731
2732 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2733 {
2734 int debug_count = 0;
2735
2736 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2737 cur_debug_insn_uid = 0;
2738
2739 for (insn = first; insn; insn = NEXT_INSN (insn))
2740 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2741 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2742 else
2743 {
2744 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2745 if (DEBUG_INSN_P (insn))
2746 debug_count++;
2747 }
2748
2749 if (debug_count)
2750 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2751 else
2752 cur_debug_insn_uid++;
2753 }
2754 else
2755 for (insn = first; insn; insn = NEXT_INSN (insn))
2756 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2757
2758 cur_insn_uid++;
2759 }
2760
2761 /* Go through all the RTL insn bodies and copy any invalid shared
2762 structure. This routine should only be called once. */
2763
2764 static void
2765 unshare_all_rtl_1 (rtx_insn *insn)
2766 {
2767 /* Unshare just about everything else. */
2768 unshare_all_rtl_in_chain (insn);
2769
2770 /* Make sure the addresses of stack slots found outside the insn chain
2771 (such as, in DECL_RTL of a variable) are not shared
2772 with the insn chain.
2773
2774 This special care is necessary when the stack slot MEM does not
2775 actually appear in the insn chain. If it does appear, its address
2776 is unshared from all else at that point. */
2777 unsigned int i;
2778 rtx temp;
2779 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2780 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2781 }
2782
2783 /* Go through all the RTL insn bodies and copy any invalid shared
2784 structure, again. This is a fairly expensive thing to do so it
2785 should be done sparingly. */
2786
2787 void
2788 unshare_all_rtl_again (rtx_insn *insn)
2789 {
2790 rtx_insn *p;
2791 tree decl;
2792
2793 for (p = insn; p; p = NEXT_INSN (p))
2794 if (INSN_P (p))
2795 {
2796 reset_used_flags (PATTERN (p));
2797 reset_used_flags (REG_NOTES (p));
2798 if (CALL_P (p))
2799 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2800 }
2801
2802 /* Make sure that virtual stack slots are not shared. */
2803 set_used_decls (DECL_INITIAL (cfun->decl));
2804
2805 /* Make sure that virtual parameters are not shared. */
2806 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2807 set_used_flags (DECL_RTL (decl));
2808
2809 rtx temp;
2810 unsigned int i;
2811 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2812 reset_used_flags (temp);
2813
2814 unshare_all_rtl_1 (insn);
2815 }
2816
2817 unsigned int
2818 unshare_all_rtl (void)
2819 {
2820 unshare_all_rtl_1 (get_insns ());
2821
2822 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2823 {
2824 if (DECL_RTL_SET_P (decl))
2825 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2826 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2827 }
2828
2829 return 0;
2830 }
2831
2832
2833 /* Check that ORIG is not marked when it should not be and mark ORIG as in
2834 use.  Recursively does the same for subexpressions.  */
2835
2836 static void
2837 verify_rtx_sharing (rtx orig, rtx insn)
2838 {
2839 rtx x = orig;
2840 int i;
2841 enum rtx_code code;
2842 const char *format_ptr;
2843
2844 if (x == 0)
2845 return;
2846
2847 code = GET_CODE (x);
2848
2849 /* These types may be freely shared. */
2850
2851 switch (code)
2852 {
2853 case REG:
2854 case DEBUG_EXPR:
2855 case VALUE:
2856 CASE_CONST_ANY:
2857 case SYMBOL_REF:
2858 case LABEL_REF:
2859 case CODE_LABEL:
2860 case PC:
2861 case CC0:
2862 case RETURN:
2863 case SIMPLE_RETURN:
2864 case SCRATCH:
2865 /* SCRATCH must be shared because each one represents a distinct value. */
2866 return;
2867 case CLOBBER:
2868 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2869 clobbers or clobbers of hard registers that originated as pseudos.
2870 This is needed to allow safe register renaming. */
2871 if (REG_P (XEXP (x, 0))
2872 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2873 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2874 return;
2875 break;
2876
2877 case CONST:
2878 if (shared_const_p (orig))
2879 return;
2880 break;
2881
2882 case MEM:
2883 /* A MEM is allowed to be shared if its address is constant. */
2884 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2885 || reload_completed || reload_in_progress)
2886 return;
2887
2888 break;
2889
2890 default:
2891 break;
2892 }
2893
2894 /* This rtx may not be shared.  If it has already been seen,
2895 report the invalid sharing.  */
2896 if (flag_checking && RTX_FLAG (x, used))
2897 {
2898 error ("invalid rtl sharing found in the insn");
2899 debug_rtx (insn);
2900 error ("shared rtx");
2901 debug_rtx (x);
2902 internal_error ("internal consistency failure");
2903 }
2904 gcc_assert (!RTX_FLAG (x, used));
2905
2906 RTX_FLAG (x, used) = 1;
2907
2908 /* Now scan the subexpressions recursively. */
2909
2910 format_ptr = GET_RTX_FORMAT (code);
2911
2912 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2913 {
2914 switch (*format_ptr++)
2915 {
2916 case 'e':
2917 verify_rtx_sharing (XEXP (x, i), insn);
2918 break;
2919
2920 case 'E':
2921 if (XVEC (x, i) != NULL)
2922 {
2923 int j;
2924 int len = XVECLEN (x, i);
2925
2926 for (j = 0; j < len; j++)
2927 {
2928 /* We allow sharing of ASM_OPERANDS inside single
2929 instruction. */
2930 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2931 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2932 == ASM_OPERANDS))
2933 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2934 else
2935 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2936 }
2937 }
2938 break;
2939 }
2940 }
2941 return;
2942 }
2943
2944 /* Reset used-flags for INSN. */
2945
2946 static void
2947 reset_insn_used_flags (rtx insn)
2948 {
2949 gcc_assert (INSN_P (insn));
2950 reset_used_flags (PATTERN (insn));
2951 reset_used_flags (REG_NOTES (insn));
2952 if (CALL_P (insn))
2953 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2954 }
2955
2956 /* Go through all the RTL insn bodies and clear all the USED bits. */
2957
2958 static void
2959 reset_all_used_flags (void)
2960 {
2961 rtx_insn *p;
2962
2963 for (p = get_insns (); p; p = NEXT_INSN (p))
2964 if (INSN_P (p))
2965 {
2966 rtx pat = PATTERN (p);
2967 if (GET_CODE (pat) != SEQUENCE)
2968 reset_insn_used_flags (p);
2969 else
2970 {
2971 gcc_assert (REG_NOTES (p) == NULL);
2972 for (int i = 0; i < XVECLEN (pat, 0); i++)
2973 {
2974 rtx insn = XVECEXP (pat, 0, i);
2975 if (INSN_P (insn))
2976 reset_insn_used_flags (insn);
2977 }
2978 }
2979 }
2980 }
2981
2982 /* Verify sharing in INSN. */
2983
2984 static void
2985 verify_insn_sharing (rtx insn)
2986 {
2987 gcc_assert (INSN_P (insn));
2988 verify_rtx_sharing (PATTERN (insn), insn);
2989 verify_rtx_sharing (REG_NOTES (insn), insn);
2990 if (CALL_P (insn))
2991 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2992 }
2993
2994 /* Go through all the RTL insn bodies and check that there is no unexpected
2995 sharing in between the subexpressions. */
2996
2997 DEBUG_FUNCTION void
2998 verify_rtl_sharing (void)
2999 {
3000 rtx_insn *p;
3001
3002 timevar_push (TV_VERIFY_RTL_SHARING);
3003
3004 reset_all_used_flags ();
3005
3006 for (p = get_insns (); p; p = NEXT_INSN (p))
3007 if (INSN_P (p))
3008 {
3009 rtx pat = PATTERN (p);
3010 if (GET_CODE (pat) != SEQUENCE)
3011 verify_insn_sharing (p);
3012 else
3013 for (int i = 0; i < XVECLEN (pat, 0); i++)
3014 {
3015 rtx insn = XVECEXP (pat, 0, i);
3016 if (INSN_P (insn))
3017 verify_insn_sharing (insn);
3018 }
3019 }
3020
3021 reset_all_used_flags ();
3022
3023 timevar_pop (TV_VERIFY_RTL_SHARING);
3024 }
3025
3026 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3027 Assumes the mark bits are cleared at entry. */
3028
3029 void
3030 unshare_all_rtl_in_chain (rtx_insn *insn)
3031 {
3032 for (; insn; insn = NEXT_INSN (insn))
3033 if (INSN_P (insn))
3034 {
3035 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3036 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3037 if (CALL_P (insn))
3038 CALL_INSN_FUNCTION_USAGE (insn)
3039 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3040 }
3041 }
3042
3043 /* Go through all virtual stack slots of a function and mark them as
3044 shared. We never replace the DECL_RTLs themselves with a copy,
3045 but expressions mentioned in a DECL_RTL cannot be shared with
3046 expressions in the instruction stream.
3047
3048 Note that reload may convert pseudo registers into memories in-place.
3049 Pseudo registers are always shared, but MEMs never are. Thus if we
3050 reset the used flags on MEMs in the instruction stream, we must set
3051 them again on MEMs that appear in DECL_RTLs. */
3052
3053 static void
3054 set_used_decls (tree blk)
3055 {
3056 tree t;
3057
3058 /* Mark decls. */
3059 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3060 if (DECL_RTL_SET_P (t))
3061 set_used_flags (DECL_RTL (t));
3062
3063 /* Now process sub-blocks. */
3064 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3065 set_used_decls (t);
3066 }
3067
3068 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3069 Recursively does the same for subexpressions. Uses
3070 copy_rtx_if_shared_1 to reduce stack space. */
3071
3072 rtx
3073 copy_rtx_if_shared (rtx orig)
3074 {
3075 copy_rtx_if_shared_1 (&orig);
3076 return orig;
3077 }
3078
3079 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3080 use. Recursively does the same for subexpressions. */
3081
3082 static void
3083 copy_rtx_if_shared_1 (rtx *orig1)
3084 {
3085 rtx x;
3086 int i;
3087 enum rtx_code code;
3088 rtx *last_ptr;
3089 const char *format_ptr;
3090 int copied = 0;
3091 int length;
3092
3093 /* Repeat is used to turn tail-recursion into iteration. */
3094 repeat:
3095 x = *orig1;
3096
3097 if (x == 0)
3098 return;
3099
3100 code = GET_CODE (x);
3101
3102 /* These types may be freely shared. */
3103
3104 switch (code)
3105 {
3106 case REG:
3107 case DEBUG_EXPR:
3108 case VALUE:
3109 CASE_CONST_ANY:
3110 case SYMBOL_REF:
3111 case LABEL_REF:
3112 case CODE_LABEL:
3113 case PC:
3114 case CC0:
3115 case RETURN:
3116 case SIMPLE_RETURN:
3117 case SCRATCH:
3118 /* SCRATCH must be shared because each one represents a distinct value. */
3119 return;
3120 case CLOBBER:
3121 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3122 clobbers or clobbers of hard registers that originated as pseudos.
3123 This is needed to allow safe register renaming. */
3124 if (REG_P (XEXP (x, 0))
3125 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3126 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3127 return;
3128 break;
3129
3130 case CONST:
3131 if (shared_const_p (x))
3132 return;
3133 break;
3134
3135 case DEBUG_INSN:
3136 case INSN:
3137 case JUMP_INSN:
3138 case CALL_INSN:
3139 case NOTE:
3140 case BARRIER:
3141 /* The chain of insns is not being copied. */
3142 return;
3143
3144 default:
3145 break;
3146 }
3147
3148 /* This rtx may not be shared. If it has already been seen,
3149 replace it with a copy of itself. */
3150
3151 if (RTX_FLAG (x, used))
3152 {
3153 x = shallow_copy_rtx (x);
3154 copied = 1;
3155 }
3156 RTX_FLAG (x, used) = 1;
3157
3158 /* Now scan the subexpressions recursively.
3159 We can store any replaced subexpressions directly into X
3160 since we know X is not shared! Any vectors in X
3161 must be copied if X was copied. */
3162
3163 format_ptr = GET_RTX_FORMAT (code);
3164 length = GET_RTX_LENGTH (code);
3165 last_ptr = NULL;
3166
3167 for (i = 0; i < length; i++)
3168 {
3169 switch (*format_ptr++)
3170 {
3171 case 'e':
3172 if (last_ptr)
3173 copy_rtx_if_shared_1 (last_ptr);
3174 last_ptr = &XEXP (x, i);
3175 break;
3176
3177 case 'E':
3178 if (XVEC (x, i) != NULL)
3179 {
3180 int j;
3181 int len = XVECLEN (x, i);
3182
3183 /* Copy the vector iff I copied the rtx and the length
3184 is nonzero. */
3185 if (copied && len > 0)
3186 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3187
3188 /* Call recursively on all inside the vector. */
3189 for (j = 0; j < len; j++)
3190 {
3191 if (last_ptr)
3192 copy_rtx_if_shared_1 (last_ptr);
3193 last_ptr = &XVECEXP (x, i, j);
3194 }
3195 }
3196 break;
3197 }
3198 }
3199 *orig1 = x;
3200 if (last_ptr)
3201 {
3202 orig1 = last_ptr;
3203 goto repeat;
3204 }
3205 return;
3206 }
3207
3208 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3209
3210 static void
3211 mark_used_flags (rtx x, int flag)
3212 {
3213 int i, j;
3214 enum rtx_code code;
3215 const char *format_ptr;
3216 int length;
3217
3218 /* Repeat is used to turn tail-recursion into iteration. */
3219 repeat:
3220 if (x == 0)
3221 return;
3222
3223 code = GET_CODE (x);
3224
3225 /* These types may be freely shared so we needn't do any resetting
3226 for them. */
3227
3228 switch (code)
3229 {
3230 case REG:
3231 case DEBUG_EXPR:
3232 case VALUE:
3233 CASE_CONST_ANY:
3234 case SYMBOL_REF:
3235 case CODE_LABEL:
3236 case PC:
3237 case CC0:
3238 case RETURN:
3239 case SIMPLE_RETURN:
3240 return;
3241
3242 case DEBUG_INSN:
3243 case INSN:
3244 case JUMP_INSN:
3245 case CALL_INSN:
3246 case NOTE:
3247 case LABEL_REF:
3248 case BARRIER:
3249 /* The chain of insns is not being copied. */
3250 return;
3251
3252 default:
3253 break;
3254 }
3255
3256 RTX_FLAG (x, used) = flag;
3257
3258 format_ptr = GET_RTX_FORMAT (code);
3259 length = GET_RTX_LENGTH (code);
3260
3261 for (i = 0; i < length; i++)
3262 {
3263 switch (*format_ptr++)
3264 {
3265 case 'e':
3266 if (i == length-1)
3267 {
3268 x = XEXP (x, i);
3269 goto repeat;
3270 }
3271 mark_used_flags (XEXP (x, i), flag);
3272 break;
3273
3274 case 'E':
3275 for (j = 0; j < XVECLEN (x, i); j++)
3276 mark_used_flags (XVECEXP (x, i, j), flag);
3277 break;
3278 }
3279 }
3280 }
3281
3282 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3283 to look for shared sub-parts. */
3284
3285 void
3286 reset_used_flags (rtx x)
3287 {
3288 mark_used_flags (x, 0);
3289 }
3290
3291 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3292 to look for shared sub-parts. */
3293
3294 void
3295 set_used_flags (rtx x)
3296 {
3297 mark_used_flags (x, 1);
3298 }
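/* The usual protocol for these flags is: clear them over the rtl of
   interest, then walk it with copy_rtx_if_shared so that anything seen a
   second time gets copied.  A minimal sketch for a single insn (INSN is
   hypothetical):

	reset_used_flags (PATTERN (insn));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_in_chain above does the copying half of this for every
   insn in a chain, including REG_NOTES and CALL_INSN_FUNCTION_USAGE; it
   assumes the used bits are already clear on entry.  */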
3299
3300 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3301 Return X or the rtx for the pseudo reg the value of X was copied into.
3302 OTHER must be valid as a SET_DEST. */
3303
3304 rtx
3305 make_safe_from (rtx x, rtx other)
3306 {
3307 while (1)
3308 switch (GET_CODE (other))
3309 {
3310 case SUBREG:
3311 other = SUBREG_REG (other);
3312 break;
3313 case STRICT_LOW_PART:
3314 case SIGN_EXTEND:
3315 case ZERO_EXTEND:
3316 other = XEXP (other, 0);
3317 break;
3318 default:
3319 goto done;
3320 }
3321 done:
3322 if ((MEM_P (other)
3323 && ! CONSTANT_P (x)
3324 && !REG_P (x)
3325 && GET_CODE (x) != SUBREG)
3326 || (REG_P (other)
3327 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3328 || reg_mentioned_p (other, x))))
3329 {
3330 rtx temp = gen_reg_rtx (GET_MODE (x));
3331 emit_move_insn (temp, x);
3332 return temp;
3333 }
3334 return x;
3335 }
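/* For example, before emitting code that stores into OTHER and then uses
   X, a caller can do (a sketch; X and OTHER are hypothetical):

	x = make_safe_from (x, other);

   If the store could clobber X -- e.g. OTHER is a hard register or a
   register mentioned in X, or OTHER is a MEM while X is a general
   expression -- X is first copied into a fresh pseudo and that pseudo is
   returned; otherwise X is returned unchanged.  */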
3336
3337 /* Emission of insns (adding them to the doubly-linked list). */
3338
3339 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3340
3341 rtx_insn *
3342 get_last_insn_anywhere (void)
3343 {
3344 struct sequence_stack *seq;
3345 for (seq = get_current_sequence (); seq; seq = seq->next)
3346 if (seq->last != 0)
3347 return seq->last;
3348 return 0;
3349 }
3350
3351 /* Return the first nonnote insn emitted in current sequence or current
3352 function. This routine looks inside SEQUENCEs. */
3353
3354 rtx_insn *
3355 get_first_nonnote_insn (void)
3356 {
3357 rtx_insn *insn = get_insns ();
3358
3359 if (insn)
3360 {
3361 if (NOTE_P (insn))
3362 for (insn = next_insn (insn);
3363 insn && NOTE_P (insn);
3364 insn = next_insn (insn))
3365 continue;
3366 else
3367 {
3368 if (NONJUMP_INSN_P (insn)
3369 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3370 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3371 }
3372 }
3373
3374 return insn;
3375 }
3376
3377 /* Return the last nonnote insn emitted in current sequence or current
3378 function. This routine looks inside SEQUENCEs. */
3379
3380 rtx_insn *
3381 get_last_nonnote_insn (void)
3382 {
3383 rtx_insn *insn = get_last_insn ();
3384
3385 if (insn)
3386 {
3387 if (NOTE_P (insn))
3388 for (insn = previous_insn (insn);
3389 insn && NOTE_P (insn);
3390 insn = previous_insn (insn))
3391 continue;
3392 else
3393 {
3394 if (NONJUMP_INSN_P (insn))
3395 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3396 insn = seq->insn (seq->len () - 1);
3397 }
3398 }
3399
3400 return insn;
3401 }
3402
3403 /* Return the number of actual (non-debug) insns emitted in this
3404 function. */
3405
3406 int
3407 get_max_insn_count (void)
3408 {
3409 int n = cur_insn_uid;
3410
3411 /* The table size must be stable across -g, to avoid codegen
3412 differences due to debug insns, and not be affected by
3413 -fmin-insn-uid, to avoid excessive table size and to simplify
3414 debugging of -fcompare-debug failures. */
3415 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3416 n -= cur_debug_insn_uid;
3417 else
3418 n -= MIN_NONDEBUG_INSN_UID;
3419
3420 return n;
3421 }
3422
3423
3424 /* Return the next insn. If it is a SEQUENCE, return the first insn
3425 of the sequence. */
3426
3427 rtx_insn *
3428 next_insn (rtx_insn *insn)
3429 {
3430 if (insn)
3431 {
3432 insn = NEXT_INSN (insn);
3433 if (insn && NONJUMP_INSN_P (insn)
3434 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3435 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3436 }
3437
3438 return insn;
3439 }
3440
3441 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3442 of the sequence. */
3443
3444 rtx_insn *
3445 previous_insn (rtx_insn *insn)
3446 {
3447 if (insn)
3448 {
3449 insn = PREV_INSN (insn);
3450 if (insn && NONJUMP_INSN_P (insn))
3451 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3452 insn = seq->insn (seq->len () - 1);
3453 }
3454
3455 return insn;
3456 }
3457
3458 /* Return the next insn after INSN that is not a NOTE. This routine does not
3459 look inside SEQUENCEs. */
3460
3461 rtx_insn *
3462 next_nonnote_insn (rtx_insn *insn)
3463 {
3464 while (insn)
3465 {
3466 insn = NEXT_INSN (insn);
3467 if (insn == 0 || !NOTE_P (insn))
3468 break;
3469 }
3470
3471 return insn;
3472 }
3473
3474 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3475 routine does not look inside SEQUENCEs. */
3476
3477 rtx_insn *
3478 next_nondebug_insn (rtx_insn *insn)
3479 {
3480 while (insn)
3481 {
3482 insn = NEXT_INSN (insn);
3483 if (insn == 0 || !DEBUG_INSN_P (insn))
3484 break;
3485 }
3486
3487 return insn;
3488 }
3489
3490 /* Return the previous insn before INSN that is not a NOTE. This routine does
3491 not look inside SEQUENCEs. */
3492
3493 rtx_insn *
3494 prev_nonnote_insn (rtx_insn *insn)
3495 {
3496 while (insn)
3497 {
3498 insn = PREV_INSN (insn);
3499 if (insn == 0 || !NOTE_P (insn))
3500 break;
3501 }
3502
3503 return insn;
3504 }
3505
3506 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3507 This routine does not look inside SEQUENCEs. */
3508
3509 rtx_insn *
3510 prev_nondebug_insn (rtx_insn *insn)
3511 {
3512 while (insn)
3513 {
3514 insn = PREV_INSN (insn);
3515 if (insn == 0 || !DEBUG_INSN_P (insn))
3516 break;
3517 }
3518
3519 return insn;
3520 }
3521
3522 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3523 This routine does not look inside SEQUENCEs. */
3524
3525 rtx_insn *
3526 next_nonnote_nondebug_insn (rtx_insn *insn)
3527 {
3528 while (insn)
3529 {
3530 insn = NEXT_INSN (insn);
3531 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3532 break;
3533 }
3534
3535 return insn;
3536 }
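/* These walkers make it easy to scan only the "real" instructions.  For
   instance, to visit every non-note, non-debug insn of the current
   function one can write (a sketch; process is a hypothetical callback):

	for (rtx_insn *insn = get_insns (); insn;
	     insn = next_nonnote_nondebug_insn (insn))
	  if (INSN_P (insn))
	    process (insn);
*/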
3537
3538 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3539 but stop the search before we enter another basic block. This
3540 routine does not look inside SEQUENCEs. */
3541
3542 rtx_insn *
3543 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3544 {
3545 while (insn)
3546 {
3547 insn = NEXT_INSN (insn);
3548 if (insn == 0)
3549 break;
3550 if (DEBUG_INSN_P (insn))
3551 continue;
3552 if (!NOTE_P (insn))
3553 break;
3554 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3555 return NULL;
3556 }
3557
3558 return insn;
3559 }
3560
3561 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3562 This routine does not look inside SEQUENCEs. */
3563
3564 rtx_insn *
3565 prev_nonnote_nondebug_insn (rtx_insn *insn)
3566 {
3567 while (insn)
3568 {
3569 insn = PREV_INSN (insn);
3570 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3571 break;
3572 }
3573
3574 return insn;
3575 }
3576
3577 /* Return the previous insn before INSN that is not a NOTE nor
3578 DEBUG_INSN, but stop the search before we enter another basic
3579 block. This routine does not look inside SEQUENCEs. */
3580
3581 rtx_insn *
3582 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3583 {
3584 while (insn)
3585 {
3586 insn = PREV_INSN (insn);
3587 if (insn == 0)
3588 break;
3589 if (DEBUG_INSN_P (insn))
3590 continue;
3591 if (!NOTE_P (insn))
3592 break;
3593 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3594 return NULL;
3595 }
3596
3597 return insn;
3598 }
3599
3600 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3601 or 0, if there is none. This routine does not look inside
3602 SEQUENCEs. */
3603
3604 rtx_insn *
3605 next_real_insn (rtx uncast_insn)
3606 {
3607 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3608
3609 while (insn)
3610 {
3611 insn = NEXT_INSN (insn);
3612 if (insn == 0 || INSN_P (insn))
3613 break;
3614 }
3615
3616 return insn;
3617 }
3618
3619 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3620 or 0, if there is none. This routine does not look inside
3621 SEQUENCEs. */
3622
3623 rtx_insn *
3624 prev_real_insn (rtx_insn *insn)
3625 {
3626 while (insn)
3627 {
3628 insn = PREV_INSN (insn);
3629 if (insn == 0 || INSN_P (insn))
3630 break;
3631 }
3632
3633 return insn;
3634 }
3635
3636 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3637 or 0, if there is none. This routine does not look inside
3638 SEQUENCEs. */
3639
3640 rtx_insn *
3641 next_real_nondebug_insn (rtx uncast_insn)
3642 {
3643 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3644
3645 while (insn)
3646 {
3647 insn = NEXT_INSN (insn);
3648 if (insn == 0 || NONDEBUG_INSN_P (insn))
3649 break;
3650 }
3651
3652 return insn;
3653 }
3654
3655 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3656 or 0, if there is none. This routine does not look inside
3657 SEQUENCEs. */
3658
3659 rtx_insn *
3660 prev_real_nondebug_insn (rtx_insn *insn)
3661 {
3662 while (insn)
3663 {
3664 insn = PREV_INSN (insn);
3665 if (insn == 0 || NONDEBUG_INSN_P (insn))
3666 break;
3667 }
3668
3669 return insn;
3670 }
3671
3672 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3673 This routine does not look inside SEQUENCEs. */
3674
3675 rtx_call_insn *
3676 last_call_insn (void)
3677 {
3678 rtx_insn *insn;
3679
3680 for (insn = get_last_insn ();
3681 insn && !CALL_P (insn);
3682 insn = PREV_INSN (insn))
3683 ;
3684
3685 return safe_as_a <rtx_call_insn *> (insn);
3686 }
3687
3688 /* Find the next insn after INSN that really does something. This routine
3689 does not look inside SEQUENCEs. After reload this also skips over
3690 standalone USE and CLOBBER insns. */
3691
3692 int
3693 active_insn_p (const rtx_insn *insn)
3694 {
3695 return (CALL_P (insn) || JUMP_P (insn)
3696 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3697 || (NONJUMP_INSN_P (insn)
3698 && (! reload_completed
3699 || (GET_CODE (PATTERN (insn)) != USE
3700 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3701 }
3702
3703 rtx_insn *
3704 next_active_insn (rtx_insn *insn)
3705 {
3706 while (insn)
3707 {
3708 insn = NEXT_INSN (insn);
3709 if (insn == 0 || active_insn_p (insn))
3710 break;
3711 }
3712
3713 return insn;
3714 }
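
/* Illustrative sketch (editorial addition, not part of the original source):
   next_active_insn is typically used to walk only the insns that "really do
   something", skipping notes, labels and barriers and, after reload,
   standalone USE and CLOBBER insns.  The loop body is a placeholder.

     for (rtx_insn *insn = next_active_insn (get_insns ());
          insn != NULL;
          insn = next_active_insn (insn))
       {
         ... examine or transform INSN ...
       }
*/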
3715
3716 /* Find the last insn before INSN that really does something. This routine
3717 does not look inside SEQUENCEs. After reload this also skips over
3718 standalone USE and CLOBBER insns. */
3719
3720 rtx_insn *
3721 prev_active_insn (rtx_insn *insn)
3722 {
3723 while (insn)
3724 {
3725 insn = PREV_INSN (insn);
3726 if (insn == 0 || active_insn_p (insn))
3727 break;
3728 }
3729
3730 return insn;
3731 }
3732
3733 /* Return the next insn that uses CC0 after INSN, which is assumed to
3734 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3735 applied to the result of this function should yield INSN).
3736
3737 Normally, this is simply the next insn. However, if a REG_CC_USER note
3738 is present, it contains the insn that uses CC0.
3739
3740 Return 0 if we can't find the insn. */
3741
3742 rtx_insn *
3743 next_cc0_user (rtx_insn *insn)
3744 {
3745 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3746
3747 if (note)
3748 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3749
3750 insn = next_nonnote_insn (insn);
3751 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3752 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3753
3754 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3755 return insn;
3756
3757 return 0;
3758 }
3759
3760 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3761 note, it is the previous insn. */
3762
3763 rtx_insn *
3764 prev_cc0_setter (rtx_insn *insn)
3765 {
3766 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3767
3768 if (note)
3769 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3770
3771 insn = prev_nonnote_insn (insn);
3772 gcc_assert (sets_cc0_p (PATTERN (insn)));
3773
3774 return insn;
3775 }
3776
3777 /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3778
3779 static int
3780 find_auto_inc (const_rtx x, const_rtx reg)
3781 {
3782 subrtx_iterator::array_type array;
3783 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3784 {
3785 const_rtx x = *iter;
3786 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3787 && rtx_equal_p (reg, XEXP (x, 0)))
3788 return true;
3789 }
3790 return false;
3791 }
3792
3793 /* Increment the label uses for all labels present in rtx. */
3794
3795 static void
3796 mark_label_nuses (rtx x)
3797 {
3798 enum rtx_code code;
3799 int i, j;
3800 const char *fmt;
3801
3802 code = GET_CODE (x);
3803 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3804 LABEL_NUSES (label_ref_label (x))++;
3805
3806 fmt = GET_RTX_FORMAT (code);
3807 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3808 {
3809 if (fmt[i] == 'e')
3810 mark_label_nuses (XEXP (x, i));
3811 else if (fmt[i] == 'E')
3812 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3813 mark_label_nuses (XVECEXP (x, i, j));
3814 }
3815 }
3816
3817
3818 /* Try splitting insns that can be split for better scheduling.
3819 PAT is the pattern which might split.
3820 TRIAL is the insn providing PAT.
3821 LAST is nonzero if we should return the last insn of the sequence produced.
3822
3823 If this routine succeeds in splitting, it returns the first or last
3824 replacement insn depending on the value of LAST. Otherwise, it
3825 returns TRIAL. If the insn to be returned can be split, it will be. */
3826
3827 rtx_insn *
3828 try_split (rtx pat, rtx_insn *trial, int last)
3829 {
3830 rtx_insn *before, *after;
3831 rtx note;
3832 rtx_insn *seq, *tem;
3833 profile_probability probability;
3834 rtx_insn *insn_last, *insn;
3835 int njumps = 0;
3836 rtx_insn *call_insn = NULL;
3837
3838 /* We're not good at redistributing frame information. */
3839 if (RTX_FRAME_RELATED_P (trial))
3840 return trial;
3841
3842 if (any_condjump_p (trial)
3843 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3844 split_branch_probability
3845 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3846 else
3847 split_branch_probability = profile_probability::uninitialized ();
3848
3849 probability = split_branch_probability;
3850
3851 seq = split_insns (pat, trial);
3852
3853 split_branch_probability = profile_probability::uninitialized ();
3854
3855 if (!seq)
3856 return trial;
3857
3858 /* Avoid infinite loop if any insn of the result matches
3859 the original pattern. */
3860 insn_last = seq;
3861 while (1)
3862 {
3863 if (INSN_P (insn_last)
3864 && rtx_equal_p (PATTERN (insn_last), pat))
3865 return trial;
3866 if (!NEXT_INSN (insn_last))
3867 break;
3868 insn_last = NEXT_INSN (insn_last);
3869 }
3870
3871 /* We will be adding the new sequence to the function. The splitters
3872 may have introduced invalid RTL sharing, so unshare the sequence now. */
3873 unshare_all_rtl_in_chain (seq);
3874
3875 /* Mark labels and copy flags. */
3876 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3877 {
3878 if (JUMP_P (insn))
3879 {
3880 if (JUMP_P (trial))
3881 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3882 mark_jump_label (PATTERN (insn), insn, 0);
3883 njumps++;
3884 if (probability.initialized_p ()
3885 && any_condjump_p (insn)
3886 && !find_reg_note (insn, REG_BR_PROB, 0))
3887 {
3888 /* We can preserve the REG_BR_PROB notes only if exactly
3889 one jump is created, otherwise the machine description
3890 is responsible for this step using the
3891 split_branch_probability variable. */
3892 gcc_assert (njumps == 1);
3893 add_reg_br_prob_note (insn, probability);
3894 }
3895 }
3896 }
3897
3898 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3899 in SEQ and copy any additional information across. */
3900 if (CALL_P (trial))
3901 {
3902 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3903 if (CALL_P (insn))
3904 {
3905 gcc_assert (call_insn == NULL_RTX);
3906 call_insn = insn;
3907
3908 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3909 target may have explicitly specified. */
3910 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3911 while (*p)
3912 p = &XEXP (*p, 1);
3913 *p = CALL_INSN_FUNCTION_USAGE (trial);
3914
3915 /* If the old call was a sibling call, the new one must
3916 be too. */
3917 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3918 }
3919 }
3920
3921 /* Copy notes, particularly those related to the CFG. */
3922 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3923 {
3924 switch (REG_NOTE_KIND (note))
3925 {
3926 case REG_EH_REGION:
3927 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3928 break;
3929
3930 case REG_NORETURN:
3931 case REG_SETJMP:
3932 case REG_TM:
3933 case REG_CALL_NOCF_CHECK:
3934 case REG_CALL_ARG_LOCATION:
3935 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3936 {
3937 if (CALL_P (insn))
3938 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3939 }
3940 break;
3941
3942 case REG_NON_LOCAL_GOTO:
3943 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3944 {
3945 if (JUMP_P (insn))
3946 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3947 }
3948 break;
3949
3950 case REG_INC:
3951 if (!AUTO_INC_DEC)
3952 break;
3953
3954 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3955 {
3956 rtx reg = XEXP (note, 0);
3957 if (!FIND_REG_INC_NOTE (insn, reg)
3958 && find_auto_inc (PATTERN (insn), reg))
3959 add_reg_note (insn, REG_INC, reg);
3960 }
3961 break;
3962
3963 case REG_ARGS_SIZE:
3964 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3965 break;
3966
3967 case REG_CALL_DECL:
3968 gcc_assert (call_insn != NULL_RTX);
3969 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3970 break;
3971
3972 default:
3973 break;
3974 }
3975 }
3976
3977 /* If there are LABELS inside the split insns, increment the
3978 usage count so we don't delete the label. */
3979 if (INSN_P (trial))
3980 {
3981 insn = insn_last;
3982 while (insn != NULL_RTX)
3983 {
3984 /* JUMP_P insns have already been "marked" above. */
3985 if (NONJUMP_INSN_P (insn))
3986 mark_label_nuses (PATTERN (insn));
3987
3988 insn = PREV_INSN (insn);
3989 }
3990 }
3991
3992 before = PREV_INSN (trial);
3993 after = NEXT_INSN (trial);
3994
3995 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3996
3997 delete_insn (trial);
3998
3999 /* Recursively call try_split for each new insn created; by the
4000 time control returns here that insn will be fully split, so
4001 set LAST and continue from the insn after the one returned.
4002 We can't use next_active_insn here since AFTER may be a note.
4003 Ignore deleted insns, which can occur if not optimizing. */
4004 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4005 if (! tem->deleted () && INSN_P (tem))
4006 tem = try_split (PATTERN (tem), tem, 1);
4007
4008 /* Return either the first or the last insn, depending on which was
4009 requested. */
4010 return last
4011 ? (after ? PREV_INSN (after) : get_last_insn ())
4012 : NEXT_INSN (before);
4013 }
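
/* Illustrative sketch (editorial addition): how a caller might drive
   try_split over a whole insn chain.  NEXT is fetched before the call
   because try_split deletes TRIAL and emits the replacement sequence in
   its place.  The real driver of this kind lives in recog.c
   (split_all_insns and friends); this only shows the idiom.

     for (rtx_insn *insn = get_insns (), *next; insn; insn = next)
       {
         next = NEXT_INSN (insn);
         if (INSN_P (insn) && !insn->deleted ())
           try_split (PATTERN (insn), insn, 0);
       }
*/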
4014
4015 /* Make and return an INSN rtx, initializing all its slots.
4016 Store PATTERN in the pattern slots. */
4017
4018 rtx_insn *
4019 make_insn_raw (rtx pattern)
4020 {
4021 rtx_insn *insn;
4022
4023 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
4024
4025 INSN_UID (insn) = cur_insn_uid++;
4026 PATTERN (insn) = pattern;
4027 INSN_CODE (insn) = -1;
4028 REG_NOTES (insn) = NULL;
4029 INSN_LOCATION (insn) = curr_insn_location ();
4030 BLOCK_FOR_INSN (insn) = NULL;
4031
4032 #ifdef ENABLE_RTL_CHECKING
4033 if (insn
4034 && INSN_P (insn)
4035 && (returnjump_p (insn)
4036 || (GET_CODE (insn) == SET
4037 && SET_DEST (insn) == pc_rtx)))
4038 {
4039 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4040 debug_rtx (insn);
4041 }
4042 #endif
4043
4044 return insn;
4045 }
4046
4047 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4048
4049 static rtx_insn *
4050 make_debug_insn_raw (rtx pattern)
4051 {
4052 rtx_debug_insn *insn;
4053
4054 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4055 INSN_UID (insn) = cur_debug_insn_uid++;
4056 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4057 INSN_UID (insn) = cur_insn_uid++;
4058
4059 PATTERN (insn) = pattern;
4060 INSN_CODE (insn) = -1;
4061 REG_NOTES (insn) = NULL;
4062 INSN_LOCATION (insn) = curr_insn_location ();
4063 BLOCK_FOR_INSN (insn) = NULL;
4064
4065 return insn;
4066 }
4067
4068 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4069
4070 static rtx_insn *
4071 make_jump_insn_raw (rtx pattern)
4072 {
4073 rtx_jump_insn *insn;
4074
4075 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4076 INSN_UID (insn) = cur_insn_uid++;
4077
4078 PATTERN (insn) = pattern;
4079 INSN_CODE (insn) = -1;
4080 REG_NOTES (insn) = NULL;
4081 JUMP_LABEL (insn) = NULL;
4082 INSN_LOCATION (insn) = curr_insn_location ();
4083 BLOCK_FOR_INSN (insn) = NULL;
4084
4085 return insn;
4086 }
4087
4088 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4089
4090 static rtx_insn *
4091 make_call_insn_raw (rtx pattern)
4092 {
4093 rtx_call_insn *insn;
4094
4095 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4096 INSN_UID (insn) = cur_insn_uid++;
4097
4098 PATTERN (insn) = pattern;
4099 INSN_CODE (insn) = -1;
4100 REG_NOTES (insn) = NULL;
4101 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4102 INSN_LOCATION (insn) = curr_insn_location ();
4103 BLOCK_FOR_INSN (insn) = NULL;
4104
4105 return insn;
4106 }
4107
4108 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4109
4110 static rtx_note *
4111 make_note_raw (enum insn_note subtype)
4112 {
4113 /* Some notes are never created this way at all. These notes are
4114 only created by patching out insns. */
4115 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4116 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4117
4118 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4119 INSN_UID (note) = cur_insn_uid++;
4120 NOTE_KIND (note) = subtype;
4121 BLOCK_FOR_INSN (note) = NULL;
4122 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4123 return note;
4124 }
4125
4126 /* Link INSN into the doubly-linked list between PREV and NEXT.
4127 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4128 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4129
4130 static inline void
4131 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4132 {
4133 SET_PREV_INSN (insn) = prev;
4134 SET_NEXT_INSN (insn) = next;
4135 if (prev != NULL)
4136 {
4137 SET_NEXT_INSN (prev) = insn;
4138 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4139 {
4140 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4141 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4142 }
4143 }
4144 if (next != NULL)
4145 {
4146 SET_PREV_INSN (next) = insn;
4147 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4148 {
4149 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4150 SET_PREV_INSN (sequence->insn (0)) = insn;
4151 }
4152 }
4153
4154 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4155 {
4156 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4157 SET_PREV_INSN (sequence->insn (0)) = prev;
4158 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4159 }
4160 }
4161
4162 /* Add INSN to the end of the doubly-linked list.
4163 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4164
4165 void
4166 add_insn (rtx_insn *insn)
4167 {
4168 rtx_insn *prev = get_last_insn ();
4169 link_insn_into_chain (insn, prev, NULL);
4170 if (get_insns () == NULL)
4171 set_first_insn (insn);
4172 set_last_insn (insn);
4173 }
4174
4175 /* Add INSN into the doubly-linked list after insn AFTER. */
4176
4177 static void
4178 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4179 {
4180 rtx_insn *next = NEXT_INSN (after);
4181
4182 gcc_assert (!optimize || !after->deleted ());
4183
4184 link_insn_into_chain (insn, after, next);
4185
4186 if (next == NULL)
4187 {
4188 struct sequence_stack *seq;
4189
4190 for (seq = get_current_sequence (); seq; seq = seq->next)
4191 if (after == seq->last)
4192 {
4193 seq->last = insn;
4194 break;
4195 }
4196 }
4197 }
4198
4199 /* Add INSN into the doubly-linked list before insn BEFORE. */
4200
4201 static void
4202 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4203 {
4204 rtx_insn *prev = PREV_INSN (before);
4205
4206 gcc_assert (!optimize || !before->deleted ());
4207
4208 link_insn_into_chain (insn, prev, before);
4209
4210 if (prev == NULL)
4211 {
4212 struct sequence_stack *seq;
4213
4214 for (seq = get_current_sequence (); seq; seq = seq->next)
4215 if (before == seq->first)
4216 {
4217 seq->first = insn;
4218 break;
4219 }
4220
4221 gcc_assert (seq);
4222 }
4223 }
4224
4225 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4226 If BB is NULL, an attempt is made to infer the bb from AFTER.
4227
4228 This and the next function should be the only functions called
4229 to insert an insn once delay slots have been filled since only
4230 they know how to update a SEQUENCE. */
4231
4232 void
4233 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4234 {
4235 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4236 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4237 add_insn_after_nobb (insn, after);
4238 if (!BARRIER_P (after)
4239 && !BARRIER_P (insn)
4240 && (bb = BLOCK_FOR_INSN (after)))
4241 {
4242 set_block_for_insn (insn, bb);
4243 if (INSN_P (insn))
4244 df_insn_rescan (insn);
4245 /* Should not happen as first in the BB is always
4246 either NOTE or LABEL. */
4247 if (BB_END (bb) == after
4248 /* Avoid clobbering of structure when creating new BB. */
4249 && !BARRIER_P (insn)
4250 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4251 BB_END (bb) = insn;
4252 }
4253 }
4254
4255 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4256 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4257
4258 This and the previous function should be the only functions called
4259 to insert an insn once delay slots have been filled since only
4260 they know how to update a SEQUENCE. */
4261
4262 void
4263 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4264 {
4265 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4266 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4267 add_insn_before_nobb (insn, before);
4268
4269 if (!bb
4270 && !BARRIER_P (before)
4271 && !BARRIER_P (insn))
4272 bb = BLOCK_FOR_INSN (before);
4273
4274 if (bb)
4275 {
4276 set_block_for_insn (insn, bb);
4277 if (INSN_P (insn))
4278 df_insn_rescan (insn);
4279 /* Should not happen as first in the BB is always either NOTE or
4280 LABEL. */
4281 gcc_assert (BB_HEAD (bb) != insn
4282 /* Avoid clobbering of structure when creating new BB. */
4283 || BARRIER_P (insn)
4284 || NOTE_INSN_BASIC_BLOCK_P (insn));
4285 }
4286 }
4287
4288 /* Replace INSN with a deleted instruction note. */
4289
4290 void
4291 set_insn_deleted (rtx insn)
4292 {
4293 if (INSN_P (insn))
4294 df_insn_delete (as_a <rtx_insn *> (insn));
4295 PUT_CODE (insn, NOTE);
4296 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4297 }
4298
4299
4300 /* Unlink INSN from the insn chain.
4301
4302 This function knows how to handle sequences.
4303
4304 This function does not invalidate data flow information associated with
4305 INSN (i.e. does not call df_insn_delete). That makes this function
4306 usable only for disconnecting an insn from the chain so that it can be
4307 re-emitted elsewhere later.
4308
4309 To later insert INSN elsewhere in the insn chain via add_insn and
4310 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4311 the caller. Nullifying them here breaks many insn chain walks.
4312
4313 To really delete an insn and related DF information, use delete_insn. */
4314
4315 void
4316 remove_insn (rtx uncast_insn)
4317 {
4318 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4319 rtx_insn *next = NEXT_INSN (insn);
4320 rtx_insn *prev = PREV_INSN (insn);
4321 basic_block bb;
4322
4323 if (prev)
4324 {
4325 SET_NEXT_INSN (prev) = next;
4326 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4327 {
4328 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4329 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4330 }
4331 }
4332 else
4333 {
4334 struct sequence_stack *seq;
4335
4336 for (seq = get_current_sequence (); seq; seq = seq->next)
4337 if (insn == seq->first)
4338 {
4339 seq->first = next;
4340 break;
4341 }
4342
4343 gcc_assert (seq);
4344 }
4345
4346 if (next)
4347 {
4348 SET_PREV_INSN (next) = prev;
4349 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4350 {
4351 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4352 SET_PREV_INSN (sequence->insn (0)) = prev;
4353 }
4354 }
4355 else
4356 {
4357 struct sequence_stack *seq;
4358
4359 for (seq = get_current_sequence (); seq; seq = seq->next)
4360 if (insn == seq->last)
4361 {
4362 seq->last = prev;
4363 break;
4364 }
4365
4366 gcc_assert (seq);
4367 }
4368
4369 /* Fix up basic block boundaries, if necessary. */
4370 if (!BARRIER_P (insn)
4371 && (bb = BLOCK_FOR_INSN (insn)))
4372 {
4373 if (BB_HEAD (bb) == insn)
4374 {
4375 /* Never ever delete the basic block note without deleting whole
4376 basic block. */
4377 gcc_assert (!NOTE_P (insn));
4378 BB_HEAD (bb) = next;
4379 }
4380 if (BB_END (bb) == insn)
4381 BB_END (bb) = prev;
4382 }
4383 }
4384
4385 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4386
4387 void
4388 add_function_usage_to (rtx call_insn, rtx call_fusage)
4389 {
4390 gcc_assert (call_insn && CALL_P (call_insn));
4391
4392 /* Put the register usage information on the CALL. If there is already
4393 some usage information, put ours at the end. */
4394 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4395 {
4396 rtx link;
4397
4398 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4399 link = XEXP (link, 1))
4400 ;
4401
4402 XEXP (link, 1) = call_fusage;
4403 }
4404 else
4405 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4406 }
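
/* Illustrative sketch (editorial addition): CALL_INSN_FUNCTION_USAGE lists
   are normally built up with use_reg (declared in expr.h) and then attached
   to the emitted call.  CALL and REGNO are placeholders here.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, REGNO));
     add_function_usage_to (call, call_fusage);
*/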
4407
4408 /* Delete all insns made since FROM.
4409 FROM becomes the new last instruction. */
4410
4411 void
4412 delete_insns_since (rtx_insn *from)
4413 {
4414 if (from == 0)
4415 set_first_insn (0);
4416 else
4417 SET_NEXT_INSN (from) = 0;
4418 set_last_insn (from);
4419 }
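
/* Illustrative sketch (editorial addition): delete_insns_since is commonly
   used to roll back speculatively emitted RTL, for instance when an
   expansion attempt in the optabs code fails.  The emit attempt in the
   middle is a placeholder.

     rtx_insn *last = get_last_insn ();
     ... try to emit an expansion ...
     if (the attempt failed)
       delete_insns_since (last);
*/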
4420
4421 /* This function is deprecated, please use sequences instead.
4422
4423 Move a consecutive bunch of insns to a different place in the chain.
4424 The insns to be moved are those between FROM and TO.
4425 They are moved to a new position after the insn AFTER.
4426 AFTER must not be FROM or TO or any insn in between.
4427
4428 This function does not know about SEQUENCEs and hence should not be
4429 called after delay-slot filling has been done. */
4430
4431 void
4432 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4433 {
4434 if (flag_checking)
4435 {
4436 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4437 gcc_assert (after != x);
4438 gcc_assert (after != to);
4439 }
4440
4441 /* Splice this bunch out of where it is now. */
4442 if (PREV_INSN (from))
4443 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4444 if (NEXT_INSN (to))
4445 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4446 if (get_last_insn () == to)
4447 set_last_insn (PREV_INSN (from));
4448 if (get_insns () == from)
4449 set_first_insn (NEXT_INSN (to));
4450
4451 /* Make the new neighbors point to it and it to them. */
4452 if (NEXT_INSN (after))
4453 SET_PREV_INSN (NEXT_INSN (after)) = to;
4454
4455 SET_NEXT_INSN (to) = NEXT_INSN (after);
4456 SET_PREV_INSN (from) = after;
4457 SET_NEXT_INSN (after) = from;
4458 if (after == get_last_insn ())
4459 set_last_insn (to);
4460 }
4461
4462 /* Same as function above, but take care to update BB boundaries. */
4463 void
4464 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4465 {
4466 rtx_insn *prev = PREV_INSN (from);
4467 basic_block bb, bb2;
4468
4469 reorder_insns_nobb (from, to, after);
4470
4471 if (!BARRIER_P (after)
4472 && (bb = BLOCK_FOR_INSN (after)))
4473 {
4474 rtx_insn *x;
4475 df_set_bb_dirty (bb);
4476
4477 if (!BARRIER_P (from)
4478 && (bb2 = BLOCK_FOR_INSN (from)))
4479 {
4480 if (BB_END (bb2) == to)
4481 BB_END (bb2) = prev;
4482 df_set_bb_dirty (bb2);
4483 }
4484
4485 if (BB_END (bb) == after)
4486 BB_END (bb) = to;
4487
4488 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4489 if (!BARRIER_P (x))
4490 df_insn_change_bb (x, bb);
4491 }
4492 }
4493
4494
4495 /* Emit insn(s) of given code and pattern
4496 at a specified place within the doubly-linked list.
4497
4498 All of the emit_foo global entry points accept an object
4499 X which is either an insn list or a PATTERN of a single
4500 instruction.
4501
4502 There are thus a few canonical ways to generate code and
4503 emit it at a specific place in the instruction stream. For
4504 example, consider the instruction named SPOT and the fact that
4505 we would like to emit some instructions before SPOT. We might
4506 do it like this:
4507
4508 start_sequence ();
4509 ... emit the new instructions ...
4510 insns_head = get_insns ();
4511 end_sequence ();
4512
4513 emit_insn_before (insns_head, SPOT);
4514
4515 It used to be common to generate SEQUENCE rtl instead, but that
4516 is a relic of the past which no longer occurs. The reason is that
4517 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4518 generated would almost certainly die right after it was created. */
4519
4520 static rtx_insn *
4521 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4522 rtx_insn *(*make_raw) (rtx))
4523 {
4524 rtx_insn *insn;
4525
4526 gcc_assert (before);
4527
4528 if (x == NULL_RTX)
4529 return safe_as_a <rtx_insn *> (last);
4530
4531 switch (GET_CODE (x))
4532 {
4533 case DEBUG_INSN:
4534 case INSN:
4535 case JUMP_INSN:
4536 case CALL_INSN:
4537 case CODE_LABEL:
4538 case BARRIER:
4539 case NOTE:
4540 insn = as_a <rtx_insn *> (x);
4541 while (insn)
4542 {
4543 rtx_insn *next = NEXT_INSN (insn);
4544 add_insn_before (insn, before, bb);
4545 last = insn;
4546 insn = next;
4547 }
4548 break;
4549
4550 #ifdef ENABLE_RTL_CHECKING
4551 case SEQUENCE:
4552 gcc_unreachable ();
4553 break;
4554 #endif
4555
4556 default:
4557 last = (*make_raw) (x);
4558 add_insn_before (last, before, bb);
4559 break;
4560 }
4561
4562 return safe_as_a <rtx_insn *> (last);
4563 }
4564
4565 /* Make X be output before the instruction BEFORE. */
4566
4567 rtx_insn *
4568 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4569 {
4570 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4571 }
4572
4573 /* Make an instruction with body X and code JUMP_INSN
4574 and output it before the instruction BEFORE. */
4575
4576 rtx_jump_insn *
4577 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4578 {
4579 return as_a <rtx_jump_insn *> (
4580 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4581 make_jump_insn_raw));
4582 }
4583
4584 /* Make an instruction with body X and code CALL_INSN
4585 and output it before the instruction BEFORE. */
4586
4587 rtx_insn *
4588 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4589 {
4590 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4591 make_call_insn_raw);
4592 }
4593
4594 /* Make an instruction with body X and code DEBUG_INSN
4595 and output it before the instruction BEFORE. */
4596
4597 rtx_insn *
4598 emit_debug_insn_before_noloc (rtx x, rtx before)
4599 {
4600 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4601 make_debug_insn_raw);
4602 }
4603
4604 /* Make an insn of code BARRIER
4605 and output it before the insn BEFORE. */
4606
4607 rtx_barrier *
4608 emit_barrier_before (rtx before)
4609 {
4610 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4611
4612 INSN_UID (insn) = cur_insn_uid++;
4613
4614 add_insn_before (insn, before, NULL);
4615 return insn;
4616 }
4617
4618 /* Emit the label LABEL before the insn BEFORE. */
4619
4620 rtx_code_label *
4621 emit_label_before (rtx label, rtx_insn *before)
4622 {
4623 gcc_checking_assert (INSN_UID (label) == 0);
4624 INSN_UID (label) = cur_insn_uid++;
4625 add_insn_before (label, before, NULL);
4626 return as_a <rtx_code_label *> (label);
4627 }
4628
4629 /* Helper for emit_insn_after, handles lists of instructions
4630 efficiently. */
4631
4632 static rtx_insn *
4633 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4634 {
4635 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4636 rtx_insn *last;
4637 rtx_insn *after_after;
4638 if (!bb && !BARRIER_P (after))
4639 bb = BLOCK_FOR_INSN (after);
4640
4641 if (bb)
4642 {
4643 df_set_bb_dirty (bb);
4644 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4645 if (!BARRIER_P (last))
4646 {
4647 set_block_for_insn (last, bb);
4648 df_insn_rescan (last);
4649 }
4650 if (!BARRIER_P (last))
4651 {
4652 set_block_for_insn (last, bb);
4653 df_insn_rescan (last);
4654 }
4655 if (BB_END (bb) == after)
4656 BB_END (bb) = last;
4657 }
4658 else
4659 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4660 continue;
4661
4662 after_after = NEXT_INSN (after);
4663
4664 SET_NEXT_INSN (after) = first;
4665 SET_PREV_INSN (first) = after;
4666 SET_NEXT_INSN (last) = after_after;
4667 if (after_after)
4668 SET_PREV_INSN (after_after) = last;
4669
4670 if (after == get_last_insn ())
4671 set_last_insn (last);
4672
4673 return last;
4674 }
4675
4676 static rtx_insn *
4677 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4678 rtx_insn *(*make_raw)(rtx))
4679 {
4680 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4681 rtx_insn *last = after;
4682
4683 gcc_assert (after);
4684
4685 if (x == NULL_RTX)
4686 return last;
4687
4688 switch (GET_CODE (x))
4689 {
4690 case DEBUG_INSN:
4691 case INSN:
4692 case JUMP_INSN:
4693 case CALL_INSN:
4694 case CODE_LABEL:
4695 case BARRIER:
4696 case NOTE:
4697 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4698 break;
4699
4700 #ifdef ENABLE_RTL_CHECKING
4701 case SEQUENCE:
4702 gcc_unreachable ();
4703 break;
4704 #endif
4705
4706 default:
4707 last = (*make_raw) (x);
4708 add_insn_after (last, after, bb);
4709 break;
4710 }
4711
4712 return last;
4713 }
4714
4715 /* Make X be output after the insn AFTER and set the BB of insn. If
4716 BB is NULL, an attempt is made to infer the BB from AFTER. */
4717
4718 rtx_insn *
4719 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4720 {
4721 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4722 }
4723
4724
4725 /* Make an insn of code JUMP_INSN with body X
4726 and output it after the insn AFTER. */
4727
4728 rtx_jump_insn *
4729 emit_jump_insn_after_noloc (rtx x, rtx after)
4730 {
4731 return as_a <rtx_jump_insn *> (
4732 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4733 }
4734
4735 /* Make an instruction with body X and code CALL_INSN
4736 and output it after the instruction AFTER. */
4737
4738 rtx_insn *
4739 emit_call_insn_after_noloc (rtx x, rtx after)
4740 {
4741 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4742 }
4743
4744 /* Make an instruction with body X and code DEBUG_INSN
4745 and output it after the instruction AFTER. */
4746
4747 rtx_insn *
4748 emit_debug_insn_after_noloc (rtx x, rtx after)
4749 {
4750 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4751 }
4752
4753 /* Make an insn of code BARRIER
4754 and output it after the insn AFTER. */
4755
4756 rtx_barrier *
4757 emit_barrier_after (rtx after)
4758 {
4759 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4760
4761 INSN_UID (insn) = cur_insn_uid++;
4762
4763 add_insn_after (insn, after, NULL);
4764 return insn;
4765 }
4766
4767 /* Emit the label LABEL after the insn AFTER. */
4768
4769 rtx_insn *
4770 emit_label_after (rtx label, rtx_insn *after)
4771 {
4772 gcc_checking_assert (INSN_UID (label) == 0);
4773 INSN_UID (label) = cur_insn_uid++;
4774 add_insn_after (label, after, NULL);
4775 return as_a <rtx_insn *> (label);
4776 }
4777
4778 /* Notes require a bit of special handling: Some notes need to have their
4779 BLOCK_FOR_INSN set, others should never have it set, and some should
4780 have it set or clear depending on the context. */
4781
4782 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4783 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4784 caller is asked to emit a note before BB_HEAD or after BB_END. */
4785
4786 static bool
4787 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4788 {
4789 switch (subtype)
4790 {
4791 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4792 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4793 return true;
4794
4795 /* Notes for var tracking and EH region markers can appear between or
4796 inside basic blocks. If the caller is emitting on the basic block
4797 boundary, do not set BLOCK_FOR_INSN on the new note. */
4798 case NOTE_INSN_VAR_LOCATION:
4799 case NOTE_INSN_EH_REGION_BEG:
4800 case NOTE_INSN_EH_REGION_END:
4801 return on_bb_boundary_p;
4802
4803 /* Otherwise, BLOCK_FOR_INSN must be set. */
4804 default:
4805 return false;
4806 }
4807 }
4808
4809 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4810
4811 rtx_note *
4812 emit_note_after (enum insn_note subtype, rtx_insn *after)
4813 {
4814 rtx_note *note = make_note_raw (subtype);
4815 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4816 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4817
4818 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4819 add_insn_after_nobb (note, after);
4820 else
4821 add_insn_after (note, after, bb);
4822 return note;
4823 }
4824
4825 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4826
4827 rtx_note *
4828 emit_note_before (enum insn_note subtype, rtx_insn *before)
4829 {
4830 rtx_note *note = make_note_raw (subtype);
4831 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4832 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4833
4834 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4835 add_insn_before_nobb (note, before);
4836 else
4837 add_insn_before (note, before, bb);
4838 return note;
4839 }
4840
4841 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4842 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4843
4844 static rtx_insn *
4845 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4846 rtx_insn *(*make_raw) (rtx))
4847 {
4848 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4849 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4850
4851 if (pattern == NULL_RTX || !loc)
4852 return last;
4853
4854 after = NEXT_INSN (after);
4855 while (1)
4856 {
4857 if (active_insn_p (after)
4858 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4859 && !INSN_LOCATION (after))
4860 INSN_LOCATION (after) = loc;
4861 if (after == last)
4862 break;
4863 after = NEXT_INSN (after);
4864 }
4865 return last;
4866 }
4867
4868 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4869 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4870 any DEBUG_INSNs. */
4871
4872 static rtx_insn *
4873 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4874 rtx_insn *(*make_raw) (rtx))
4875 {
4876 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4877 rtx_insn *prev = after;
4878
4879 if (skip_debug_insns)
4880 while (DEBUG_INSN_P (prev))
4881 prev = PREV_INSN (prev);
4882
4883 if (INSN_P (prev))
4884 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4885 make_raw);
4886 else
4887 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4888 }
4889
4890 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4891 rtx_insn *
4892 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4893 {
4894 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4895 }
4896
4897 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4898 rtx_insn *
4899 emit_insn_after (rtx pattern, rtx after)
4900 {
4901 return emit_pattern_after (pattern, after, true, make_insn_raw);
4902 }
4903
4904 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4905 rtx_jump_insn *
4906 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4907 {
4908 return as_a <rtx_jump_insn *> (
4909 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4910 }
4911
4912 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4913 rtx_jump_insn *
4914 emit_jump_insn_after (rtx pattern, rtx after)
4915 {
4916 return as_a <rtx_jump_insn *> (
4917 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4918 }
4919
4920 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4921 rtx_insn *
4922 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4923 {
4924 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4925 }
4926
4927 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4928 rtx_insn *
4929 emit_call_insn_after (rtx pattern, rtx after)
4930 {
4931 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4932 }
4933
4934 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4935 rtx_insn *
4936 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4937 {
4938 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4939 }
4940
4941 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4942 rtx_insn *
4943 emit_debug_insn_after (rtx pattern, rtx after)
4944 {
4945 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4946 }
4947
4948 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4949 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4950 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4951 CALL_INSN, etc. */
4952
4953 static rtx_insn *
4954 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4955 rtx_insn *(*make_raw) (rtx))
4956 {
4957 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4958 rtx_insn *first = PREV_INSN (before);
4959 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4960 insnp ? before : NULL_RTX,
4961 NULL, make_raw);
4962
4963 if (pattern == NULL_RTX || !loc)
4964 return last;
4965
4966 if (!first)
4967 first = get_insns ();
4968 else
4969 first = NEXT_INSN (first);
4970 while (1)
4971 {
4972 if (active_insn_p (first)
4973 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4974 && !INSN_LOCATION (first))
4975 INSN_LOCATION (first) = loc;
4976 if (first == last)
4977 break;
4978 first = NEXT_INSN (first);
4979 }
4980 return last;
4981 }
4982
4983 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4984 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4985 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4986 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4987
4988 static rtx_insn *
4989 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4990 bool insnp, rtx_insn *(*make_raw) (rtx))
4991 {
4992 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4993 rtx_insn *next = before;
4994
4995 if (skip_debug_insns)
4996 while (DEBUG_INSN_P (next))
4997 next = PREV_INSN (next);
4998
4999 if (INSN_P (next))
5000 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
5001 insnp, make_raw);
5002 else
5003 return emit_pattern_before_noloc (pattern, before,
5004 insnp ? before : NULL_RTX,
5005 NULL, make_raw);
5006 }
5007
5008 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5009 rtx_insn *
5010 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5011 {
5012 return emit_pattern_before_setloc (pattern, before, loc, true,
5013 make_insn_raw);
5014 }
5015
5016 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5017 rtx_insn *
5018 emit_insn_before (rtx pattern, rtx before)
5019 {
5020 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
5021 }
5022
5023 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5024 rtx_jump_insn *
5025 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5026 {
5027 return as_a <rtx_jump_insn *> (
5028 emit_pattern_before_setloc (pattern, before, loc, false,
5029 make_jump_insn_raw));
5030 }
5031
5032 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5033 rtx_jump_insn *
5034 emit_jump_insn_before (rtx pattern, rtx before)
5035 {
5036 return as_a <rtx_jump_insn *> (
5037 emit_pattern_before (pattern, before, true, false,
5038 make_jump_insn_raw));
5039 }
5040
5041 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5042 rtx_insn *
5043 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5044 {
5045 return emit_pattern_before_setloc (pattern, before, loc, false,
5046 make_call_insn_raw);
5047 }
5048
5049 /* Like emit_call_insn_before_noloc,
5050 but set INSN_LOCATION according to BEFORE. */
5051 rtx_insn *
5052 emit_call_insn_before (rtx pattern, rtx_insn *before)
5053 {
5054 return emit_pattern_before (pattern, before, true, false,
5055 make_call_insn_raw);
5056 }
5057
5058 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5059 rtx_insn *
5060 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
5061 {
5062 return emit_pattern_before_setloc (pattern, before, loc, false,
5063 make_debug_insn_raw);
5064 }
5065
5066 /* Like emit_debug_insn_before_noloc,
5067 but set INSN_LOCATION according to BEFORE. */
5068 rtx_insn *
5069 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5070 {
5071 return emit_pattern_before (pattern, before, false, false,
5072 make_debug_insn_raw);
5073 }
5074
5075 /* Take X and emit it at the end of the doubly-linked
5076 INSN list.
5077
5078 Returns the last insn emitted. */
5079
5080 rtx_insn *
5081 emit_insn (rtx x)
5082 {
5083 rtx_insn *last = get_last_insn ();
5084 rtx_insn *insn;
5085
5086 if (x == NULL_RTX)
5087 return last;
5088
5089 switch (GET_CODE (x))
5090 {
5091 case DEBUG_INSN:
5092 case INSN:
5093 case JUMP_INSN:
5094 case CALL_INSN:
5095 case CODE_LABEL:
5096 case BARRIER:
5097 case NOTE:
5098 insn = as_a <rtx_insn *> (x);
5099 while (insn)
5100 {
5101 rtx_insn *next = NEXT_INSN (insn);
5102 add_insn (insn);
5103 last = insn;
5104 insn = next;
5105 }
5106 break;
5107
5108 #ifdef ENABLE_RTL_CHECKING
5109 case JUMP_TABLE_DATA:
5110 case SEQUENCE:
5111 gcc_unreachable ();
5112 break;
5113 #endif
5114
5115 default:
5116 last = make_insn_raw (x);
5117 add_insn (last);
5118 break;
5119 }
5120
5121 return last;
5122 }
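
/* Illustrative sketch (editorial addition): the most common use of
   emit_insn is to append a single-SET pattern to the current sequence.
   DEST and SRC are placeholder rtxes of the same machine mode.

     emit_insn (gen_rtx_SET (dest, src));
*/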
5123
5124 /* Make an insn of code DEBUG_INSN with pattern X
5125 and add it to the end of the doubly-linked list. */
5126
5127 rtx_insn *
5128 emit_debug_insn (rtx x)
5129 {
5130 rtx_insn *last = get_last_insn ();
5131 rtx_insn *insn;
5132
5133 if (x == NULL_RTX)
5134 return last;
5135
5136 switch (GET_CODE (x))
5137 {
5138 case DEBUG_INSN:
5139 case INSN:
5140 case JUMP_INSN:
5141 case CALL_INSN:
5142 case CODE_LABEL:
5143 case BARRIER:
5144 case NOTE:
5145 insn = as_a <rtx_insn *> (x);
5146 while (insn)
5147 {
5148 rtx_insn *next = NEXT_INSN (insn);
5149 add_insn (insn);
5150 last = insn;
5151 insn = next;
5152 }
5153 break;
5154
5155 #ifdef ENABLE_RTL_CHECKING
5156 case JUMP_TABLE_DATA:
5157 case SEQUENCE:
5158 gcc_unreachable ();
5159 break;
5160 #endif
5161
5162 default:
5163 last = make_debug_insn_raw (x);
5164 add_insn (last);
5165 break;
5166 }
5167
5168 return last;
5169 }
5170
5171 /* Make an insn of code JUMP_INSN with pattern X
5172 and add it to the end of the doubly-linked list. */
5173
5174 rtx_insn *
5175 emit_jump_insn (rtx x)
5176 {
5177 rtx_insn *last = NULL;
5178 rtx_insn *insn;
5179
5180 switch (GET_CODE (x))
5181 {
5182 case DEBUG_INSN:
5183 case INSN:
5184 case JUMP_INSN:
5185 case CALL_INSN:
5186 case CODE_LABEL:
5187 case BARRIER:
5188 case NOTE:
5189 insn = as_a <rtx_insn *> (x);
5190 while (insn)
5191 {
5192 rtx_insn *next = NEXT_INSN (insn);
5193 add_insn (insn);
5194 last = insn;
5195 insn = next;
5196 }
5197 break;
5198
5199 #ifdef ENABLE_RTL_CHECKING
5200 case JUMP_TABLE_DATA:
5201 case SEQUENCE:
5202 gcc_unreachable ();
5203 break;
5204 #endif
5205
5206 default:
5207 last = make_jump_insn_raw (x);
5208 add_insn (last);
5209 break;
5210 }
5211
5212 return last;
5213 }
5214
5215 /* Make an insn of code CALL_INSN with pattern X
5216 and add it to the end of the doubly-linked list. */
5217
5218 rtx_insn *
5219 emit_call_insn (rtx x)
5220 {
5221 rtx_insn *insn;
5222
5223 switch (GET_CODE (x))
5224 {
5225 case DEBUG_INSN:
5226 case INSN:
5227 case JUMP_INSN:
5228 case CALL_INSN:
5229 case CODE_LABEL:
5230 case BARRIER:
5231 case NOTE:
5232 insn = emit_insn (x);
5233 break;
5234
5235 #ifdef ENABLE_RTL_CHECKING
5236 case SEQUENCE:
5237 case JUMP_TABLE_DATA:
5238 gcc_unreachable ();
5239 break;
5240 #endif
5241
5242 default:
5243 insn = make_call_insn_raw (x);
5244 add_insn (insn);
5245 break;
5246 }
5247
5248 return insn;
5249 }
5250
5251 /* Add the label LABEL to the end of the doubly-linked list. */
5252
5253 rtx_code_label *
5254 emit_label (rtx uncast_label)
5255 {
5256 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5257
5258 gcc_checking_assert (INSN_UID (label) == 0);
5259 INSN_UID (label) = cur_insn_uid++;
5260 add_insn (label);
5261 return label;
5262 }
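
/* Illustrative sketch (editorial addition): a label made with gen_label_rtx
   (defined earlier in this file) becomes part of the insn stream only when
   emit_label is called at the desired position.

     rtx_code_label *over = gen_label_rtx ();
     ... emit a conditional jump to OVER, then the insns to be skipped ...
     emit_label (over);
*/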
5263
5264 /* Make an insn of code JUMP_TABLE_DATA
5265 and add it to the end of the doubly-linked list. */
5266
5267 rtx_jump_table_data *
5268 emit_jump_table_data (rtx table)
5269 {
5270 rtx_jump_table_data *jump_table_data =
5271 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5272 INSN_UID (jump_table_data) = cur_insn_uid++;
5273 PATTERN (jump_table_data) = table;
5274 BLOCK_FOR_INSN (jump_table_data) = NULL;
5275 add_insn (jump_table_data);
5276 return jump_table_data;
5277 }
5278
5279 /* Make an insn of code BARRIER
5280 and add it to the end of the doubly-linked list. */
5281
5282 rtx_barrier *
5283 emit_barrier (void)
5284 {
5285 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5286 INSN_UID (barrier) = cur_insn_uid++;
5287 add_insn (barrier);
5288 return barrier;
5289 }
5290
5291 /* Emit a copy of note ORIG. */
5292
5293 rtx_note *
5294 emit_note_copy (rtx_note *orig)
5295 {
5296 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5297 rtx_note *note = make_note_raw (kind);
5298 NOTE_DATA (note) = NOTE_DATA (orig);
5299 add_insn (note);
5300 return note;
5301 }
5302
5303 /* Make an insn of code NOTE with kind KIND
5304 and add it to the end of the doubly-linked list. */
5305
5306 rtx_note *
5307 emit_note (enum insn_note kind)
5308 {
5309 rtx_note *note = make_note_raw (kind);
5310 add_insn (note);
5311 return note;
5312 }
5313
5314 /* Emit a clobber of lvalue X. */
5315
5316 rtx_insn *
5317 emit_clobber (rtx x)
5318 {
5319 /* CONCATs should not appear in the insn stream. */
5320 if (GET_CODE (x) == CONCAT)
5321 {
5322 emit_clobber (XEXP (x, 0));
5323 return emit_clobber (XEXP (x, 1));
5324 }
5325 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5326 }
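
/* Illustrative sketch (editorial addition): one common reason to emit a
   CLOBBER is to mark a register as fully set before initializing it piece
   by piece, so dataflow does not see reads of uninitialized parts.  TARGET
   is a placeholder pseudo.

     emit_clobber (target);
     ... emit stores into the individual parts of TARGET ...
*/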
5327
5328 /* Return a sequence of insns to clobber lvalue X. */
5329
5330 rtx_insn *
5331 gen_clobber (rtx x)
5332 {
5333 rtx_insn *seq;
5334
5335 start_sequence ();
5336 emit_clobber (x);
5337 seq = get_insns ();
5338 end_sequence ();
5339 return seq;
5340 }
5341
5342 /* Emit a use of rvalue X. */
5343
5344 rtx_insn *
5345 emit_use (rtx x)
5346 {
5347 /* CONCATs should not appear in the insn stream. */
5348 if (GET_CODE (x) == CONCAT)
5349 {
5350 emit_use (XEXP (x, 0));
5351 return emit_use (XEXP (x, 1));
5352 }
5353 return emit_insn (gen_rtx_USE (VOIDmode, x));
5354 }
5355
5356 /* Return a sequence of insns to use rvalue X. */
5357
5358 rtx_insn *
5359 gen_use (rtx x)
5360 {
5361 rtx_insn *seq;
5362
5363 start_sequence ();
5364 emit_use (x);
5365 seq = get_insns ();
5366 end_sequence ();
5367 return seq;
5368 }
5369
5370 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5371 Return the set in INSN that such notes describe, or NULL if the notes
5372 have no meaning for INSN. */
5373
5374 rtx
5375 set_for_reg_notes (rtx insn)
5376 {
5377 rtx pat, reg;
5378
5379 if (!INSN_P (insn))
5380 return NULL_RTX;
5381
5382 pat = PATTERN (insn);
5383 if (GET_CODE (pat) == PARALLEL)
5384 {
5385 /* We do not use single_set because that ignores SETs of unused
5386 registers. REG_EQUAL and REG_EQUIV notes really do require the
5387 PARALLEL to have a single SET. */
5388 if (multiple_sets (insn))
5389 return NULL_RTX;
5390 pat = XVECEXP (pat, 0, 0);
5391 }
5392
5393 if (GET_CODE (pat) != SET)
5394 return NULL_RTX;
5395
5396 reg = SET_DEST (pat);
5397
5398 /* Notes apply to the contents of a STRICT_LOW_PART. */
5399 if (GET_CODE (reg) == STRICT_LOW_PART
5400 || GET_CODE (reg) == ZERO_EXTRACT)
5401 reg = XEXP (reg, 0);
5402
5403 /* Check that we have a register. */
5404 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5405 return NULL_RTX;
5406
5407 return pat;
5408 }
5409
5410 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5411 note of this type already exists, remove it first. */
5412
5413 rtx
5414 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5415 {
5416 rtx note = find_reg_note (insn, kind, NULL_RTX);
5417
5418 switch (kind)
5419 {
5420 case REG_EQUAL:
5421 case REG_EQUIV:
5422 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5423 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5424 return NULL_RTX;
5425
5426 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5427 It serves no useful purpose and breaks eliminate_regs. */
5428 if (GET_CODE (datum) == ASM_OPERANDS)
5429 return NULL_RTX;
5430
5431 /* Notes with side effects are dangerous. Even if the side-effect
5432 initially mirrors one in PATTERN (INSN), later optimizations
5433 might alter the way that the final register value is calculated
5434 and so move or alter the side-effect in some way. The note would
5435 then no longer be a valid substitution for SET_SRC. */
5436 if (side_effects_p (datum))
5437 return NULL_RTX;
5438 break;
5439
5440 default:
5441 break;
5442 }
5443
5444 if (note)
5445 XEXP (note, 0) = datum;
5446 else
5447 {
5448 add_reg_note (insn, kind, datum);
5449 note = REG_NOTES (insn);
5450 }
5451
5452 switch (kind)
5453 {
5454 case REG_EQUAL:
5455 case REG_EQUIV:
5456 df_notes_rescan (as_a <rtx_insn *> (insn));
5457 break;
5458 default:
5459 break;
5460 }
5461
5462 return note;
5463 }
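
/* Illustrative sketch (editorial addition): a typical use of
   set_unique_reg_note is to summarize a multi-insn expansion with a single
   REG_EQUAL note on its final insn, so later passes can treat the result
   as one operation.  INSN, MODE, OP0 and OP1 are placeholders.

     set_unique_reg_note (insn, REG_EQUAL,
                          gen_rtx_MULT (mode, op0, op1));
*/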
5464
5465 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5466 rtx
5467 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5468 {
5469 rtx set = set_for_reg_notes (insn);
5470
5471 if (set && SET_DEST (set) == dst)
5472 return set_unique_reg_note (insn, kind, datum);
5473 return NULL_RTX;
5474 }
5475
5476 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5477 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5478 is true.
5479
5480 If X is a label, it is simply added into the insn chain. */
5481
5482 rtx_insn *
5483 emit (rtx x, bool allow_barrier_p)
5484 {
5485 enum rtx_code code = classify_insn (x);
5486
5487 switch (code)
5488 {
5489 case CODE_LABEL:
5490 return emit_label (x);
5491 case INSN:
5492 return emit_insn (x);
5493 case JUMP_INSN:
5494 {
5495 rtx_insn *insn = emit_jump_insn (x);
5496 if (allow_barrier_p
5497 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5498 return emit_barrier ();
5499 return insn;
5500 }
5501 case CALL_INSN:
5502 return emit_call_insn (x);
5503 case DEBUG_INSN:
5504 return emit_debug_insn (x);
5505 default:
5506 gcc_unreachable ();
5507 }
5508 }
5509
5510 /* Space for free sequence stack entries. */
5511 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5512
5513 /* Begin emitting insns to a sequence. If this sequence will contain
5514 something that might cause the compiler to pop arguments to function
5515 calls (because those pops have previously been deferred; see
5516 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5517 before calling this function. That will ensure that the deferred
5518 pops are not accidentally emitted in the middle of this sequence. */
5519
5520 void
5521 start_sequence (void)
5522 {
5523 struct sequence_stack *tem;
5524
5525 if (free_sequence_stack != NULL)
5526 {
5527 tem = free_sequence_stack;
5528 free_sequence_stack = tem->next;
5529 }
5530 else
5531 tem = ggc_alloc<sequence_stack> ();
5532
5533 tem->next = get_current_sequence ()->next;
5534 tem->first = get_insns ();
5535 tem->last = get_last_insn ();
5536 get_current_sequence ()->next = tem;
5537
5538 set_first_insn (0);
5539 set_last_insn (0);
5540 }
5541
5542 /* Set up the insn chain starting with FIRST as the current sequence,
5543 saving the previously current one. See the documentation for
5544 start_sequence for more information about how to use this function. */
5545
5546 void
5547 push_to_sequence (rtx_insn *first)
5548 {
5549 rtx_insn *last;
5550
5551 start_sequence ();
5552
5553 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5554 ;
5555
5556 set_first_insn (first);
5557 set_last_insn (last);
5558 }
5559
5560 /* Like push_to_sequence, but take the last insn as an argument to avoid
5561 looping through the list. */
5562
5563 void
5564 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5565 {
5566 start_sequence ();
5567
5568 set_first_insn (first);
5569 set_last_insn (last);
5570 }
5571
5572 /* Set up the outer-level insn chain
5573 as the current sequence, saving the previously current one. */
5574
5575 void
5576 push_topmost_sequence (void)
5577 {
5578 struct sequence_stack *top;
5579
5580 start_sequence ();
5581
5582 top = get_topmost_sequence ();
5583 set_first_insn (top->first);
5584 set_last_insn (top->last);
5585 }
5586
5587 /* After emitting to the outer-level insn chain, update the outer-level
5588 insn chain, and restore the previous saved state. */
5589
5590 void
5591 pop_topmost_sequence (void)
5592 {
5593 struct sequence_stack *top;
5594
5595 top = get_topmost_sequence ();
5596 top->first = get_insns ();
5597 top->last = get_last_insn ();
5598
5599 end_sequence ();
5600 }
5601
5602 /* After emitting to a sequence, restore previous saved state.
5603
5604 To get the contents of the sequence just made, you must call
5605 `get_insns' *before* calling here.
5606
5607 If the compiler might have deferred popping arguments while
5608 generating this sequence, and this sequence will not be immediately
5609 inserted into the instruction stream, use do_pending_stack_adjust
5610 before calling get_insns. That will ensure that the deferred
5611 pops are inserted into this sequence, and not into some random
5612 location in the instruction stream. See INHIBIT_DEFER_POP for more
5613 information about deferred popping of arguments. */
5614
5615 void
5616 end_sequence (void)
5617 {
5618 struct sequence_stack *tem = get_current_sequence ()->next;
5619
5620 set_first_insn (tem->first);
5621 set_last_insn (tem->last);
5622 get_current_sequence ()->next = tem->next;
5623
5624 memset (tem, 0, sizeof (*tem));
5625 tem->next = free_sequence_stack;
5626 free_sequence_stack = tem;
5627 }
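/* A minimal usage sketch of the sequence machinery above (target_reg and
   source_reg are hypothetical rtx values): build insns on the side, then
   splice them into the main chain:

     rtx_insn *seq;
     start_sequence ();
     emit_move_insn (target_reg, source_reg);
     seq = get_insns ();
     end_sequence ();
     emit_insn (seq);

   Note that get_insns must be called before end_sequence, as documented
   above.  */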
5628
5629 /* Return 1 if currently emitting into a sequence. */
5630
5631 int
5632 in_sequence_p (void)
5633 {
5634 return get_current_sequence ()->next != 0;
5635 }
5636
5637 /* Put the various virtual registers into REGNO_REG_RTX. */
5638
5639 static void
5640 init_virtual_regs (void)
5641 {
5642 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5643 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5644 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5645 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5646 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5647 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5648 = virtual_preferred_stack_boundary_rtx;
5649 }
5650
5651
5652 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5653 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5654 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5655 static int copy_insn_n_scratches;
5656
5657 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5658 copied an ASM_OPERANDS.
5659 In that case, it is the original input-operand vector. */
5660 static rtvec orig_asm_operands_vector;
5661
5662 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5663 copied an ASM_OPERANDS.
5664 In that case, it is the copied input-operand vector. */
5665 static rtvec copy_asm_operands_vector;
5666
5667 /* Likewise for the constraints vector. */
5668 static rtvec orig_asm_constraints_vector;
5669 static rtvec copy_asm_constraints_vector;
5670
5671 /* Recursively create a new copy of an rtx for copy_insn.
5672 This function differs from copy_rtx in that it handles SCRATCHes and
5673 ASM_OPERANDs properly.
5674 Normally, this function is not used directly; use copy_insn as front end.
5675 However, you could first copy an insn pattern with copy_insn and then use
5676 this function afterwards to properly copy any REG_NOTEs containing
5677 SCRATCHes. */
5678
5679 rtx
5680 copy_insn_1 (rtx orig)
5681 {
5682 rtx copy;
5683 int i, j;
5684 RTX_CODE code;
5685 const char *format_ptr;
5686
5687 if (orig == NULL)
5688 return NULL;
5689
5690 code = GET_CODE (orig);
5691
5692 switch (code)
5693 {
5694 case REG:
5695 case DEBUG_EXPR:
5696 CASE_CONST_ANY:
5697 case SYMBOL_REF:
5698 case CODE_LABEL:
5699 case PC:
5700 case CC0:
5701 case RETURN:
5702 case SIMPLE_RETURN:
5703 return orig;
5704 case CLOBBER:
5705 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5706 clobbers or clobbers of hard registers that originated as pseudos.
5707 This is needed to allow safe register renaming. */
5708 if (REG_P (XEXP (orig, 0))
5709 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5710 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5711 return orig;
5712 break;
5713
5714 case SCRATCH:
5715 for (i = 0; i < copy_insn_n_scratches; i++)
5716 if (copy_insn_scratch_in[i] == orig)
5717 return copy_insn_scratch_out[i];
5718 break;
5719
5720 case CONST:
5721 if (shared_const_p (orig))
5722 return orig;
5723 break;
5724
5725 /* A MEM with a constant address is not sharable. The problem is that
5726 the constant address may need to be reloaded. If the mem is shared,
5727 then reloading one copy of this mem will cause all copies to appear
5728 to have been reloaded. */
5729
5730 default:
5731 break;
5732 }
5733
5734 /* Copy the various flags, fields, and other information. We assume
5735 that all fields need copying, and then clear the fields that should
5736 not be copied. That is the sensible default behavior, and forces
5737 us to explicitly document why we are *not* copying a flag. */
5738 copy = shallow_copy_rtx (orig);
5739
5740 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5741 if (INSN_P (orig))
5742 {
5743 RTX_FLAG (copy, jump) = 0;
5744 RTX_FLAG (copy, call) = 0;
5745 RTX_FLAG (copy, frame_related) = 0;
5746 }
5747
5748 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5749
5750 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5751 switch (*format_ptr++)
5752 {
5753 case 'e':
5754 if (XEXP (orig, i) != NULL)
5755 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5756 break;
5757
5758 case 'E':
5759 case 'V':
5760 if (XVEC (orig, i) == orig_asm_constraints_vector)
5761 XVEC (copy, i) = copy_asm_constraints_vector;
5762 else if (XVEC (orig, i) == orig_asm_operands_vector)
5763 XVEC (copy, i) = copy_asm_operands_vector;
5764 else if (XVEC (orig, i) != NULL)
5765 {
5766 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5767 for (j = 0; j < XVECLEN (copy, i); j++)
5768 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5769 }
5770 break;
5771
5772 case 't':
5773 case 'w':
5774 case 'i':
5775 case 'p':
5776 case 's':
5777 case 'S':
5778 case 'u':
5779 case '0':
5780 /* These are left unchanged. */
5781 break;
5782
5783 default:
5784 gcc_unreachable ();
5785 }
5786
5787 if (code == SCRATCH)
5788 {
5789 i = copy_insn_n_scratches++;
5790 gcc_assert (i < MAX_RECOG_OPERANDS);
5791 copy_insn_scratch_in[i] = orig;
5792 copy_insn_scratch_out[i] = copy;
5793 }
5794 else if (code == ASM_OPERANDS)
5795 {
5796 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5797 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5798 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5799 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5800 }
5801
5802 return copy;
5803 }
5804
5805 /* Create a new copy of an rtx.
5806 This function differs from copy_rtx in that it handles SCRATCHes and
5807 ASM_OPERANDs properly.
5808 INSN doesn't really have to be a full INSN; it could be just the
5809 pattern. */
5810 rtx
5811 copy_insn (rtx insn)
5812 {
5813 copy_insn_n_scratches = 0;
5814 orig_asm_operands_vector = 0;
5815 orig_asm_constraints_vector = 0;
5816 copy_asm_operands_vector = 0;
5817 copy_asm_constraints_vector = 0;
5818 return copy_insn_1 (insn);
5819 }
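/* Typical use, as seen later in this file when duplicating insns: copy just
   the pattern of an existing insn so that SCRATCHes and ASM_OPERANDS are
   shared/duplicated correctly:

     rtx new_pat = copy_insn (PATTERN (insn));  */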
5820
5821 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5822    on the assumption that INSN itself remains in its original place. */
5823
5824 rtx_insn *
5825 copy_delay_slot_insn (rtx_insn *insn)
5826 {
5827 /* Copy INSN with its rtx_code, all its notes, location etc. */
5828 insn = as_a <rtx_insn *> (copy_rtx (insn));
5829 INSN_UID (insn) = cur_insn_uid++;
5830 return insn;
5831 }
5832
5833 /* Initialize data structures and variables in this file
5834 before generating rtl for each function. */
5835
5836 void
5837 init_emit (void)
5838 {
5839 set_first_insn (NULL);
5840 set_last_insn (NULL);
5841 if (MIN_NONDEBUG_INSN_UID)
5842 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5843 else
5844 cur_insn_uid = 1;
5845 cur_debug_insn_uid = 1;
5846 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5847 first_label_num = label_num;
5848 get_current_sequence ()->next = NULL;
5849
5850 /* Init the tables that describe all the pseudo regs. */
5851
5852 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5853
5854 crtl->emit.regno_pointer_align
5855 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5856
5857 regno_reg_rtx
5858 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5859
5860 /* Put copies of all the hard registers into regno_reg_rtx. */
5861 memcpy (regno_reg_rtx,
5862 initial_regno_reg_rtx,
5863 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5864
5865 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5866 init_virtual_regs ();
5867
5868 /* Indicate that the virtual registers and stack locations are
5869 all pointers. */
5870 REG_POINTER (stack_pointer_rtx) = 1;
5871 REG_POINTER (frame_pointer_rtx) = 1;
5872 REG_POINTER (hard_frame_pointer_rtx) = 1;
5873 REG_POINTER (arg_pointer_rtx) = 1;
5874
5875 REG_POINTER (virtual_incoming_args_rtx) = 1;
5876 REG_POINTER (virtual_stack_vars_rtx) = 1;
5877 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5878 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5879 REG_POINTER (virtual_cfa_rtx) = 1;
5880
5881 #ifdef STACK_BOUNDARY
5882 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5883 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5884 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5885 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5886
5887 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5888 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5889 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5890 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5891
5892 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5893 #endif
5894
5895 #ifdef INIT_EXPANDERS
5896 INIT_EXPANDERS;
5897 #endif
5898 }
5899
5900 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5901
5902 wide_int
5903 const_vector_int_elt (const_rtx x, unsigned int i)
5904 {
5905 /* First handle elements that are directly encoded. */
5906 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5907 if (i < (unsigned int) XVECLEN (x, 0))
5908 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5909
5910 /* Identify the pattern that contains element I and work out the index of
5911 the last encoded element for that pattern. */
5912 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5913 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5914 unsigned int count = i / npatterns;
5915 unsigned int pattern = i % npatterns;
5916 unsigned int final_i = encoded_nelts - npatterns + pattern;
5917
5918 /* If there are no steps, the final encoded value is the right one. */
5919 if (!CONST_VECTOR_STEPPED_P (x))
5920 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5921
5922 /* Otherwise work out the value from the last two encoded elements. */
5923 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5924 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5925 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5926 rtx_mode_t (v1, elt_mode));
5927 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5928 }
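/* Worked example of the stepped-encoding arithmetic above (the numbers are
   illustrative): with NPATTERNS == 1 and three encoded elements 10, 11, 12
   describing the series 10, 11, 12, 13, ..., asking for element 6 gives
   count == 6, v1 == 11, v2 == 12, diff == 1, and therefore
   12 + (6 - 2) * 1 == 16, i.e. the base 10 plus six steps of 1.  */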
5929
5930 /* Return the value of element I of CONST_VECTOR X. */
5931
5932 rtx
5933 const_vector_elt (const_rtx x, unsigned int i)
5934 {
5935 /* First handle elements that are directly encoded. */
5936 if (i < (unsigned int) XVECLEN (x, 0))
5937 return CONST_VECTOR_ENCODED_ELT (x, i);
5938
5939 /* If there are no steps, the final encoded value is the right one. */
5940 if (!CONST_VECTOR_STEPPED_P (x))
5941 {
5942 /* Identify the pattern that contains element I and work out the index of
5943 the last encoded element for that pattern. */
5944 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5945 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5946 unsigned int pattern = i % npatterns;
5947 unsigned int final_i = encoded_nelts - npatterns + pattern;
5948 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5949 }
5950
5951 /* Otherwise work out the value from the last two encoded elements. */
5952 return immed_wide_int_const (const_vector_int_elt (x, i),
5953 GET_MODE_INNER (GET_MODE (x)));
5954 }
5955
5956 /* Return true if X is a valid element for a CONST_VECTOR of the given
5957 mode. */
5958
5959 bool
5960 valid_for_const_vector_p (machine_mode, rtx x)
5961 {
5962 return (CONST_SCALAR_INT_P (x)
5963 || CONST_DOUBLE_AS_FLOAT_P (x)
5964 || CONST_FIXED_P (x));
5965 }
5966
5967 /* Generate a vector constant of mode MODE in which every element has
5968 value ELT. */
5969
5970 rtx
5971 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5972 {
5973 rtx_vector_builder builder (mode, 1, 1);
5974 builder.quick_push (elt);
5975 return builder.build ();
5976 }
5977
5978 /* Return a vector rtx of mode MODE in which every element has value X.
5979 The result will be a constant if X is constant. */
5980
5981 rtx
5982 gen_vec_duplicate (machine_mode mode, rtx x)
5983 {
5984 if (valid_for_const_vector_p (mode, x))
5985 return gen_const_vec_duplicate (mode, x);
5986 return gen_rtx_VEC_DUPLICATE (mode, x);
5987 }
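/* Illustrative example (the vector mode is chosen arbitrarily and only
   exists on targets that support it): gen_const_vec_duplicate (V4SImode,
   const1_rtx) yields the constant vector {1, 1, 1, 1}, whereas
   gen_vec_duplicate with a non-constant operand instead wraps it in a
   VEC_DUPLICATE rtx.  */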
5988
5989 /* A subroutine of const_vec_series_p that handles the case in which:
5990
5991 (GET_CODE (X) == CONST_VECTOR
5992 && CONST_VECTOR_NPATTERNS (X) == 1
5993 && !CONST_VECTOR_DUPLICATE_P (X))
5994
5995 is known to hold. */
5996
5997 bool
5998 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5999 {
6000 /* Stepped sequences are only defined for integers, to avoid specifying
6001 rounding behavior. */
6002 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
6003 return false;
6004
6005 /* A non-duplicated vector with two elements can always be seen as a
6006 series with a nonzero step. Longer vectors must have a stepped
6007 encoding. */
6008 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
6009 && !CONST_VECTOR_STEPPED_P (x))
6010 return false;
6011
6012 /* Calculate the step between the first and second elements. */
6013 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
6014 rtx base = CONST_VECTOR_ELT (x, 0);
6015 rtx step = simplify_binary_operation (MINUS, inner,
6016 CONST_VECTOR_ENCODED_ELT (x, 1), base);
6017 if (rtx_equal_p (step, CONST0_RTX (inner)))
6018 return false;
6019
6020 /* If we have a stepped encoding, check that the step between the
6021 second and third elements is the same as STEP. */
6022 if (CONST_VECTOR_STEPPED_P (x))
6023 {
6024 rtx diff = simplify_binary_operation (MINUS, inner,
6025 CONST_VECTOR_ENCODED_ELT (x, 2),
6026 CONST_VECTOR_ENCODED_ELT (x, 1));
6027 if (!rtx_equal_p (step, diff))
6028 return false;
6029 }
6030
6031 *base_out = base;
6032 *step_out = step;
6033 return true;
6034 }
6035
6036 /* Generate a vector constant of mode MODE in which element I has
6037 the value BASE + I * STEP. */
6038
6039 rtx
6040 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6041 {
6042 gcc_assert (valid_for_const_vector_p (mode, base)
6043 && valid_for_const_vector_p (mode, step));
6044
6045 rtx_vector_builder builder (mode, 1, 3);
6046 builder.quick_push (base);
6047 for (int i = 1; i < 3; ++i)
6048 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6049 builder[i - 1], step));
6050 return builder.build ();
6051 }
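/* Illustrative example (again with an arbitrary vector mode): calling
   gen_const_vec_series (V4SImode, const0_rtx, const1_rtx) pushes the three
   encoded elements 0, 1, 2 for a single pattern, which represents the full
   series {0, 1, 2, 3}.  */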
6052
6053 /* Generate a vector of mode MODE in which element I has the value
6054 BASE + I * STEP. The result will be a constant if BASE and STEP
6055 are both constants. */
6056
6057 rtx
6058 gen_vec_series (machine_mode mode, rtx base, rtx step)
6059 {
6060 if (step == const0_rtx)
6061 return gen_vec_duplicate (mode, base);
6062 if (valid_for_const_vector_p (mode, base)
6063 && valid_for_const_vector_p (mode, step))
6064 return gen_const_vec_series (mode, base, step);
6065 return gen_rtx_VEC_SERIES (mode, base, step);
6066 }
6067
6068 /* Generate a new vector constant for mode MODE and constant value
6069 CONSTANT. */
6070
6071 static rtx
6072 gen_const_vector (machine_mode mode, int constant)
6073 {
6074 machine_mode inner = GET_MODE_INNER (mode);
6075
6076 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6077
6078 rtx el = const_tiny_rtx[constant][(int) inner];
6079 gcc_assert (el);
6080
6081 return gen_const_vec_duplicate (mode, el);
6082 }
6083
6084 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6085 all elements are zero, and the one vector when all elements are one. */
6086 rtx
6087 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6088 {
6089 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6090
6091 /* If the values are all the same, check to see if we can use one of the
6092 standard constant vectors. */
6093 if (rtvec_all_equal_p (v))
6094 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6095
6096 unsigned int nunits = GET_NUM_ELEM (v);
6097 rtx_vector_builder builder (mode, nunits, 1);
6098 for (unsigned int i = 0; i < nunits; ++i)
6099 builder.quick_push (RTVEC_ELT (v, i));
6100 return builder.build (v);
6101 }
6102
6103 /* Initialise global register information required by all functions. */
6104
6105 void
6106 init_emit_regs (void)
6107 {
6108 int i;
6109 machine_mode mode;
6110 mem_attrs *attrs;
6111
6112 /* Reset register attributes */
6113 reg_attrs_htab->empty ();
6114
6115 /* We need reg_raw_mode, so initialize the modes now. */
6116 init_reg_modes_target ();
6117
6118 /* Assign register numbers to the globally defined register rtx. */
6119 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6120 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6121 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6122 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6123 virtual_incoming_args_rtx =
6124 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6125 virtual_stack_vars_rtx =
6126 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6127 virtual_stack_dynamic_rtx =
6128 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6129 virtual_outgoing_args_rtx =
6130 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6131 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6132 virtual_preferred_stack_boundary_rtx =
6133 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6134
6135 /* Initialize RTL for commonly used hard registers. These are
6136 copied into regno_reg_rtx as we begin to compile each function. */
6137 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6138 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6139
6140 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6141 return_address_pointer_rtx
6142 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6143 #endif
6144
6145 pic_offset_table_rtx = NULL_RTX;
6146 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6147 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6148
6149 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6150 {
6151 mode = (machine_mode) i;
6152 attrs = ggc_cleared_alloc<mem_attrs> ();
6153 attrs->align = BITS_PER_UNIT;
6154 attrs->addrspace = ADDR_SPACE_GENERIC;
6155 if (mode != BLKmode && mode != VOIDmode)
6156 {
6157 attrs->size_known_p = true;
6158 attrs->size = GET_MODE_SIZE (mode);
6159 if (STRICT_ALIGNMENT)
6160 attrs->align = GET_MODE_ALIGNMENT (mode);
6161 }
6162 mode_mem_attrs[i] = attrs;
6163 }
6164
6165 split_branch_probability = profile_probability::uninitialized ();
6166 }
6167
6168 /* Initialize global machine_mode variables. */
6169
6170 void
6171 init_derived_machine_modes (void)
6172 {
6173 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6174 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6175 {
6176 scalar_int_mode mode = mode_iter.require ();
6177
6178 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6179 && !opt_byte_mode.exists ())
6180 opt_byte_mode = mode;
6181
6182 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6183 && !opt_word_mode.exists ())
6184 opt_word_mode = mode;
6185 }
6186
6187 byte_mode = opt_byte_mode.require ();
6188 word_mode = opt_word_mode.require ();
6189 ptr_mode = as_a <scalar_int_mode>
6190 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6191 }
6192
6193 /* Create some permanent unique rtl objects shared between all functions. */
6194
6195 void
6196 init_emit_once (void)
6197 {
6198 int i;
6199 machine_mode mode;
6200 scalar_float_mode double_mode;
6201 opt_scalar_mode smode_iter;
6202
6203 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6204 CONST_FIXED, and memory attribute hash tables. */
6205 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6206
6207 #if TARGET_SUPPORTS_WIDE_INT
6208 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6209 #endif
6210 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6211
6212 if (NUM_POLY_INT_COEFFS > 1)
6213 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6214
6215 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6216
6217 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6218
6219 #ifdef INIT_EXPANDERS
6220 /* This is to initialize {init|mark|free}_machine_status before the first
6221 call to push_function_context_to. This is needed by the Chill front
6222 end which calls push_function_context_to before the first call to
6223 init_function_start. */
6224 INIT_EXPANDERS;
6225 #endif
6226
6227 /* Create the unique rtx's for certain rtx codes and operand values. */
6228
6229 /* Process stack-limiting command-line options. */
6230 if (opt_fstack_limit_symbol_arg != NULL)
6231 stack_limit_rtx
6232 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6233 if (opt_fstack_limit_register_no >= 0)
6234 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6235
6236 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6237 tries to use these variables. */
6238 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6239 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6240 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6241
6242 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6243 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6244 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6245 else
6246 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6247
6248 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6249
6250 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6251 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6252 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6253
6254 dconstm1 = dconst1;
6255 dconstm1.sign = 1;
6256
6257 dconsthalf = dconst1;
6258 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6259
6260 for (i = 0; i < 3; i++)
6261 {
6262 const REAL_VALUE_TYPE *const r =
6263 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6264
6265 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6266 const_tiny_rtx[i][(int) mode] =
6267 const_double_from_real_value (*r, mode);
6268
6269 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6270 const_tiny_rtx[i][(int) mode] =
6271 const_double_from_real_value (*r, mode);
6272
6273 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6274
6275 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6276 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6277
6278 for (mode = MIN_MODE_PARTIAL_INT;
6279 mode <= MAX_MODE_PARTIAL_INT;
6280 mode = (machine_mode)((int)(mode) + 1))
6281 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6282 }
6283
6284 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6285
6286 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6287 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6288
6289 /* For BImode, 1 and -1 are unsigned and signed interpretations
6290 of the same value. */
6291 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6292 const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6293 const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6294
6295 for (mode = MIN_MODE_PARTIAL_INT;
6296 mode <= MAX_MODE_PARTIAL_INT;
6297 mode = (machine_mode)((int)(mode) + 1))
6298 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6299
6300 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6301 {
6302 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6303 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6304 }
6305
6306 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6307 {
6308 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6309 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6310 }
6311
6312 /* As for BImode, "all 1" and "all -1" are unsigned and signed
6313 interpretations of the same value. */
6314 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6315 {
6316 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6317 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6318 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6319 }
6320
6321 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6322 {
6323 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6324 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6325 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6326 }
6327
6328 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6329 {
6330 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6331 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6332 }
6333
6334 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6335 {
6336 scalar_mode smode = smode_iter.require ();
6337 FCONST0 (smode).data.high = 0;
6338 FCONST0 (smode).data.low = 0;
6339 FCONST0 (smode).mode = smode;
6340 const_tiny_rtx[0][(int) smode]
6341 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6342 }
6343
6344 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6345 {
6346 scalar_mode smode = smode_iter.require ();
6347 FCONST0 (smode).data.high = 0;
6348 FCONST0 (smode).data.low = 0;
6349 FCONST0 (smode).mode = smode;
6350 const_tiny_rtx[0][(int) smode]
6351 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6352 }
6353
6354 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6355 {
6356 scalar_mode smode = smode_iter.require ();
6357 FCONST0 (smode).data.high = 0;
6358 FCONST0 (smode).data.low = 0;
6359 FCONST0 (smode).mode = smode;
6360 const_tiny_rtx[0][(int) smode]
6361 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6362
6363 /* We store the value 1. */
6364 FCONST1 (smode).data.high = 0;
6365 FCONST1 (smode).data.low = 0;
6366 FCONST1 (smode).mode = smode;
6367 FCONST1 (smode).data
6368 = double_int_one.lshift (GET_MODE_FBIT (smode),
6369 HOST_BITS_PER_DOUBLE_INT,
6370 SIGNED_FIXED_POINT_MODE_P (smode));
6371 const_tiny_rtx[1][(int) smode]
6372 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6373 }
6374
6375 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6376 {
6377 scalar_mode smode = smode_iter.require ();
6378 FCONST0 (smode).data.high = 0;
6379 FCONST0 (smode).data.low = 0;
6380 FCONST0 (smode).mode = smode;
6381 const_tiny_rtx[0][(int) smode]
6382 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6383
6384 /* We store the value 1. */
6385 FCONST1 (smode).data.high = 0;
6386 FCONST1 (smode).data.low = 0;
6387 FCONST1 (smode).mode = smode;
6388 FCONST1 (smode).data
6389 = double_int_one.lshift (GET_MODE_FBIT (smode),
6390 HOST_BITS_PER_DOUBLE_INT,
6391 SIGNED_FIXED_POINT_MODE_P (smode));
6392 const_tiny_rtx[1][(int) smode]
6393 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6394 }
6395
6396 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6397 {
6398 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6399 }
6400
6401 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6402 {
6403 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6404 }
6405
6406 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6407 {
6408 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6409 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6410 }
6411
6412 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6413 {
6414 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6415 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6416 }
6417
6418 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6419 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6420 const_tiny_rtx[0][i] = const0_rtx;
6421
6422 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
6423 {
6424 scalar_mode smode = smode_iter.require ();
6425 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6426 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
6427 }
6428
6429 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6430 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6431 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6432 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6433 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6434 /*prev_insn=*/NULL,
6435 /*next_insn=*/NULL,
6436 /*bb=*/NULL,
6437 /*pattern=*/NULL_RTX,
6438 /*location=*/-1,
6439 CODE_FOR_nothing,
6440 /*reg_notes=*/NULL_RTX);
6441 }
6442
6443 /* Produce an exact duplicate of insn INSN after AFTER.
6444    Take care to update libcall regions if present. */
6445
6446 rtx_insn *
6447 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6448 {
6449 rtx_insn *new_rtx;
6450 rtx link;
6451
6452 switch (GET_CODE (insn))
6453 {
6454 case INSN:
6455 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6456 break;
6457
6458 case JUMP_INSN:
6459 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6460 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6461 break;
6462
6463 case DEBUG_INSN:
6464 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6465 break;
6466
6467 case CALL_INSN:
6468 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6469 if (CALL_INSN_FUNCTION_USAGE (insn))
6470 CALL_INSN_FUNCTION_USAGE (new_rtx)
6471 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6472 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6473 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6474 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6475 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6476 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6477 break;
6478
6479 default:
6480 gcc_unreachable ();
6481 }
6482
6483 /* Update LABEL_NUSES. */
6484 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6485
6486 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6487
6488 /* If the old insn is frame related, then so is the new one. This is
6489 primarily needed for IA-64 unwind info which marks epilogue insns,
6490 which may be duplicated by the basic block reordering code. */
6491 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6492
6493 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6494   rtx *ptail = &REG_NOTES (new_rtx);
6495 while (*ptail != NULL_RTX)
6496 ptail = &XEXP (*ptail, 1);
6497
6498 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6499 will make them. REG_LABEL_TARGETs are created there too, but are
6500 supposed to be sticky, so we copy them. */
6501 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6502 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6503 {
6504 *ptail = duplicate_reg_note (link);
6505 ptail = &XEXP (*ptail, 1);
6506 }
6507
6508 INSN_CODE (new_rtx) = INSN_CODE (insn);
6509 return new_rtx;
6510 }
6511
6512 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6513 rtx
6514 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6515 {
6516 if (hard_reg_clobbers[mode][regno])
6517 return hard_reg_clobbers[mode][regno];
6518 else
6519 return (hard_reg_clobbers[mode][regno] =
6520 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6521 }
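/* Sketch of the intended use (hard register 0 is just an example): repeated
   calls such as gen_hard_reg_clobber (word_mode, 0) return the same cached
   CLOBBER of hard register 0 in word_mode rather than allocating a fresh
   rtx each time.  */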
6522
6523 location_t prologue_location;
6524 location_t epilogue_location;
6525
6526 /* Hold the current location and the last location, so that the data
6527    structures are built lazily, only when instructions at a given place
6528    are actually needed. */
6529 static location_t curr_location;
6530
6531 /* Allocate insn location datastructure. */
6532 void
6533 insn_locations_init (void)
6534 {
6535 prologue_location = epilogue_location = 0;
6536 curr_location = UNKNOWN_LOCATION;
6537 }
6538
6539 /* At the end of emit stage, clear current location. */
6540 void
6541 insn_locations_finalize (void)
6542 {
6543 epilogue_location = curr_location;
6544 curr_location = UNKNOWN_LOCATION;
6545 }
6546
6547 /* Set current location. */
6548 void
6549 set_curr_insn_location (location_t location)
6550 {
6551 curr_location = location;
6552 }
6553
6554 /* Get current location. */
6555 location_t
6556 curr_insn_location (void)
6557 {
6558 return curr_location;
6559 }
6560
6561 /* Return the lexical scope block that INSN belongs to. */
6562 tree
6563 insn_scope (const rtx_insn *insn)
6564 {
6565 return LOCATION_BLOCK (INSN_LOCATION (insn));
6566 }
6567
6568 /* Return line number of the statement that produced this insn. */
6569 int
6570 insn_line (const rtx_insn *insn)
6571 {
6572 return LOCATION_LINE (INSN_LOCATION (insn));
6573 }
6574
6575 /* Return source file of the statement that produced this insn. */
6576 const char *
6577 insn_file (const rtx_insn *insn)
6578 {
6579 return LOCATION_FILE (INSN_LOCATION (insn));
6580 }
6581
6582 /* Return expanded location of the statement that produced this insn. */
6583 expanded_location
6584 insn_location (const rtx_insn *insn)
6585 {
6586 return expand_location (INSN_LOCATION (insn));
6587 }
6588
6589 /* Return true if memory model MODEL requires a pre-operation (release-style)
6590 barrier or a post-operation (acquire-style) barrier. While not universal,
6591 this function matches behavior of several targets. */
6592
6593 bool
6594 need_atomic_barrier_p (enum memmodel model, bool pre)
6595 {
6596 switch (model & MEMMODEL_BASE_MASK)
6597 {
6598 case MEMMODEL_RELAXED:
6599 case MEMMODEL_CONSUME:
6600 return false;
6601 case MEMMODEL_RELEASE:
6602 return pre;
6603 case MEMMODEL_ACQUIRE:
6604 return !pre;
6605 case MEMMODEL_ACQ_REL:
6606 case MEMMODEL_SEQ_CST:
6607 return true;
6608 default:
6609 gcc_unreachable ();
6610 }
6611 }
6612
6613 /* Return a constant shift amount for shifting a value of mode MODE
6614 by VALUE bits. */
6615
6616 rtx
6617 gen_int_shift_amount (machine_mode, poly_int64 value)
6618 {
6619 /* Use a 64-bit mode, to avoid any truncation.
6620
6621 ??? Perhaps this should be automatically derived from the .md files
6622 instead, or perhaps have a target hook. */
6623 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6624 ? DImode
6625 : int_mode_for_size (64, 0).require ());
6626 return gen_int_mode (value, shift_mode);
6627 }
6628
6629 /* Initialize fields of rtl_data related to stack alignment. */
6630
6631 void
6632 rtl_data::init_stack_alignment ()
6633 {
6634 stack_alignment_needed = STACK_BOUNDARY;
6635 max_used_stack_slot_alignment = STACK_BOUNDARY;
6636 stack_alignment_estimated = 0;
6637 preferred_stack_boundary = STACK_BOUNDARY;
6638 }
6639
6640
6641 #include "gt-emit-rtl.h"
6642