1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38 #include "function-abi.h"
39
40 /* The following code does forward propagation of hard register copies.
41 The object is to eliminate as many dependencies as possible, so that
42 we have the most scheduling freedom. As a side effect, we also clean
43 up some silly register allocation decisions made by reload. This
44 code may be obsoleted by a new register allocator. */
45
46 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
47 lifetime of a register and get the DEBUG_INSN subsequently reset.
48 So they are queued instead, and updated only when the register is
49 used in some subsequent real insn before it is set. */
struct queued_debug_insn_change
{
  /* Next queued change recorded against the same destination regno.  */
  struct queued_debug_insn_change *next;
  /* The DEBUG_INSN that would be modified.  */
  rtx_insn *insn;
  /* Location inside INSN's pattern to be replaced.  */
  rtx *loc;
  /* The replacement rtx to store at *LOC.  */
  rtx new_rtx;
};
57
58 /* For each register, we have a list of registers that contain the same
59 value. The OLDEST_REGNO field points to the head of the list, and
60 the NEXT_REGNO field runs through the list. The MODE field indicates
61 what mode the data is known to be in; this field is VOIDmode when the
62 register is not known to contain valid data. */
63
struct value_data_entry
{
  /* Mode the register's value was set in; VOIDmode if unknown/invalid.  */
  machine_mode mode;
  /* Head of the chain of registers holding the same value.  */
  unsigned int oldest_regno;
  /* Next register on the chain, or INVALID_REGNUM at the tail.  */
  unsigned int next_regno;
  /* Queued DEBUG_INSN replacements that would name this register.  */
  struct queued_debug_insn_change *debug_insn_changes;
};
71
struct value_data
{
  /* One entry per hard register.  */
  struct value_data_entry e[FIRST_PSEUDO_REGISTER];
  /* Widest value (in hard registers) recorded so far; bounds how far
     back kill_value_regno must scan for overlapping values.  */
  unsigned int max_value_regs;
  /* Total number of queued DEBUG_INSN changes across all entries.  */
  unsigned int n_debug_insn_changes;
};
78
/* Allocation pool for the queued DEBUG_INSN changes above.  */
static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
  ("debug insn changes pool");

/* When true, replacements inside DEBUG_INSNs are suppressed entirely
   (presumably set by the pass driver elsewhere in this file —
   NOTE(review): confirm against the code past this chunk).  */
static bool skip_debug_insn_p;

static void kill_value_one_regno (unsigned, struct value_data *);
static void kill_value_regno (unsigned, unsigned, struct value_data *);
static void kill_value (const_rtx, struct value_data *);
static void set_value_regno (unsigned, machine_mode, struct value_data *);
static void init_value_data (struct value_data *);
static void kill_clobbered_value (rtx, const_rtx, void *);
static void kill_set_value (rtx, const_rtx, void *);
static void copy_value (rtx, rtx, struct value_data *);
static bool mode_change_ok (machine_mode, machine_mode,
			    unsigned int);
static rtx maybe_mode_change (machine_mode, machine_mode,
			      machine_mode, unsigned int, unsigned int);
static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
				      struct value_data *);
static bool replace_oldest_value_addr (rtx *, enum reg_class,
				       machine_mode, addr_space_t,
				       rtx_insn *, struct value_data *);
static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
extern void debug_value_data (struct value_data *);
static void validate_value_data (struct value_data *);
106
107 /* Free all queued updates for DEBUG_INSNs that change some reg to
108 register REGNO. */
109
110 static void
free_debug_insn_changes(struct value_data * vd,unsigned int regno)111 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
112 {
113 struct queued_debug_insn_change *cur, *next;
114 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
115 {
116 next = cur->next;
117 --vd->n_debug_insn_changes;
118 queued_debug_insn_change_pool.remove (cur);
119 }
120 vd->e[regno].debug_insn_changes = NULL;
121 }
122
123 /* Kill register REGNO. This involves removing it from any value
124 lists, and resetting the value mode to VOIDmode. This is only a
125 helper function; it does not handle any hard registers overlapping
126 with REGNO. */
127
static void
kill_value_one_regno (unsigned int regno, struct value_data *vd)
{
  unsigned int i, next;

  /* Unlink REGNO from whatever value chain it currently sits on.  */
  if (vd->e[regno].oldest_regno != regno)
    {
      /* REGNO is in the middle or at the tail of a chain: find its
	 predecessor and splice it out.  */
      for (i = vd->e[regno].oldest_regno;
	   vd->e[i].next_regno != regno;
	   i = vd->e[i].next_regno)
	continue;
      vd->e[i].next_regno = vd->e[regno].next_regno;
    }
  else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
    {
      /* REGNO heads a non-trivial chain: promote its successor to be
	 the new oldest register for every remaining member.  */
      for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
	vd->e[i].oldest_regno = next;
    }

  /* Reset REGNO to an unknown, self-rooted value.  */
  vd->e[regno].mode = VOIDmode;
  vd->e[regno].oldest_regno = regno;
  vd->e[regno].next_regno = INVALID_REGNUM;
  /* Any queued DEBUG_INSN changes naming REGNO are now invalid.  */
  if (vd->e[regno].debug_insn_changes)
    free_debug_insn_changes (vd, regno);

  if (flag_checking)
    validate_value_data (vd);
}
156
157 /* Kill the value in register REGNO for NREGS, and any other registers
158 whose values overlap. */
159
160 static void
kill_value_regno(unsigned int regno,unsigned int nregs,struct value_data * vd)161 kill_value_regno (unsigned int regno, unsigned int nregs,
162 struct value_data *vd)
163 {
164 unsigned int j;
165
166 /* Kill the value we're told to kill. */
167 for (j = 0; j < nregs; ++j)
168 kill_value_one_regno (regno + j, vd);
169
170 /* Kill everything that overlapped what we're told to kill. */
171 if (regno < vd->max_value_regs)
172 j = 0;
173 else
174 j = regno - vd->max_value_regs;
175 for (; j < regno; ++j)
176 {
177 unsigned int i, n;
178 if (vd->e[j].mode == VOIDmode)
179 continue;
180 n = hard_regno_nregs (j, vd->e[j].mode);
181 if (j + n > regno)
182 for (i = 0; i < n; ++i)
183 kill_value_one_regno (j + i, vd);
184 }
185 }
186
187 /* Kill X. This is a convenience function wrapping kill_value_regno
188 so that we mind the mode the register is in. */
189
190 static void
kill_value(const_rtx x,struct value_data * vd)191 kill_value (const_rtx x, struct value_data *vd)
192 {
193 if (GET_CODE (x) == SUBREG)
194 {
195 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
196 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
197 x = tmp ? tmp : SUBREG_REG (x);
198 }
199 if (REG_P (x))
200 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
201 }
202
203 /* Remember that REGNO is valid in MODE. */
204
205 static void
set_value_regno(unsigned int regno,machine_mode mode,struct value_data * vd)206 set_value_regno (unsigned int regno, machine_mode mode,
207 struct value_data *vd)
208 {
209 unsigned int nregs;
210
211 vd->e[regno].mode = mode;
212
213 nregs = hard_regno_nregs (regno, mode);
214 if (nregs > vd->max_value_regs)
215 vd->max_value_regs = nregs;
216 }
217
218 /* Initialize VD such that there are no known relationships between regs. */
219
220 static void
init_value_data(struct value_data * vd)221 init_value_data (struct value_data *vd)
222 {
223 int i;
224 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
225 {
226 vd->e[i].mode = VOIDmode;
227 vd->e[i].oldest_regno = i;
228 vd->e[i].next_regno = INVALID_REGNUM;
229 vd->e[i].debug_insn_changes = NULL;
230 }
231 vd->max_value_regs = 0;
232 vd->n_debug_insn_changes = 0;
233 }
234
235 /* Called through note_stores. If X is clobbered, kill its value. */
236
237 static void
kill_clobbered_value(rtx x,const_rtx set,void * data)238 kill_clobbered_value (rtx x, const_rtx set, void *data)
239 {
240 struct value_data *const vd = (struct value_data *) data;
241
242 if (GET_CODE (set) == CLOBBER)
243 kill_value (x, vd);
244 }
245
246 /* A structure passed as data to kill_set_value through note_stores. */
/* A structure passed as data to kill_set_value through note_stores.  */
struct kill_set_value_data
{
  /* The value-tracking table to update.  */
  struct value_data *vd;
  /* Register whose set should be left untouched, or NULL_RTX.  */
  rtx ignore_set_reg;
};
252
253 /* Called through note_stores. If X is set, not clobbered, kill its
254 current value and install it as the root of its own value list. */
255
256 static void
kill_set_value(rtx x,const_rtx set,void * data)257 kill_set_value (rtx x, const_rtx set, void *data)
258 {
259 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
260 if (rtx_equal_p (x, ksvd->ignore_set_reg))
261 return;
262
263 if (GET_CODE (set) != CLOBBER)
264 {
265 kill_value (x, ksvd->vd);
266 if (REG_P (x))
267 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
268 }
269 }
270
271 /* Kill any register used in X as the base of an auto-increment expression,
272 and install that register as the root of its own value list. */
273
274 static void
kill_autoinc_value(rtx_insn * insn,struct value_data * vd)275 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
276 {
277 subrtx_iterator::array_type array;
278 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
279 {
280 const_rtx x = *iter;
281 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
282 {
283 x = XEXP (x, 0);
284 kill_value (x, vd);
285 set_value_regno (REGNO (x), GET_MODE (x), vd);
286 iter.skip_subrtxes ();
287 }
288 }
289 }
290
291 /* Assert that SRC has been copied to DEST. Adjust the data structures
292 to reflect that SRC contains an older copy of the shared value. */
293
static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers, patterns
     can be relying to see particular fixed register or users can
     expect the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = REG_NREGS (dest);
  sn = REG_NREGS (src);
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
					       vd->e[sr].mode), 0U))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
    return;

  /* If a narrower value is copied using wider mode, the upper bits
     are undefined (could be e.g. a former paradoxical subreg).  Signal
     in that case we've only copied value using the narrower mode.
     Consider:
     (set (reg:DI r14) (mem:DI ...))
     (set (reg:QI si) (reg:QI r14))
     (set (reg:DI bp) (reg:DI r14))
     (set (reg:DI r14) (const_int ...))
     (set (reg:DI dx) (reg:DI si))
     (set (reg:DI si) (const_int ...))
     (set (reg:DI dx) (reg:DI bp))
     The last set is not redundant, while the low 8 bits of dx are already
     equal to low 8 bits of bp, the other bits are undefined.  */
  else if (partial_subreg_p (vd->e[sr].mode, GET_MODE (src)))
    {
      /* Both registers must tolerate the mode change, or we cannot
	 record the narrower relationship at all.  */
      if (!REG_CAN_CHANGE_MODE_P (sr, GET_MODE (src), vd->e[sr].mode)
	  || !REG_CAN_CHANGE_MODE_P (dr, vd->e[sr].mode, GET_MODE (dest)))
	return;
      set_value_regno (dr, vd->e[sr].mode, vd);
    }

  /* Link DR at the end of the value chain used by SR.  */

  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  /* Walk to the current tail of SR's chain and append DR there.  */
  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

  if (flag_checking)
    validate_value_data (vd);
}
393
394 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
395
396 static bool
mode_change_ok(machine_mode orig_mode,machine_mode new_mode,unsigned int regno ATTRIBUTE_UNUSED)397 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
398 unsigned int regno ATTRIBUTE_UNUSED)
399 {
400 if (partial_subreg_p (orig_mode, new_mode))
401 return false;
402
403 return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
404 }
405
406 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
407 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
408 in NEW_MODE.
409 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
410
static rtx
maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
		   machine_mode new_mode, unsigned int regno,
		   unsigned int copy_regno ATTRIBUTE_UNUSED)
{
  /* If the copy was narrower than both the original value and the new
     use, bits outside COPY_MODE were never transferred; give up.  */
  if (partial_subreg_p (copy_mode, orig_mode)
      && partial_subreg_p (copy_mode, new_mode))
    return NULL_RTX;

  /* Avoid creating multiple copies of the stack pointer.  Some ports
     assume there is one and only one stack pointer.

     It's unclear if we need to do the same for other special registers.  */
  if (regno == STACK_POINTER_REGNUM)
    return NULL_RTX;

  if (orig_mode == new_mode)
    return gen_raw_REG (new_mode, regno);
  else if (mode_change_ok (orig_mode, new_mode, regno))
    {
      int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
      int use_nregs = hard_regno_nregs (copy_regno, new_mode);
      poly_uint64 bytes_per_reg;
      /* If the copy doesn't divide evenly across its hard registers we
	 can't compute a meaningful per-register byte count.  */
      if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
			    copy_nregs, &bytes_per_reg))
	return NULL_RTX;
      /* Byte offset of the used registers within the copy, then map
	 that to the equivalent lowpart offset within the original.  */
      poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
      poly_uint64 offset
	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
				      GET_MODE_SIZE (orig_mode));
      regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
      if (targetm.hard_regno_mode_ok (regno, new_mode))
	return gen_raw_REG (new_mode, regno);
    }
  return NULL_RTX;
}
447
448 /* Find the oldest copy of the value contained in REGNO that is in
449 register class CL and has mode MODE. If found, return an rtx
450 of that oldest register, otherwise return NULL. */
451
static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);
  unsigned int i;

  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  /* If we are accessing REG in some mode other that what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode
      && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
    return NULL_RTX;

  /* Walk the chain from the oldest register towards REGNO; the first
     candidate that is in class CL and survives the mode check wins.  */
  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  /* Carry over REG's identity and attributes so downstream
	     passes see the replacement as the same user-level object.  */
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
492
493 /* If possible, replace the register at *LOC with the oldest register
494 in register class CL. Return true if successfully replaced. */
495
static bool
replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
			  struct value_data *vd)
{
  rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
  if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
    {
      if (DEBUG_INSN_P (insn))
	{
	  /* Don't modify a DEBUG_INSN immediately; queue the change so
	     it is only applied if the new register is later used by a
	     real insn (see the comment on queued_debug_insn_change).  */
	  struct queued_debug_insn_change *change;

	  if (dump_file)
	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

	  change = queued_debug_insn_change_pool.allocate ();
	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
	  change->insn = insn;
	  change->loc = loc;
	  change->new_rtx = new_rtx;
	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
	  ++vd->n_debug_insn_changes;
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

      /* Group the change; the caller commits it via apply_change_group.  */
      validate_change (insn, loc, new_rtx, 1);
      return true;
    }
  return false;
}
529
530 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
531 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
532 BASE_REG_CLASS depending on how the register is being considered. */
533
static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   machine_mode mode, addr_space_t as,
			   rtx_insn *insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      /* DEBUG_INSNs have no constraints, so skip the base/index
	 classification and fall through to the generic recursion.  */
      if (DEBUG_INSN_P (insn))
	break;

      {
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;
	rtx *locB = NULL;
	enum rtx_code index_code = SCRATCH;

	/* Look through SUBREGs when classifying the operands.  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

	/* An operand wrapped in MULT or an extension is the index; a
	   MEM on the other side also forces that classification.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	/* A constant operand leaves only a base to process.  */
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	/* Two registers: decide which plays the index role by asking
	   the target which assignment is legitimate.  */
	else if (code0 == REG && code1 == REG)
	  {
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	/* Recurse into index and base with their proper classes.  */
	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      /* Never replace auto-modified registers.  */
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  /* Generic recursion into all sub-expressions of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}
679
680 /* Similar to replace_oldest_value_reg, but X contains a memory. */
681
682 static bool
replace_oldest_value_mem(rtx x,rtx_insn * insn,struct value_data * vd)683 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
684 {
685 enum reg_class cl;
686
687 if (DEBUG_INSN_P (insn))
688 cl = ALL_REGS;
689 else
690 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
691
692 return replace_oldest_value_addr (&XEXP (x, 0), cl,
693 GET_MODE (x), MEM_ADDR_SPACE (x),
694 insn, vd);
695 }
696
697 /* Apply all queued updates for DEBUG_INSNs that change some reg to
698 register REGNO. */
699
700 static void
apply_debug_insn_changes(struct value_data * vd,unsigned int regno)701 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
702 {
703 struct queued_debug_insn_change *change;
704 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
705
706 for (change = vd->e[regno].debug_insn_changes;
707 change;
708 change = change->next)
709 {
710 if (last_insn != change->insn)
711 {
712 apply_change_group ();
713 last_insn = change->insn;
714 }
715 validate_change (change->insn, change->loc, change->new_rtx, 1);
716 }
717 apply_change_group ();
718 }
719
720 /* Called via note_uses, for all used registers in a real insn
721 apply DEBUG_INSN changes that change registers to the used
722 registers. */
723
724 static void
cprop_find_used_regs(rtx * loc,void * data)725 cprop_find_used_regs (rtx *loc, void *data)
726 {
727 struct value_data *const vd = (struct value_data *) data;
728 subrtx_iterator::array_type array;
729 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
730 {
731 const_rtx x = *iter;
732 if (REG_P (x))
733 {
734 unsigned int regno = REGNO (x);
735 if (vd->e[regno].debug_insn_changes)
736 {
737 apply_debug_insn_changes (vd, regno);
738 free_debug_insn_changes (vd, regno);
739 }
740 }
741 }
742 }
743
744 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
745
static void
kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
{
  /* note_stores walks every store in INSN's pattern (and, for calls,
     CALL_INSN_FUNCTION_USAGE) invoking kill_clobbered_value on each.  */
  note_stores (insn, kill_clobbered_value, vd);
}
751
752 /* Perform the forward copy propagation on basic block BB. */
753
754 static bool
copyprop_hardreg_forward_1(basic_block bb,struct value_data * vd)755 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
756 {
757 bool anything_changed = false;
758 rtx_insn *insn, *next;
759
760 for (insn = BB_HEAD (bb); ; insn = next)
761 {
762 int n_ops, i, predicated;
763 bool is_asm, any_replacements;
764 rtx set;
765 rtx link;
766 bool changed = false;
767 struct kill_set_value_data ksvd;
768
769 next = NEXT_INSN (insn);
770 if (!NONDEBUG_INSN_P (insn))
771 {
772 if (DEBUG_BIND_INSN_P (insn))
773 {
774 rtx loc = INSN_VAR_LOCATION_LOC (insn);
775 if (!VAR_LOC_UNKNOWN_P (loc))
776 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
777 ALL_REGS, GET_MODE (loc),
778 ADDR_SPACE_GENERIC, insn, vd);
779 }
780
781 if (insn == BB_END (bb))
782 break;
783 else
784 continue;
785 }
786
787 set = single_set (insn);
788
789 /* Detect noop sets and remove them before processing side effects. */
790 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
791 {
792 unsigned int regno = REGNO (SET_SRC (set));
793 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
794 SET_DEST (set), vd);
795 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
796 SET_SRC (set), vd);
797 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
798 {
799 bool last = insn == BB_END (bb);
800 delete_insn (insn);
801 if (last)
802 break;
803 continue;
804 }
805 }
806
807 /* Detect obviously dead sets (via REG_UNUSED notes) and remove them. */
808 if (set
809 && !RTX_FRAME_RELATED_P (insn)
810 && !may_trap_p (set)
811 && find_reg_note (insn, REG_UNUSED, SET_DEST (set))
812 && !side_effects_p (SET_SRC (set))
813 && !side_effects_p (SET_DEST (set)))
814 {
815 bool last = insn == BB_END (bb);
816 delete_insn (insn);
817 if (last)
818 break;
819 continue;
820 }
821
822
823 extract_constrain_insn (insn);
824 preprocess_constraints (insn);
825 const operand_alternative *op_alt = which_op_alt ();
826 n_ops = recog_data.n_operands;
827 is_asm = asm_noperands (PATTERN (insn)) >= 0;
828
829 /* Simplify the code below by promoting OP_OUT to OP_INOUT
830 in predicated instructions. */
831
832 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
833 for (i = 0; i < n_ops; ++i)
834 {
835 int matches = op_alt[i].matches;
836 if (matches >= 0 || op_alt[i].matched >= 0
837 || (predicated && recog_data.operand_type[i] == OP_OUT))
838 recog_data.operand_type[i] = OP_INOUT;
839 }
840
841 /* Apply changes to earlier DEBUG_INSNs if possible. */
842 if (vd->n_debug_insn_changes)
843 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
844
845 /* For each earlyclobber operand, zap the value data. */
846 for (i = 0; i < n_ops; i++)
847 if (op_alt[i].earlyclobber)
848 kill_value (recog_data.operand[i], vd);
849
850 /* Within asms, a clobber cannot overlap inputs or outputs.
851 I wouldn't think this were true for regular insns, but
852 scan_rtx treats them like that... */
853 kill_clobbered_values (insn, vd);
854
855 /* Kill all auto-incremented values. */
856 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
857 kill_autoinc_value (insn, vd);
858
859 /* Kill all early-clobbered operands. */
860 for (i = 0; i < n_ops; i++)
861 if (op_alt[i].earlyclobber)
862 kill_value (recog_data.operand[i], vd);
863
864 /* If we have dead sets in the insn, then we need to note these as we
865 would clobbers. */
866 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
867 {
868 if (REG_NOTE_KIND (link) == REG_UNUSED)
869 {
870 kill_value (XEXP (link, 0), vd);
871 /* Furthermore, if the insn looked like a single-set,
872 but the dead store kills the source value of that
873 set, then we can no-longer use the plain move
874 special case below. */
875 if (set
876 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
877 set = NULL;
878 }
879
880 /* We need to keep CFI info correct, and the same on all paths,
881 so we cannot normally replace the registers REG_CFA_REGISTER
882 refers to. Bail. */
883 if (REG_NOTE_KIND (link) == REG_CFA_REGISTER)
884 goto did_replacement;
885 }
886
887 /* Special-case plain move instructions, since we may well
888 be able to do the move from a different register class. */
889 if (set && REG_P (SET_SRC (set)))
890 {
891 rtx src = SET_SRC (set);
892 unsigned int regno = REGNO (src);
893 machine_mode mode = GET_MODE (src);
894 unsigned int i;
895 rtx new_rtx;
896
897 /* If we are accessing SRC in some mode other that what we
898 set it in, make sure that the replacement is valid. */
899 if (mode != vd->e[regno].mode)
900 {
901 if (REG_NREGS (src)
902 > hard_regno_nregs (regno, vd->e[regno].mode))
903 goto no_move_special_case;
904
905 /* And likewise, if we are narrowing on big endian the transformation
906 is also invalid. */
907 if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
908 && maybe_ne (subreg_lowpart_offset (mode,
909 vd->e[regno].mode), 0U))
910 goto no_move_special_case;
911 }
912
913 /* If the destination is also a register, try to find a source
914 register in the same class. */
915 if (REG_P (SET_DEST (set)))
916 {
917 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
918 src, vd);
919
920 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
921 {
922 if (dump_file)
923 fprintf (dump_file,
924 "insn %u: replaced reg %u with %u\n",
925 INSN_UID (insn), regno, REGNO (new_rtx));
926 changed = true;
927 goto did_replacement;
928 }
929 /* We need to re-extract as validate_change clobbers
930 recog_data. */
931 extract_constrain_insn (insn);
932 preprocess_constraints (insn);
933 }
934
935 /* Otherwise, try all valid registers and see if its valid. */
936 for (i = vd->e[regno].oldest_regno; i != regno;
937 i = vd->e[i].next_regno)
938 {
939 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
940 mode, i, regno);
941 if (new_rtx != NULL_RTX)
942 {
943 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
944 {
945 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
946 REG_ATTRS (new_rtx) = REG_ATTRS (src);
947 REG_POINTER (new_rtx) = REG_POINTER (src);
948 if (dump_file)
949 fprintf (dump_file,
950 "insn %u: replaced reg %u with %u\n",
951 INSN_UID (insn), regno, REGNO (new_rtx));
952 changed = true;
953 goto did_replacement;
954 }
955 /* We need to re-extract as validate_change clobbers
956 recog_data. */
957 extract_constrain_insn (insn);
958 preprocess_constraints (insn);
959 }
960 }
961 }
962 no_move_special_case:
963
964 any_replacements = false;
965
966 /* For each input operand, replace a hard register with the
967 eldest live copy that's in an appropriate register class. */
968 for (i = 0; i < n_ops; i++)
969 {
970 bool replaced = false;
971
972 /* Don't scan match_operand here, since we've no reg class
973 information to pass down. Any operands that we could
974 substitute in will be represented elsewhere. */
975 if (recog_data.constraints[i][0] == '\0')
976 continue;
977
978 /* Don't replace in asms intentionally referencing hard regs. */
979 if (is_asm && REG_P (recog_data.operand[i])
980 && (REGNO (recog_data.operand[i])
981 == ORIGINAL_REGNO (recog_data.operand[i])))
982 continue;
983
984 if (recog_data.operand_type[i] == OP_IN)
985 {
986 if (op_alt[i].is_address)
987 replaced
988 = replace_oldest_value_addr (recog_data.operand_loc[i],
989 alternative_class (op_alt, i),
990 VOIDmode, ADDR_SPACE_GENERIC,
991 insn, vd);
992 else if (REG_P (recog_data.operand[i]))
993 replaced
994 = replace_oldest_value_reg (recog_data.operand_loc[i],
995 alternative_class (op_alt, i),
996 insn, vd);
997 else if (MEM_P (recog_data.operand[i]))
998 replaced = replace_oldest_value_mem (recog_data.operand[i],
999 insn, vd);
1000 }
1001 else if (MEM_P (recog_data.operand[i]))
1002 replaced = replace_oldest_value_mem (recog_data.operand[i],
1003 insn, vd);
1004
1005 /* If we performed any replacement, update match_dups. */
1006 if (replaced)
1007 {
1008 int j;
1009 rtx new_rtx;
1010
1011 new_rtx = *recog_data.operand_loc[i];
1012 recog_data.operand[i] = new_rtx;
1013 for (j = 0; j < recog_data.n_dups; j++)
1014 if (recog_data.dup_num[j] == i)
1015 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
1016
1017 any_replacements = true;
1018 }
1019 }
1020
1021 if (any_replacements)
1022 {
1023 if (! apply_change_group ())
1024 {
1025 if (dump_file)
1026 fprintf (dump_file,
1027 "insn %u: reg replacements not verified\n",
1028 INSN_UID (insn));
1029 }
1030 else
1031 changed = true;
1032 }
1033
1034 did_replacement:
1035 if (changed)
1036 {
1037 anything_changed = true;
1038
1039 /* If something changed, perhaps further changes to earlier
1040 DEBUG_INSNs can be applied. */
1041 if (vd->n_debug_insn_changes)
1042 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1043 df_insn_rescan (insn);
1044 }
1045
1046 ksvd.vd = vd;
1047 ksvd.ignore_set_reg = NULL_RTX;
1048
1049 /* Clobber call-clobbered registers. */
1050 if (CALL_P (insn))
1051 {
1052 unsigned int set_regno = INVALID_REGNUM;
1053 unsigned int set_nregs = 0;
1054 unsigned int regno;
1055 rtx exp;
1056
1057 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1058 {
1059 rtx x = XEXP (exp, 0);
1060 if (GET_CODE (x) == SET)
1061 {
1062 rtx dest = SET_DEST (x);
1063 kill_value (dest, vd);
1064 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1065 copy_value (dest, SET_SRC (x), vd);
1066 ksvd.ignore_set_reg = dest;
1067 set_regno = REGNO (dest);
1068 set_nregs = REG_NREGS (dest);
1069 break;
1070 }
1071 }
1072
1073 function_abi callee_abi = insn_callee_abi (insn);
1074 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1075 if (vd->e[regno].mode != VOIDmode
1076 && callee_abi.clobbers_reg_p (vd->e[regno].mode, regno)
1077 && (regno < set_regno || regno >= set_regno + set_nregs))
1078 kill_value_regno (regno, 1, vd);
1079
1080 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1081 of the SET isn't clobbered by CALLEE_ABI, but instead among
1082 CLOBBERs on the CALL_INSN, we could wrongly assume the
1083 value in it is still live. */
1084 if (ksvd.ignore_set_reg)
1085 kill_clobbered_values (insn, vd);
1086 }
1087
1088 bool copy_p = (set
1089 && REG_P (SET_DEST (set))
1090 && REG_P (SET_SRC (set)));
1091 bool noop_p = (copy_p
1092 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1093
1094 /* If a noop move is using narrower mode than we have recorded,
1095 we need to either remove the noop move, or kill_set_value. */
1096 if (noop_p
1097 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1098 vd->e[REGNO (SET_DEST (set))].mode))
1099 {
1100 if (noop_move_p (insn))
1101 {
1102 bool last = insn == BB_END (bb);
1103 delete_insn (insn);
1104 if (last)
1105 break;
1106 }
1107 else
1108 noop_p = false;
1109 }
1110
1111 if (!noop_p)
1112 {
1113 /* Notice stores. */
1114 note_stores (insn, kill_set_value, &ksvd);
1115
1116 /* Notice copies. */
1117 if (copy_p)
1118 {
1119 df_insn_rescan (insn);
1120 copy_value (SET_DEST (set), SET_SRC (set), vd);
1121 }
1122 }
1123
1124 if (insn == BB_END (bb))
1125 break;
1126 }
1127
1128 return anything_changed;
1129 }
1130
1131 /* Dump the value chain data to stderr. */
1132
1133 DEBUG_FUNCTION void
debug_value_data(struct value_data * vd)1134 debug_value_data (struct value_data *vd)
1135 {
1136 HARD_REG_SET set;
1137 unsigned int i, j;
1138
1139 CLEAR_HARD_REG_SET (set);
1140
1141 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1142 if (vd->e[i].oldest_regno == i)
1143 {
1144 if (vd->e[i].mode == VOIDmode)
1145 {
1146 if (vd->e[i].next_regno != INVALID_REGNUM)
1147 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1148 i, vd->e[i].next_regno);
1149 continue;
1150 }
1151
1152 SET_HARD_REG_BIT (set, i);
1153 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1154
1155 for (j = vd->e[i].next_regno;
1156 j != INVALID_REGNUM;
1157 j = vd->e[j].next_regno)
1158 {
1159 if (TEST_HARD_REG_BIT (set, j))
1160 {
1161 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1162 return;
1163 }
1164
1165 if (vd->e[j].oldest_regno != i)
1166 {
1167 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1168 j, vd->e[j].oldest_regno);
1169 return;
1170 }
1171 SET_HARD_REG_BIT (set, j);
1172 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1173 }
1174 fputc ('\n', stderr);
1175 }
1176
1177 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1178 if (! TEST_HARD_REG_BIT (set, i)
1179 && (vd->e[i].mode != VOIDmode
1180 || vd->e[i].oldest_regno != i
1181 || vd->e[i].next_regno != INVALID_REGNUM))
1182 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1183 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1184 vd->e[i].next_regno);
1185 }
1186
1187 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1188 DEBUG_INSN is skipped since we do not want to involve DF related
1189 staff as how it is handled in function pass_cprop_hardreg::execute.
1190
1191 NOTE: Currently it is only used for shrink-wrap. Maybe extend it
1192 to handle DEBUG_INSN for other uses. */
1193
1194 void
copyprop_hardreg_forward_bb_without_debug_insn(basic_block bb)1195 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1196 {
1197 struct value_data *vd;
1198 vd = XNEWVEC (struct value_data, 1);
1199 init_value_data (vd);
1200
1201 skip_debug_insn_p = true;
1202 copyprop_hardreg_forward_1 (bb, vd);
1203 free (vd);
1204 skip_debug_insn_p = false;
1205 }
1206
/* Consistency-check the value chain data in VD: each chain headed by a
   register that is its own oldest_regno must be acyclic, every member
   must name that head as its oldest_regno, and any register belonging
   to no chain must be a completely empty entry.  Aborts the compiler
   via internal_error on the first violation found.  */

static void
validate_value_data (struct value_data *vd)
{
  HARD_REG_SET set;
  unsigned int i, j;

  CLEAR_HARD_REG_SET (set);

  /* First pass: walk every chain from its head, marking members in SET.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (vd->e[i].oldest_regno == i)
      {
	if (vd->e[i].mode == VOIDmode)
	  {
	    /* An empty chain head must not link to anything.  */
	    if (vd->e[i].next_regno != INVALID_REGNUM)
	      internal_error ("%qs: [%u] bad %<next_regno%> for empty chain (%u)",
			      __func__, i, vd->e[i].next_regno);
	    continue;
	  }

	SET_HARD_REG_BIT (set, i);

	for (j = vd->e[i].next_regno;
	     j != INVALID_REGNUM;
	     j = vd->e[j].next_regno)
	  {
	    /* Seeing a register twice means next_regno links form a cycle.  */
	    if (TEST_HARD_REG_BIT (set, j))
	      internal_error ("%qs: loop in %<next_regno%> chain (%u)",
			      __func__, j);
	    /* Every member must point back at the chain head.  */
	    if (vd->e[j].oldest_regno != i)
	      internal_error ("%qs: [%u] bad %<oldest_regno%> (%u)",
			      __func__, j, vd->e[j].oldest_regno);

	    SET_HARD_REG_BIT (set, j);
	  }
      }

  /* Second pass: any register not visited above must be fully empty.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (! TEST_HARD_REG_BIT (set, i)
	&& (vd->e[i].mode != VOIDmode
	    || vd->e[i].oldest_regno != i
	    || vd->e[i].next_regno != INVALID_REGNUM))
      internal_error ("%qs: [%u] non-empty register in chain (%s %u %i)",
		      __func__, i,
		      GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
		      vd->e[i].next_regno);
}
1253
1254
1255 namespace {
1256
/* Pass descriptor for the hard-register copy propagation RTL pass.  */
const pass_data pass_data_cprop_hardreg =
{
  RTL_PASS, /* type */
  "cprop_hardreg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CPROP_REGISTERS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
1269
/* RTL pass wrapper around the hard-register copy propagation code.  */
class pass_cprop_hardreg : public rtl_opt_pass
{
public:
  pass_cprop_hardreg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing and -fcprop-registers is enabled.  */
  virtual bool gate (function *)
    {
      return (optimize > 0 && (flag_cprop_registers));
    }

  virtual unsigned int execute (function *);

}; // class pass_cprop_hardreg
1286
1287 static bool
cprop_hardreg_bb(basic_block bb,struct value_data * all_vd,sbitmap visited)1288 cprop_hardreg_bb (basic_block bb, struct value_data *all_vd, sbitmap visited)
1289 {
1290 bitmap_set_bit (visited, bb->index);
1291
1292 /* If a block has a single predecessor, that we've already
1293 processed, begin with the value data that was live at
1294 the end of the predecessor block. */
1295 /* ??? Ought to use more intelligent queuing of blocks. */
1296 if (single_pred_p (bb)
1297 && bitmap_bit_p (visited, single_pred (bb)->index)
1298 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1299 {
1300 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1301 if (all_vd[bb->index].n_debug_insn_changes)
1302 {
1303 unsigned int regno;
1304
1305 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1306 {
1307 if (all_vd[bb->index].e[regno].debug_insn_changes)
1308 {
1309 struct queued_debug_insn_change *cur;
1310 for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1311 cur; cur = cur->next)
1312 --all_vd[bb->index].n_debug_insn_changes;
1313 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1314 if (all_vd[bb->index].n_debug_insn_changes == 0)
1315 break;
1316 }
1317 }
1318 }
1319 }
1320 else
1321 init_value_data (all_vd + bb->index);
1322
1323 return copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1324 }
1325
/* Flush the queued DEBUG_INSN changes recorded in ALL_VD for each basic
   block of FUN: apply the changes for a register only if that register
   is still live at the block's exit, discard the rest, then release the
   change pool.  */

static void
cprop_hardreg_debug (function *fun, struct value_data *all_vd)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    if (all_vd[bb->index].n_debug_insn_changes)
      {
	unsigned int regno;
	bitmap live;

	live = df_get_live_out (bb);
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	  if (all_vd[bb->index].e[regno].debug_insn_changes)
	    {
	      /* Only apply the queued changes when the register's value
		 survives to the end of the block.  */
	      if (REGNO_REG_SET_P (live, regno))
		apply_debug_insn_changes (all_vd + bb->index, regno);

	      /* Drain the count and drop this register's list; once the
		 block's count hits zero no other register can have
		 pending changes, so stop scanning.  */
	      struct queued_debug_insn_change *cur;
	      for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
		   cur; cur = cur->next)
		--all_vd[bb->index].n_debug_insn_changes;
	      all_vd[bb->index].e[regno].debug_insn_changes = NULL;
	      if (all_vd[bb->index].n_debug_insn_changes == 0)
		break;
	    }
      }

  queued_debug_insn_change_pool.release ();
}
1356
/* Main entry point of the pass: run copy propagation over every basic
   block of FUN, then make a second pass over only those blocks in which
   the first pass changed something.  Always returns 0.  */

unsigned int
pass_cprop_hardreg::execute (function *fun)
{
  struct value_data *all_vd;
  basic_block bb;

  /* One value_data per basic block, indexed by bb->index.  */
  all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));

  auto_sbitmap visited (last_basic_block_for_fn (fun));
  bitmap_clear (visited);

  /* Indices of the blocks the first pass changed, for the re-run below.  */
  auto_vec<int> worklist;
  bool any_debug_changes = false;

  /* We need accurate notes.  Earlier passes such as if-conversion may
     leave notes in an inconsistent state.  */
  df_note_add_problem ();
  df_analyze ();

  /* It is tempting to set DF_LR_RUN_DCE, but DCE may choose to delete
     an insn and this pass would not have visibility into the removal.
     This pass would then potentially use the source of that
     INSN for propagation purposes, generating invalid code.

     So we just ask for updated notes and handle trivial deletions
     within this pass where we can update this passes internal
     data structures appropriately.  */
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* First pass over all blocks.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      if (cprop_hardreg_bb (bb, all_vd, visited))
	worklist.safe_push (bb->index);
      if (all_vd[bb->index].n_debug_insn_changes)
	any_debug_changes = true;
    }

  /* We must call df_analyze here unconditionally to ensure that the
     REG_UNUSED and REG_DEAD notes are consistent with and without -g.  */
  df_analyze ();

  if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
    cprop_hardreg_debug (fun, all_vd);

  /* Second pass if we've changed anything, only for the bbs where we have
     changed anything though.  */
  if (!worklist.is_empty ())
    {
      unsigned int i;
      int index;

      any_debug_changes = false;
      bitmap_clear (visited);
      FOR_EACH_VEC_ELT (worklist, i, index)
	{
	  bb = BASIC_BLOCK_FOR_FN (fun, index);
	  cprop_hardreg_bb (bb, all_vd, visited);
	  if (all_vd[bb->index].n_debug_insn_changes)
	    any_debug_changes = true;
	}

      df_analyze ();
      if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
	cprop_hardreg_debug (fun, all_vd);
    }

  free (all_vd);
  return 0;
}
1426
1427 } // anon namespace
1428
/* Factory function used by the pass manager to instantiate the pass.  */

rtl_opt_pass *
make_pass_cprop_hardreg (gcc::context *ctxt)
{
  return new pass_cprop_hardreg (ctxt);
}
1434