1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38 #include "function-abi.h"
39
40 /* The following code does forward propagation of hard register copies.
41 The object is to eliminate as many dependencies as possible, so that
42 we have the most scheduling freedom. As a side effect, we also clean
43 up some silly register allocation decisions made by reload. This
44 code may be obsoleted by a new register allocator. */
45
/* DEBUG_INSNs aren't changed right away, as doing so might extend the
   lifetime of a register and get the DEBUG_INSN subsequently reset.
   So they are queued instead, and updated only when the register is
   used in some subsequent real insn before it is set.  */
struct queued_debug_insn_change
{
  /* Next queued change for the same replacement register.  */
  struct queued_debug_insn_change *next;
  /* The DEBUG_INSN to be modified.  */
  rtx_insn *insn;
  /* Location within INSN whose contents are to be replaced.  */
  rtx *loc;
  /* The replacement expression to store at *LOC.  */
  rtx new_rtx;
};
57
/* For each register, we have a list of registers that contain the same
   value.  The OLDEST_REGNO field points to the head of the list, and
   the NEXT_REGNO field runs through the list.  The MODE field indicates
   what mode the data is known to be in; this field is VOIDmode when the
   register is not known to contain valid data.  */

struct value_data_entry
{
  /* Mode the register's value was set in; VOIDmode if unknown.  */
  machine_mode mode;
  /* Head of the value chain this register belongs to (self if it is
     the oldest copy).  */
  unsigned int oldest_regno;
  /* Next register in the same value chain, or INVALID_REGNUM.  */
  unsigned int next_regno;
  /* Queued DEBUG_INSN substitutions that would replace some register
     with this one; applied lazily (see queued_debug_insn_change).  */
  struct queued_debug_insn_change *debug_insn_changes;
};
71
/* Per-basic-block copy propagation state: one entry per hard register,
   plus bookkeeping shared across all entries.  */
struct value_data
{
  struct value_data_entry e[FIRST_PSEUDO_REGISTER];
  /* Widest value (in hard registers) recorded so far; bounds the
     overlap scan in kill_value_regno.  */
  unsigned int max_value_regs;
  /* Total queued DEBUG_INSN changes across all registers, so callers
     can skip the scan when it is zero.  */
  unsigned int n_debug_insn_changes;
};
78
/* Allocator for queued_debug_insn_change nodes.  */
static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
  ("debug insn changes pool");

/* When true, replace_oldest_value_reg makes no replacement inside a
   DEBUG_INSN (it neither edits nor queues a change for it).  */
static bool skip_debug_insn_p;

static void kill_value_one_regno (unsigned, struct value_data *);
static void kill_value_regno (unsigned, unsigned, struct value_data *);
static void kill_value (const_rtx, struct value_data *);
static void set_value_regno (unsigned, machine_mode, struct value_data *);
static void init_value_data (struct value_data *);
static void kill_clobbered_value (rtx, const_rtx, void *);
static void kill_set_value (rtx, const_rtx, void *);
static void copy_value (rtx, rtx, struct value_data *);
static bool mode_change_ok (machine_mode, machine_mode,
			    unsigned int);
static rtx maybe_mode_change (machine_mode, machine_mode,
			      machine_mode, unsigned int, unsigned int);
static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
				      struct value_data *);
static bool replace_oldest_value_addr (rtx *, enum reg_class,
				       machine_mode, addr_space_t,
				       rtx_insn *, struct value_data *);
static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
extern void debug_value_data (struct value_data *);
static void validate_value_data (struct value_data *);
106
107 /* Free all queued updates for DEBUG_INSNs that change some reg to
108 register REGNO. */
109
110 static void
free_debug_insn_changes(struct value_data * vd,unsigned int regno)111 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
112 {
113 struct queued_debug_insn_change *cur, *next;
114 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
115 {
116 next = cur->next;
117 --vd->n_debug_insn_changes;
118 queued_debug_insn_change_pool.remove (cur);
119 }
120 vd->e[regno].debug_insn_changes = NULL;
121 }
122
/* Kill register REGNO.  This involves removing it from any value
   lists, and resetting the value mode to VOIDmode.  This is only a
   helper function; it does not handle any hard registers overlapping
   with REGNO.  */

static void
kill_value_one_regno (unsigned int regno, struct value_data *vd)
{
  unsigned int i, next;

  /* If REGNO is not the head of its value chain, unlink it: walk from
     the head to REGNO's predecessor and splice REGNO out.  */
  if (vd->e[regno].oldest_regno != regno)
    {
      for (i = vd->e[regno].oldest_regno;
	   vd->e[i].next_regno != regno;
	   i = vd->e[i].next_regno)
	continue;
      vd->e[i].next_regno = vd->e[regno].next_regno;
    }
  /* Otherwise REGNO heads the chain; promote its successor NEXT to be
     the new head of every remaining member.  */
  else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
    {
      for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
	vd->e[i].oldest_regno = next;
    }

  /* Reset REGNO to a singleton chain with no known value.  */
  vd->e[regno].mode = VOIDmode;
  vd->e[regno].oldest_regno = regno;
  vd->e[regno].next_regno = INVALID_REGNUM;
  /* Queued DEBUG_INSN substitutions that would use REGNO are stale
     now that its value is gone; discard them.  */
  if (vd->e[regno].debug_insn_changes)
    free_debug_insn_changes (vd, regno);

  if (flag_checking)
    validate_value_data (vd);
}
156
157 /* Kill the value in register REGNO for NREGS, and any other registers
158 whose values overlap. */
159
160 static void
kill_value_regno(unsigned int regno,unsigned int nregs,struct value_data * vd)161 kill_value_regno (unsigned int regno, unsigned int nregs,
162 struct value_data *vd)
163 {
164 unsigned int j;
165
166 /* Kill the value we're told to kill. */
167 for (j = 0; j < nregs; ++j)
168 kill_value_one_regno (regno + j, vd);
169
170 /* Kill everything that overlapped what we're told to kill. */
171 if (regno < vd->max_value_regs)
172 j = 0;
173 else
174 j = regno - vd->max_value_regs;
175 for (; j < regno; ++j)
176 {
177 unsigned int i, n;
178 if (vd->e[j].mode == VOIDmode)
179 continue;
180 n = hard_regno_nregs (j, vd->e[j].mode);
181 if (j + n > regno)
182 for (i = 0; i < n; ++i)
183 kill_value_one_regno (j + i, vd);
184 }
185 }
186
187 /* Kill X. This is a convenience function wrapping kill_value_regno
188 so that we mind the mode the register is in. */
189
190 static void
kill_value(const_rtx x,struct value_data * vd)191 kill_value (const_rtx x, struct value_data *vd)
192 {
193 if (GET_CODE (x) == SUBREG)
194 {
195 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
196 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
197 x = tmp ? tmp : SUBREG_REG (x);
198 }
199 if (REG_P (x))
200 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
201 }
202
203 /* Remember that REGNO is valid in MODE. */
204
205 static void
set_value_regno(unsigned int regno,machine_mode mode,struct value_data * vd)206 set_value_regno (unsigned int regno, machine_mode mode,
207 struct value_data *vd)
208 {
209 unsigned int nregs;
210
211 vd->e[regno].mode = mode;
212
213 nregs = hard_regno_nregs (regno, mode);
214 if (nregs > vd->max_value_regs)
215 vd->max_value_regs = nregs;
216 }
217
218 /* Initialize VD such that there are no known relationships between regs. */
219
220 static void
init_value_data(struct value_data * vd)221 init_value_data (struct value_data *vd)
222 {
223 int i;
224 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
225 {
226 vd->e[i].mode = VOIDmode;
227 vd->e[i].oldest_regno = i;
228 vd->e[i].next_regno = INVALID_REGNUM;
229 vd->e[i].debug_insn_changes = NULL;
230 }
231 vd->max_value_regs = 0;
232 vd->n_debug_insn_changes = 0;
233 }
234
235 /* Called through note_stores. If X is clobbered, kill its value. */
236
237 static void
kill_clobbered_value(rtx x,const_rtx set,void * data)238 kill_clobbered_value (rtx x, const_rtx set, void *data)
239 {
240 struct value_data *const vd = (struct value_data *) data;
241
242 if (GET_CODE (set) == CLOBBER)
243 kill_value (x, vd);
244 }
245
/* A structure passed as data to kill_set_value through note_stores.  */
struct kill_set_value_data
{
  /* The value-tracking state to update.  */
  struct value_data *vd;
  /* A destination register to leave untouched (or NULL_RTX); see the
     CALL_INSN_FUNCTION_USAGE handling in copyprop_hardreg_forward_1.  */
  rtx ignore_set_reg;
};
252
253 /* Called through note_stores. If X is set, not clobbered, kill its
254 current value and install it as the root of its own value list. */
255
256 static void
kill_set_value(rtx x,const_rtx set,void * data)257 kill_set_value (rtx x, const_rtx set, void *data)
258 {
259 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
260 if (rtx_equal_p (x, ksvd->ignore_set_reg))
261 return;
262
263 if (GET_CODE (set) != CLOBBER)
264 {
265 kill_value (x, ksvd->vd);
266 if (REG_P (x))
267 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
268 }
269 }
270
/* Kill any register used in X as the base of an auto-increment expression,
   and install that register as the root of its own value list.  */

static void
kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
	{
	  /* Operand 0 of an autoinc rtx is the register being
	     modified; its old value is gone, but the register itself
	     is still live in its own mode.  */
	  x = XEXP (x, 0);
	  kill_value (x, vd);
	  set_value_regno (REGNO (x), GET_MODE (x), vd);
	  /* No nested autoincs inside the modified register.  */
	  iter.skip_subrtxes ();
	}
    }
}
290
/* Assert that SRC has been copied to DEST.  Adjust the data structures
   to reflect that SRC contains an older copy of the shared value.  */

static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers: patterns
     may rely on seeing a particular fixed register, and users can
     expect the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = REG_NREGS (dest);
  sn = REG_NREGS (src);
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
					       vd->e[sr].mode), 0U))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
    return;

  /* Link DR at the end of the value chain used by SR.  */

  /* DEST shares SR's root, so lookups starting from DEST find the
     oldest copy of the value.  */
  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  /* Walk to the tail of SR's chain and append DR there.  */
  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

  if (flag_checking)
    validate_value_data (vd);
}
372
373 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
374
375 static bool
mode_change_ok(machine_mode orig_mode,machine_mode new_mode,unsigned int regno ATTRIBUTE_UNUSED)376 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
377 unsigned int regno ATTRIBUTE_UNUSED)
378 {
379 if (partial_subreg_p (orig_mode, new_mode))
380 return false;
381
382 return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
383 }
384
/* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
   was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
   in NEW_MODE.
   Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */

static rtx
maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
		   machine_mode new_mode, unsigned int regno,
		   unsigned int copy_regno ATTRIBUTE_UNUSED)
{
  /* If the copy was narrower than both the original and the new use,
     the middle step lost bits we would need; give up.  */
  if (partial_subreg_p (copy_mode, orig_mode)
      && partial_subreg_p (copy_mode, new_mode))
    return NULL_RTX;

  /* Avoid creating multiple copies of the stack pointer.  Some ports
     assume there is one and only one stack pointer.

     It's unclear if we need to do the same for other special registers.  */
  if (regno == STACK_POINTER_REGNUM)
    return NULL_RTX;

  if (orig_mode == new_mode)
    return gen_raw_REG (new_mode, regno);
  else if (mode_change_ok (orig_mode, new_mode, regno))
    {
      int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
      int use_nregs = hard_regno_nregs (copy_regno, new_mode);
      poly_uint64 bytes_per_reg;
      /* Bail out if COPY_MODE's size doesn't divide evenly over its
	 hard registers (can't compute a per-register byte count).  */
      if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
			    copy_nregs, &bytes_per_reg))
	return NULL_RTX;
      /* Byte offset of the USE_NREGS-register portion of the copy
	 that the NEW_MODE access reads.  */
      poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
      /* Translate that into an offset within the ORIG_MODE value.  */
      poly_uint64 offset
	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
				      GET_MODE_SIZE (orig_mode));
      /* Pick the hard register within the original value that the
	 offset lands on, and verify it can hold NEW_MODE.  */
      regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
      if (targetm.hard_regno_mode_ok (regno, new_mode))
	return gen_raw_REG (new_mode, regno);
    }
  return NULL_RTX;
}
426
/* Find the oldest copy of the value contained in REGNO that is in
   register class CL and has mode MODE.  If found, return an rtx
   of that oldest register, otherwise return NULL.  */

static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);
  unsigned int i;

  /* Only hard registers are tracked.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  /* If we are accessing REG in some mode other that what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode
      && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
    return NULL_RTX;

  /* Walk the value chain from the oldest copy toward REGNO, returning
     the first member that is in class CL and usable in MODE.  */
  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  /* Preserve the attributes of the register being replaced.  */
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
471
/* If possible, replace the register at *LOC with the oldest register
   in register class CL.  Return true if successfully replaced.  */

static bool
replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
			  struct value_data *vd)
{
  rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
  if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
    {
      if (DEBUG_INSN_P (insn))
	{
	  /* Don't modify DEBUG_INSNs immediately; queue the change on
	     the replacement register's list so it is only applied if
	     that register is later used in a real insn (see the
	     comment above struct queued_debug_insn_change).  */
	  struct queued_debug_insn_change *change;

	  if (dump_file)
	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

	  change = queued_debug_insn_change_pool.allocate ();
	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
	  change->insn = insn;
	  change->loc = loc;
	  change->new_rtx = new_rtx;
	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
	  ++vd->n_debug_insn_changes;
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

      /* Group the change (in_group = 1); the caller commits or aborts
	 the whole group via apply_change_group.  */
      validate_change (insn, loc, new_rtx, 1);
      return true;
    }
  return false;
}
508
/* Similar to replace_oldest_value_reg, but *LOC contains an address.
   Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
   BASE_REG_CLASS depending on how the register is being considered.  */

static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   machine_mode mode, addr_space_t as,
			   rtx_insn *insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      if (DEBUG_INSN_P (insn))
	break;

      {
	/* Classify the two operands of the PLUS as index (locI) and
	   base (locB), mirroring find_reloads_address_1, so each is
	   replaced against the appropriate register class.  */
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;
	rtx *locB = NULL;
	enum rtx_code index_code = SCRATCH;

	/* Look through SUBREGs for the purpose of classification.  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

	/* A scaled/extended operand is the index; a MEM operand on
	   the other side also forces this choice.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	/* A constant operand leaves only a base to consider.  */
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	/* reg + reg: decide which is index and which is base from what
	   the target accepts for each register.  */
	else if (code0 == REG && code1 == REG)
	  {
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	/* A lone REG operand is treated as the index.  */
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	/* Recurse on the classified operands with their classes.  */
	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      /* Never replace the register in an auto-modify address; the
	 side effect is tied to that specific register.  */
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  /* Generic walk over any other rtx: recurse into all subexpressions
     with the same class information.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}
658
659 /* Similar to replace_oldest_value_reg, but X contains a memory. */
660
661 static bool
replace_oldest_value_mem(rtx x,rtx_insn * insn,struct value_data * vd)662 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
663 {
664 enum reg_class cl;
665
666 if (DEBUG_INSN_P (insn))
667 cl = ALL_REGS;
668 else
669 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
670
671 return replace_oldest_value_addr (&XEXP (x, 0), cl,
672 GET_MODE (x), MEM_ADDR_SPACE (x),
673 insn, vd);
674 }
675
676 /* Apply all queued updates for DEBUG_INSNs that change some reg to
677 register REGNO. */
678
679 static void
apply_debug_insn_changes(struct value_data * vd,unsigned int regno)680 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
681 {
682 struct queued_debug_insn_change *change;
683 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
684
685 for (change = vd->e[regno].debug_insn_changes;
686 change;
687 change = change->next)
688 {
689 if (last_insn != change->insn)
690 {
691 apply_change_group ();
692 last_insn = change->insn;
693 }
694 validate_change (change->insn, change->loc, change->new_rtx, 1);
695 }
696 apply_change_group ();
697 }
698
/* Called via note_uses, for all used registers in a real insn
   apply DEBUG_INSN changes that change registers to the used
   registers.  */

static void
cprop_find_used_regs (rtx *loc, void *data)
{
  struct value_data *const vd = (struct value_data *) data;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  /* The register is used by a real insn, so the queued
	     DEBUG_INSN substitutions to it are now safe to commit;
	     apply them and release the queue.  */
	  if (vd->e[regno].debug_insn_changes)
	    {
	      apply_debug_insn_changes (vd, regno);
	      free_debug_insn_changes (vd, regno);
	    }
	}
    }
}
722
/* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD:
   walk every store in INSN and kill the values of clobbered regs.  */

static void
kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
{
  note_stores (insn, kill_clobbered_value, vd);
}
730
731 /* Perform the forward copy propagation on basic block BB. */
732
733 static bool
copyprop_hardreg_forward_1(basic_block bb,struct value_data * vd)734 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
735 {
736 bool anything_changed = false;
737 rtx_insn *insn, *next;
738
739 for (insn = BB_HEAD (bb); ; insn = next)
740 {
741 int n_ops, i, predicated;
742 bool is_asm, any_replacements;
743 rtx set;
744 rtx link;
745 bool changed = false;
746 struct kill_set_value_data ksvd;
747
748 next = NEXT_INSN (insn);
749 if (!NONDEBUG_INSN_P (insn))
750 {
751 if (DEBUG_BIND_INSN_P (insn))
752 {
753 rtx loc = INSN_VAR_LOCATION_LOC (insn);
754 if (!VAR_LOC_UNKNOWN_P (loc))
755 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
756 ALL_REGS, GET_MODE (loc),
757 ADDR_SPACE_GENERIC, insn, vd);
758 }
759
760 if (insn == BB_END (bb))
761 break;
762 else
763 continue;
764 }
765
766 set = single_set (insn);
767
768 /* Detect noop sets and remove them before processing side effects. */
769 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
770 {
771 unsigned int regno = REGNO (SET_SRC (set));
772 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
773 SET_DEST (set), vd);
774 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
775 SET_SRC (set), vd);
776 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
777 {
778 bool last = insn == BB_END (bb);
779 delete_insn (insn);
780 if (last)
781 break;
782 continue;
783 }
784 }
785
786 /* Detect obviously dead sets (via REG_UNUSED notes) and remove them. */
787 if (set
788 && !RTX_FRAME_RELATED_P (insn)
789 && !may_trap_p (set)
790 && find_reg_note (insn, REG_UNUSED, SET_DEST (set))
791 && !side_effects_p (SET_SRC (set))
792 && !side_effects_p (SET_DEST (set)))
793 {
794 bool last = insn == BB_END (bb);
795 delete_insn (insn);
796 if (last)
797 break;
798 continue;
799 }
800
801
802 extract_constrain_insn (insn);
803 preprocess_constraints (insn);
804 const operand_alternative *op_alt = which_op_alt ();
805 n_ops = recog_data.n_operands;
806 is_asm = asm_noperands (PATTERN (insn)) >= 0;
807
808 /* Simplify the code below by promoting OP_OUT to OP_INOUT
809 in predicated instructions. */
810
811 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
812 for (i = 0; i < n_ops; ++i)
813 {
814 int matches = op_alt[i].matches;
815 if (matches >= 0 || op_alt[i].matched >= 0
816 || (predicated && recog_data.operand_type[i] == OP_OUT))
817 recog_data.operand_type[i] = OP_INOUT;
818 }
819
820 /* Apply changes to earlier DEBUG_INSNs if possible. */
821 if (vd->n_debug_insn_changes)
822 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
823
824 /* For each earlyclobber operand, zap the value data. */
825 for (i = 0; i < n_ops; i++)
826 if (op_alt[i].earlyclobber)
827 kill_value (recog_data.operand[i], vd);
828
829 /* Within asms, a clobber cannot overlap inputs or outputs.
830 I wouldn't think this were true for regular insns, but
831 scan_rtx treats them like that... */
832 kill_clobbered_values (insn, vd);
833
834 /* Kill all auto-incremented values. */
835 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
836 kill_autoinc_value (insn, vd);
837
838 /* Kill all early-clobbered operands. */
839 for (i = 0; i < n_ops; i++)
840 if (op_alt[i].earlyclobber)
841 kill_value (recog_data.operand[i], vd);
842
843 /* If we have dead sets in the insn, then we need to note these as we
844 would clobbers. */
845 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
846 {
847 if (REG_NOTE_KIND (link) == REG_UNUSED)
848 {
849 kill_value (XEXP (link, 0), vd);
850 /* Furthermore, if the insn looked like a single-set,
851 but the dead store kills the source value of that
852 set, then we can no-longer use the plain move
853 special case below. */
854 if (set
855 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
856 set = NULL;
857 }
858
859 /* We need to keep CFI info correct, and the same on all paths,
860 so we cannot normally replace the registers REG_CFA_REGISTER
861 refers to. Bail. */
862 if (REG_NOTE_KIND (link) == REG_CFA_REGISTER)
863 goto did_replacement;
864 }
865
866 /* Special-case plain move instructions, since we may well
867 be able to do the move from a different register class. */
868 if (set && REG_P (SET_SRC (set)))
869 {
870 rtx src = SET_SRC (set);
871 unsigned int regno = REGNO (src);
872 machine_mode mode = GET_MODE (src);
873 unsigned int i;
874 rtx new_rtx;
875
876 /* If we are accessing SRC in some mode other that what we
877 set it in, make sure that the replacement is valid. */
878 if (mode != vd->e[regno].mode)
879 {
880 if (REG_NREGS (src)
881 > hard_regno_nregs (regno, vd->e[regno].mode))
882 goto no_move_special_case;
883
884 /* And likewise, if we are narrowing on big endian the transformation
885 is also invalid. */
886 if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
887 && maybe_ne (subreg_lowpart_offset (mode,
888 vd->e[regno].mode), 0U))
889 goto no_move_special_case;
890 }
891
892 /* If the destination is also a register, try to find a source
893 register in the same class. */
894 if (REG_P (SET_DEST (set)))
895 {
896 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
897 src, vd);
898
899 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
900 {
901 if (dump_file)
902 fprintf (dump_file,
903 "insn %u: replaced reg %u with %u\n",
904 INSN_UID (insn), regno, REGNO (new_rtx));
905 changed = true;
906 goto did_replacement;
907 }
908 /* We need to re-extract as validate_change clobbers
909 recog_data. */
910 extract_constrain_insn (insn);
911 preprocess_constraints (insn);
912 }
913
914 /* Otherwise, try all valid registers and see if its valid. */
915 for (i = vd->e[regno].oldest_regno; i != regno;
916 i = vd->e[i].next_regno)
917 {
918 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
919 mode, i, regno);
920 if (new_rtx != NULL_RTX)
921 {
922 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
923 {
924 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
925 REG_ATTRS (new_rtx) = REG_ATTRS (src);
926 REG_POINTER (new_rtx) = REG_POINTER (src);
927 if (dump_file)
928 fprintf (dump_file,
929 "insn %u: replaced reg %u with %u\n",
930 INSN_UID (insn), regno, REGNO (new_rtx));
931 changed = true;
932 goto did_replacement;
933 }
934 /* We need to re-extract as validate_change clobbers
935 recog_data. */
936 extract_constrain_insn (insn);
937 preprocess_constraints (insn);
938 }
939 }
940 }
941 no_move_special_case:
942
943 any_replacements = false;
944
945 /* For each input operand, replace a hard register with the
946 eldest live copy that's in an appropriate register class. */
947 for (i = 0; i < n_ops; i++)
948 {
949 bool replaced = false;
950
951 /* Don't scan match_operand here, since we've no reg class
952 information to pass down. Any operands that we could
953 substitute in will be represented elsewhere. */
954 if (recog_data.constraints[i][0] == '\0')
955 continue;
956
957 /* Don't replace in asms intentionally referencing hard regs. */
958 if (is_asm && REG_P (recog_data.operand[i])
959 && (REGNO (recog_data.operand[i])
960 == ORIGINAL_REGNO (recog_data.operand[i])))
961 continue;
962
963 if (recog_data.operand_type[i] == OP_IN)
964 {
965 if (op_alt[i].is_address)
966 replaced
967 = replace_oldest_value_addr (recog_data.operand_loc[i],
968 alternative_class (op_alt, i),
969 VOIDmode, ADDR_SPACE_GENERIC,
970 insn, vd);
971 else if (REG_P (recog_data.operand[i]))
972 replaced
973 = replace_oldest_value_reg (recog_data.operand_loc[i],
974 alternative_class (op_alt, i),
975 insn, vd);
976 else if (MEM_P (recog_data.operand[i]))
977 replaced = replace_oldest_value_mem (recog_data.operand[i],
978 insn, vd);
979 }
980 else if (MEM_P (recog_data.operand[i]))
981 replaced = replace_oldest_value_mem (recog_data.operand[i],
982 insn, vd);
983
984 /* If we performed any replacement, update match_dups. */
985 if (replaced)
986 {
987 int j;
988 rtx new_rtx;
989
990 new_rtx = *recog_data.operand_loc[i];
991 recog_data.operand[i] = new_rtx;
992 for (j = 0; j < recog_data.n_dups; j++)
993 if (recog_data.dup_num[j] == i)
994 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
995
996 any_replacements = true;
997 }
998 }
999
1000 if (any_replacements)
1001 {
1002 if (! apply_change_group ())
1003 {
1004 if (dump_file)
1005 fprintf (dump_file,
1006 "insn %u: reg replacements not verified\n",
1007 INSN_UID (insn));
1008 }
1009 else
1010 changed = true;
1011 }
1012
1013 did_replacement:
1014 if (changed)
1015 {
1016 anything_changed = true;
1017
1018 /* If something changed, perhaps further changes to earlier
1019 DEBUG_INSNs can be applied. */
1020 if (vd->n_debug_insn_changes)
1021 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1022 df_insn_rescan (insn);
1023 }
1024
1025 ksvd.vd = vd;
1026 ksvd.ignore_set_reg = NULL_RTX;
1027
1028 /* Clobber call-clobbered registers. */
1029 if (CALL_P (insn))
1030 {
1031 unsigned int set_regno = INVALID_REGNUM;
1032 unsigned int set_nregs = 0;
1033 unsigned int regno;
1034 rtx exp;
1035
1036 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1037 {
1038 rtx x = XEXP (exp, 0);
1039 if (GET_CODE (x) == SET)
1040 {
1041 rtx dest = SET_DEST (x);
1042 kill_value (dest, vd);
1043 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1044 copy_value (dest, SET_SRC (x), vd);
1045 ksvd.ignore_set_reg = dest;
1046 set_regno = REGNO (dest);
1047 set_nregs = REG_NREGS (dest);
1048 break;
1049 }
1050 }
1051
1052 function_abi callee_abi = insn_callee_abi (insn);
1053 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1054 if (vd->e[regno].mode != VOIDmode
1055 && callee_abi.clobbers_reg_p (vd->e[regno].mode, regno)
1056 && (regno < set_regno || regno >= set_regno + set_nregs))
1057 kill_value_regno (regno, 1, vd);
1058
1059 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1060 of the SET isn't clobbered by CALLEE_ABI, but instead among
1061 CLOBBERs on the CALL_INSN, we could wrongly assume the
1062 value in it is still live. */
1063 if (ksvd.ignore_set_reg)
1064 kill_clobbered_values (insn, vd);
1065 }
1066
1067 bool copy_p = (set
1068 && REG_P (SET_DEST (set))
1069 && REG_P (SET_SRC (set)));
1070 bool noop_p = (copy_p
1071 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1072
1073 /* If a noop move is using narrower mode than we have recorded,
1074 we need to either remove the noop move, or kill_set_value. */
1075 if (noop_p
1076 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1077 vd->e[REGNO (SET_DEST (set))].mode))
1078 {
1079 if (noop_move_p (insn))
1080 {
1081 bool last = insn == BB_END (bb);
1082 delete_insn (insn);
1083 if (last)
1084 break;
1085 }
1086 else
1087 noop_p = false;
1088 }
1089
1090 if (!noop_p)
1091 {
1092 /* Notice stores. */
1093 note_stores (insn, kill_set_value, &ksvd);
1094
1095 /* Notice copies. */
1096 if (copy_p)
1097 {
1098 df_insn_rescan (insn);
1099 copy_value (SET_DEST (set), SET_SRC (set), vd);
1100 }
1101 }
1102
1103 if (insn == BB_END (bb))
1104 break;
1105 }
1106
1107 return anything_changed;
1108 }
1109
1110 /* Dump the value chain data to stderr. */
1111
1112 DEBUG_FUNCTION void
debug_value_data(struct value_data * vd)1113 debug_value_data (struct value_data *vd)
1114 {
1115 HARD_REG_SET set;
1116 unsigned int i, j;
1117
1118 CLEAR_HARD_REG_SET (set);
1119
1120 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1121 if (vd->e[i].oldest_regno == i)
1122 {
1123 if (vd->e[i].mode == VOIDmode)
1124 {
1125 if (vd->e[i].next_regno != INVALID_REGNUM)
1126 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1127 i, vd->e[i].next_regno);
1128 continue;
1129 }
1130
1131 SET_HARD_REG_BIT (set, i);
1132 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1133
1134 for (j = vd->e[i].next_regno;
1135 j != INVALID_REGNUM;
1136 j = vd->e[j].next_regno)
1137 {
1138 if (TEST_HARD_REG_BIT (set, j))
1139 {
1140 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1141 return;
1142 }
1143
1144 if (vd->e[j].oldest_regno != i)
1145 {
1146 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1147 j, vd->e[j].oldest_regno);
1148 return;
1149 }
1150 SET_HARD_REG_BIT (set, j);
1151 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1152 }
1153 fputc ('\n', stderr);
1154 }
1155
1156 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1157 if (! TEST_HARD_REG_BIT (set, i)
1158 && (vd->e[i].mode != VOIDmode
1159 || vd->e[i].oldest_regno != i
1160 || vd->e[i].next_regno != INVALID_REGNUM))
1161 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1162 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1163 vd->e[i].next_regno);
1164 }
1165
/* Do copyprop_hardreg_forward_1 for a single basic block BB.
   DEBUG_INSNs are skipped here because we do not want to involve the
   DF-related machinery the way pass_cprop_hardreg::execute does.

   NOTE: Currently this is only used for shrink-wrap.  Maybe extend it
   to handle DEBUG_INSNs for other uses.  */
1172
1173 void
copyprop_hardreg_forward_bb_without_debug_insn(basic_block bb)1174 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1175 {
1176 struct value_data *vd;
1177 vd = XNEWVEC (struct value_data, 1);
1178 init_value_data (vd);
1179
1180 skip_debug_insn_p = true;
1181 copyprop_hardreg_forward_1 (bb, vd);
1182 free (vd);
1183 skip_debug_insn_p = false;
1184 }
1185
1186 static void
validate_value_data(struct value_data * vd)1187 validate_value_data (struct value_data *vd)
1188 {
1189 HARD_REG_SET set;
1190 unsigned int i, j;
1191
1192 CLEAR_HARD_REG_SET (set);
1193
1194 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1195 if (vd->e[i].oldest_regno == i)
1196 {
1197 if (vd->e[i].mode == VOIDmode)
1198 {
1199 if (vd->e[i].next_regno != INVALID_REGNUM)
1200 internal_error ("%qs: [%u] bad %<next_regno%> for empty chain (%u)",
1201 __func__, i, vd->e[i].next_regno);
1202 continue;
1203 }
1204
1205 SET_HARD_REG_BIT (set, i);
1206
1207 for (j = vd->e[i].next_regno;
1208 j != INVALID_REGNUM;
1209 j = vd->e[j].next_regno)
1210 {
1211 if (TEST_HARD_REG_BIT (set, j))
1212 internal_error ("%qs: loop in %<next_regno%> chain (%u)",
1213 __func__, j);
1214 if (vd->e[j].oldest_regno != i)
1215 internal_error ("%qs: [%u] bad %<oldest_regno%> (%u)",
1216 __func__, j, vd->e[j].oldest_regno);
1217
1218 SET_HARD_REG_BIT (set, j);
1219 }
1220 }
1221
1222 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1223 if (! TEST_HARD_REG_BIT (set, i)
1224 && (vd->e[i].mode != VOIDmode
1225 || vd->e[i].oldest_regno != i
1226 || vd->e[i].next_regno != INVALID_REGNUM))
1227 internal_error ("%qs: [%u] non-empty register in chain (%s %u %i)",
1228 __func__, i,
1229 GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1230 vd->e[i].next_regno);
1231 }
1232
1233
1234 namespace {
1235
/* Pass descriptor for the hard-register copy propagation RTL pass.  */
const pass_data pass_data_cprop_hardreg =
{
  RTL_PASS, /* type */
  "cprop_hardreg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CPROP_REGISTERS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
1248
1249 class pass_cprop_hardreg : public rtl_opt_pass
1250 {
1251 public:
pass_cprop_hardreg(gcc::context * ctxt)1252 pass_cprop_hardreg (gcc::context *ctxt)
1253 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1254 {}
1255
1256 /* opt_pass methods: */
gate(function *)1257 virtual bool gate (function *)
1258 {
1259 return (optimize > 0 && (flag_cprop_registers));
1260 }
1261
1262 virtual unsigned int execute (function *);
1263
1264 }; // class pass_cprop_hardreg
1265
1266 static bool
cprop_hardreg_bb(basic_block bb,struct value_data * all_vd,sbitmap visited)1267 cprop_hardreg_bb (basic_block bb, struct value_data *all_vd, sbitmap visited)
1268 {
1269 bitmap_set_bit (visited, bb->index);
1270
1271 /* If a block has a single predecessor, that we've already
1272 processed, begin with the value data that was live at
1273 the end of the predecessor block. */
1274 /* ??? Ought to use more intelligent queuing of blocks. */
1275 if (single_pred_p (bb)
1276 && bitmap_bit_p (visited, single_pred (bb)->index)
1277 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1278 {
1279 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1280 if (all_vd[bb->index].n_debug_insn_changes)
1281 {
1282 unsigned int regno;
1283
1284 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1285 {
1286 if (all_vd[bb->index].e[regno].debug_insn_changes)
1287 {
1288 struct queued_debug_insn_change *cur;
1289 for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1290 cur; cur = cur->next)
1291 --all_vd[bb->index].n_debug_insn_changes;
1292 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1293 if (all_vd[bb->index].n_debug_insn_changes == 0)
1294 break;
1295 }
1296 }
1297 }
1298 }
1299 else
1300 init_value_data (all_vd + bb->index);
1301
1302 return copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1303 }
1304
1305 static void
cprop_hardreg_debug(function * fun,struct value_data * all_vd)1306 cprop_hardreg_debug (function *fun, struct value_data *all_vd)
1307 {
1308 basic_block bb;
1309
1310 FOR_EACH_BB_FN (bb, fun)
1311 if (all_vd[bb->index].n_debug_insn_changes)
1312 {
1313 unsigned int regno;
1314 bitmap live;
1315
1316 live = df_get_live_out (bb);
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if (all_vd[bb->index].e[regno].debug_insn_changes)
1319 {
1320 if (REGNO_REG_SET_P (live, regno))
1321 apply_debug_insn_changes (all_vd + bb->index, regno);
1322
1323 struct queued_debug_insn_change *cur;
1324 for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1325 cur; cur = cur->next)
1326 --all_vd[bb->index].n_debug_insn_changes;
1327 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1328 if (all_vd[bb->index].n_debug_insn_changes == 0)
1329 break;
1330 }
1331 }
1332
1333 queued_debug_insn_change_pool.release ();
1334 }
1335
1336 unsigned int
execute(function * fun)1337 pass_cprop_hardreg::execute (function *fun)
1338 {
1339 struct value_data *all_vd;
1340 basic_block bb;
1341
1342 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1343
1344 auto_sbitmap visited (last_basic_block_for_fn (fun));
1345 bitmap_clear (visited);
1346
1347 auto_vec<int> worklist;
1348 bool any_debug_changes = false;
1349
1350 /* We need accurate notes. Earlier passes such as if-conversion may
1351 leave notes in an inconsistent state. */
1352 df_note_add_problem ();
1353 df_analyze ();
1354
1355 /* It is tempting to set DF_LR_RUN_DCE, but DCE may choose to delete
1356 an insn and this pass would not have visibility into the removal.
1357 This pass would then potentially use the source of that
1358 INSN for propagation purposes, generating invalid code.
1359
1360 So we just ask for updated notes and handle trivial deletions
1361 within this pass where we can update this passes internal
1362 data structures appropriately. */
1363 df_set_flags (DF_DEFER_INSN_RESCAN);
1364
1365 FOR_EACH_BB_FN (bb, fun)
1366 {
1367 if (cprop_hardreg_bb (bb, all_vd, visited))
1368 worklist.safe_push (bb->index);
1369 if (all_vd[bb->index].n_debug_insn_changes)
1370 any_debug_changes = true;
1371 }
1372
1373 /* We must call df_analyze here unconditionally to ensure that the
1374 REG_UNUSED and REG_DEAD notes are consistent with and without -g. */
1375 df_analyze ();
1376
1377 if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
1378 cprop_hardreg_debug (fun, all_vd);
1379
1380 /* Second pass if we've changed anything, only for the bbs where we have
1381 changed anything though. */
1382 if (!worklist.is_empty ())
1383 {
1384 unsigned int i;
1385 int index;
1386
1387 any_debug_changes = false;
1388 bitmap_clear (visited);
1389 FOR_EACH_VEC_ELT (worklist, i, index)
1390 {
1391 bb = BASIC_BLOCK_FOR_FN (fun, index);
1392 cprop_hardreg_bb (bb, all_vd, visited);
1393 if (all_vd[bb->index].n_debug_insn_changes)
1394 any_debug_changes = true;
1395 }
1396
1397 df_analyze ();
1398 if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
1399 cprop_hardreg_debug (fun, all_vd);
1400 }
1401
1402 free (all_vd);
1403 return 0;
1404 }
1405
1406 } // anon namespace
1407
1408 rtl_opt_pass *
make_pass_cprop_hardreg(gcc::context * ctxt)1409 make_pass_cprop_hardreg (gcc::context *ctxt)
1410 {
1411 return new pass_cprop_hardreg (ctxt);
1412 }
1413