1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38
39 /* The following code does forward propagation of hard register copies.
40 The object is to eliminate as many dependencies as possible, so that
41 we have the most scheduling freedom. As a side effect, we also clean
42 up some silly register allocation decisions made by reload. This
43 code may be obsoleted by a new register allocator. */
44
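/* As an illustrative sketch (the register numbers and modes here are
   hypothetical, not taken from any particular target), the kind of
   rewrite performed is:

	(set (reg:SI 1) (reg:SI 0))
	(set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 4)))

   becoming

	(set (reg:SI 1) (reg:SI 0))
	(set (reg:SI 2) (plus:SI (reg:SI 0) (const_int 4)))

   so that the second insn no longer depends on the result of the copy.  */
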
45 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
46 lifetime of a register and get the DEBUG_INSN subsequently reset.
47 So they are queued instead, and updated only when the register is
48 used in some subsequent real insn before it is set. */
49 struct queued_debug_insn_change
50 {
51 struct queued_debug_insn_change *next;
52 rtx_insn *insn;
53 rtx *loc;
54 rtx new_rtx;
55 };
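
/* A rough sketch of the queue's lifecycle, as implemented below: a change
   is allocated and queued by replace_oldest_value_reg when the insn being
   rewritten is a DEBUG_INSN; it is applied by apply_debug_insn_changes
   once a later real insn is seen (via cprop_find_used_regs) to use the
   replacement register; and it is discarded by free_debug_insn_changes
   if that register's value is killed first.  */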
56
57 /* For each register, we have a list of registers that contain the same
58 value. The OLDEST_REGNO field points to the head of the list, and
59 the NEXT_REGNO field runs through the list. The MODE field indicates
60 what mode the data is known to be in; this field is VOIDmode when the
61 register is not known to contain valid data. */
62
63 struct value_data_entry
64 {
65 machine_mode mode;
66 unsigned int oldest_regno;
67 unsigned int next_regno;
68 struct queued_debug_insn_change *debug_insn_changes;
69 };
70
71 struct value_data
72 {
73 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
74 unsigned int max_value_regs;
75 unsigned int n_debug_insn_changes;
76 };
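
/* For example (a sketch; the register numbers are hypothetical), after

	(set (reg:SI 1) (reg:SI 0))
	(set (reg:SI 2) (reg:SI 1))

   the chain rooted at register 0 is 0 -> 1 -> 2: e[0].next_regno == 1,
   e[1].next_regno == 2, e[2].next_regno == INVALID_REGNUM, and all three
   entries have oldest_regno == 0.  */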
77
78 static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
79 ("debug insn changes pool");
80
81 static bool skip_debug_insn_p;
82
83 static void kill_value_one_regno (unsigned, struct value_data *);
84 static void kill_value_regno (unsigned, unsigned, struct value_data *);
85 static void kill_value (const_rtx, struct value_data *);
86 static void set_value_regno (unsigned, machine_mode, struct value_data *);
87 static void init_value_data (struct value_data *);
88 static void kill_clobbered_value (rtx, const_rtx, void *);
89 static void kill_set_value (rtx, const_rtx, void *);
90 static void copy_value (rtx, rtx, struct value_data *);
91 static bool mode_change_ok (machine_mode, machine_mode,
92 unsigned int);
93 static rtx maybe_mode_change (machine_mode, machine_mode,
94 machine_mode, unsigned int, unsigned int);
95 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
96 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
97 struct value_data *);
98 static bool replace_oldest_value_addr (rtx *, enum reg_class,
99 machine_mode, addr_space_t,
100 rtx_insn *, struct value_data *);
101 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
102 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
103 extern void debug_value_data (struct value_data *);
104 static void validate_value_data (struct value_data *);
105
106 /* Free all queued updates for DEBUG_INSNs that change some reg to
107 register REGNO. */
108
109 static void
110 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
111 {
112 struct queued_debug_insn_change *cur, *next;
113 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
114 {
115 next = cur->next;
116 --vd->n_debug_insn_changes;
117 queued_debug_insn_change_pool.remove (cur);
118 }
119 vd->e[regno].debug_insn_changes = NULL;
120 }
121
122 /* Kill register REGNO. This involves removing it from any value
123 lists, and resetting the value mode to VOIDmode. This is only a
124 helper function; it does not handle any hard registers overlapping
125 with REGNO. */
126
127 static void
128 kill_value_one_regno (unsigned int regno, struct value_data *vd)
129 {
130 unsigned int i, next;
131
132 if (vd->e[regno].oldest_regno != regno)
133 {
134 for (i = vd->e[regno].oldest_regno;
135 vd->e[i].next_regno != regno;
136 i = vd->e[i].next_regno)
137 continue;
138 vd->e[i].next_regno = vd->e[regno].next_regno;
139 }
140 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
141 {
142 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
143 vd->e[i].oldest_regno = next;
144 }
145
146 vd->e[regno].mode = VOIDmode;
147 vd->e[regno].oldest_regno = regno;
148 vd->e[regno].next_regno = INVALID_REGNUM;
149 if (vd->e[regno].debug_insn_changes)
150 free_debug_insn_changes (vd, regno);
151
152 if (flag_checking)
153 validate_value_data (vd);
154 }
155
156 /* Kill the value in register REGNO for NREGS, and any other registers
157 whose values overlap. */
158
159 static void
160 kill_value_regno (unsigned int regno, unsigned int nregs,
161 struct value_data *vd)
162 {
163 unsigned int j;
164
165 /* Kill the value we're told to kill. */
166 for (j = 0; j < nregs; ++j)
167 kill_value_one_regno (regno + j, vd);
168
169 /* Kill everything that overlapped what we're told to kill. */
170 if (regno < vd->max_value_regs)
171 j = 0;
172 else
173 j = regno - vd->max_value_regs;
174 for (; j < regno; ++j)
175 {
176 unsigned int i, n;
177 if (vd->e[j].mode == VOIDmode)
178 continue;
179 n = hard_regno_nregs (j, vd->e[j].mode);
180 if (j + n > regno)
181 for (i = 0; i < n; ++i)
182 kill_value_one_regno (j + i, vd);
183 }
184 }
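
/* For instance (a sketch, assuming DImode occupies two hard registers
   here): if register 2 is recorded in DImode, so that its value spans
   hard registers 2 and 3, then kill_value_regno (3, 1, vd) also kills
   register 2, because the backward scan above finds that the recorded
   value overlaps the registers being killed.  */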
185
186 /* Kill X. This is a convenience function wrapping kill_value_regno
187 so that we mind the mode the register is in. */
188
189 static void
190 kill_value (const_rtx x, struct value_data *vd)
191 {
192 if (GET_CODE (x) == SUBREG)
193 {
194 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
195 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
196 x = tmp ? tmp : SUBREG_REG (x);
197 }
198 if (REG_P (x))
199 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
200 }
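
/* E.g. (a sketch, assuming DImode spans two SImode-sized hard registers):
   for (subreg:SI (reg:DI 0) 4) the SUBREG usually simplifies to the single
   hard register that holds that word, so only that register is killed;
   if simplification fails, the whole inner register (both hard regs of
   reg 0) is killed instead.  */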
201
202 /* Remember that REGNO is valid in MODE. */
203
204 static void
205 set_value_regno (unsigned int regno, machine_mode mode,
206 struct value_data *vd)
207 {
208 unsigned int nregs;
209
210 vd->e[regno].mode = mode;
211
212 nregs = hard_regno_nregs (regno, mode);
213 if (nregs > vd->max_value_regs)
214 vd->max_value_regs = nregs;
215 }
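
/* MAX_VALUE_REGS tracks the widest value recorded so far; for instance
   (a sketch, assuming 32-bit hard registers), recording a DImode value
   raises it to at least 2.  kill_value_regno uses it to bound its
   backward scan for overlapping values.  */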
216
217 /* Initialize VD such that there are no known relationships between regs. */
218
219 static void
220 init_value_data (struct value_data *vd)
221 {
222 int i;
223 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
224 {
225 vd->e[i].mode = VOIDmode;
226 vd->e[i].oldest_regno = i;
227 vd->e[i].next_regno = INVALID_REGNUM;
228 vd->e[i].debug_insn_changes = NULL;
229 }
230 vd->max_value_regs = 0;
231 vd->n_debug_insn_changes = 0;
232 }
233
234 /* Called through note_stores. If X is clobbered, kill its value. */
235
236 static void
237 kill_clobbered_value (rtx x, const_rtx set, void *data)
238 {
239 struct value_data *const vd = (struct value_data *) data;
240 if (GET_CODE (set) == CLOBBER)
241 kill_value (x, vd);
242 }
243
244 /* A structure passed as data to kill_set_value through note_stores. */
245 struct kill_set_value_data
246 {
247 struct value_data *vd;
248 rtx ignore_set_reg;
249 };
250
251 /* Called through note_stores. If X is set, not clobbered, kill its
252 current value and install it as the root of its own value list. */
253
254 static void
255 kill_set_value (rtx x, const_rtx set, void *data)
256 {
257 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
258 if (rtx_equal_p (x, ksvd->ignore_set_reg))
259 return;
260 if (GET_CODE (set) != CLOBBER)
261 {
262 kill_value (x, ksvd->vd);
263 if (REG_P (x))
264 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
265 }
266 }
267
268 /* Kill any register used in X as the base of an auto-increment expression,
269 and install that register as the root of its own value list. */
270
271 static void
272 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
273 {
274 subrtx_iterator::array_type array;
275 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
276 {
277 const_rtx x = *iter;
278 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
279 {
280 x = XEXP (x, 0);
281 kill_value (x, vd);
282 set_value_regno (REGNO (x), GET_MODE (x), vd);
283 iter.skip_subrtxes ();
284 }
285 }
286 }
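
/* For example (a sketch), given an address such as
   (mem:SI (post_inc:SI (reg:SI 4))), the base register 4 is killed and
   then re-recorded as the root of its own value list, since the
   auto-increment gives it a new value that no other register is known
   to share.  */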
287
288 /* Assert that SRC has been copied to DEST. Adjust the data structures
289 to reflect that SRC contains an older copy of the shared value. */
290
291 static void
292 copy_value (rtx dest, rtx src, struct value_data *vd)
293 {
294 unsigned int dr = REGNO (dest);
295 unsigned int sr = REGNO (src);
296 unsigned int dn, sn;
297 unsigned int i;
298
299 /* ??? At present, it's possible to see noop sets. It'd be nice if
300 this were cleaned up beforehand... */
301 if (sr == dr)
302 return;
303
304 /* Do not propagate copies to the stack pointer, as that can leave
305 memory accesses with no scheduling dependency on the stack update. */
306 if (dr == STACK_POINTER_REGNUM)
307 return;
308
309 /* Likewise with the frame pointer, if we're using one. */
310 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
311 return;
312
313 /* Do not propagate copies to fixed or global registers: patterns
314 can rely on seeing a particular fixed register, and users can
315 expect the chosen global register in an asm. */
316 if (fixed_regs[dr] || global_regs[dr])
317 return;
318
319 /* If SRC and DEST overlap, don't record anything. */
320 dn = REG_NREGS (dest);
321 sn = REG_NREGS (src);
322 if ((dr > sr && dr < sr + sn)
323 || (sr > dr && sr < dr + dn))
324 return;
325
326 /* If SRC had no assigned mode (i.e. we didn't know it was live)
327 assign it now and assume the value came from an input argument
328 or somesuch. */
329 if (vd->e[sr].mode == VOIDmode)
330 set_value_regno (sr, vd->e[dr].mode, vd);
331
332 /* If we are narrowing the input to a smaller number of hard regs,
333 and it is in big endian, we are really extracting a high part.
334 Since we generally associate a low part of a value with the value itself,
335 we must not do the same for the high part.
336 Note we can still get low parts for the same mode combination through
337 a two-step copy involving differently sized hard regs.
338 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
339 (set (reg:DI r0) (reg:DI fr0))
340 (set (reg:SI fr2) (reg:SI r0))
341 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
342 (set (reg:SI fr2) (reg:SI fr0))
343 loads the high part of (reg:DI fr0) into fr2.
344
345 We can't properly represent the latter case in our tables, so don't
346 record anything then. */
347 else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
348 && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
349 vd->e[sr].mode), 0U))
350 return;
351
352 /* If SRC had been assigned a mode narrower than the copy, we can't
353 link DEST into the chain, because not all of the pieces of the
354 copy came from oldest_regno. */
355 else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
356 return;
357
358 /* Link DR at the end of the value chain used by SR. */
359
360 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
361
362 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
363 continue;
364 vd->e[i].next_regno = dr;
365
366 if (flag_checking)
367 validate_value_data (vd);
368 }
369
370 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
371
372 static bool
373 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
374 unsigned int regno ATTRIBUTE_UNUSED)
375 {
376 if (partial_subreg_p (orig_mode, new_mode))
377 return false;
378
379 return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
380 }
381
382 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
383 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
384 in NEW_MODE.
385 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
386
387 static rtx
388 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
389 machine_mode new_mode, unsigned int regno,
390 unsigned int copy_regno ATTRIBUTE_UNUSED)
391 {
392 if (partial_subreg_p (copy_mode, orig_mode)
393 && partial_subreg_p (copy_mode, new_mode))
394 return NULL_RTX;
395
396 /* Avoid creating multiple copies of the stack pointer. Some ports
397 assume there is one and only one stack pointer.
398
399 It's unclear if we need to do the same for other special registers. */
400 if (regno == STACK_POINTER_REGNUM)
401 return NULL_RTX;
402
403 if (orig_mode == new_mode)
404 return gen_raw_REG (new_mode, regno);
405 else if (mode_change_ok (orig_mode, new_mode, regno))
406 {
407 int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
408 int use_nregs = hard_regno_nregs (copy_regno, new_mode);
409 poly_uint64 bytes_per_reg;
410 if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
411 copy_nregs, &bytes_per_reg))
412 return NULL_RTX;
413 poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
414 poly_uint64 offset
415 = subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
416 GET_MODE_SIZE (orig_mode));
417 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
418 if (targetm.hard_regno_mode_ok (regno, new_mode))
419 return gen_raw_REG (new_mode, regno);
420 }
421 return NULL_RTX;
422 }
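
/* A minimal sketch (assuming a little-endian target whose hard registers
   are at least DImode wide and allow the mode change): if REGNO was set
   as (reg:DI 10), copied as (reg:DI 12), and (reg:SI 12) is then
   accessed, this returns (reg:SI 10), the low part of the original
   value.  On big-endian or multi-register layouts, the subreg offset
   arithmetic above picks the correct constituent hard register
   instead.  */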
423
424 /* Find the oldest copy of the value contained in REGNO that is in
425 register class CL and has mode MODE. If found, return an rtx
426 of that oldest register, otherwise return NULL. */
427
428 static rtx
429 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
430 {
431 unsigned int regno = REGNO (reg);
432 machine_mode mode = GET_MODE (reg);
433 unsigned int i;
434
435 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
436
437 /* If we are accessing REG in some mode other than what we set it in,
438 make sure that the replacement is valid. In particular, consider
439 (set (reg:DI r11) (...))
440 (set (reg:SI r9) (reg:SI r11))
441 (set (reg:SI r10) (...))
442 (set (...) (reg:DI r9))
443 Replacing r9 with r11 is invalid. */
444 if (mode != vd->e[regno].mode
445 && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
446 return NULL_RTX;
447
448 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
449 {
450 machine_mode oldmode = vd->e[i].mode;
451 rtx new_rtx;
452
453 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
454 continue;
455
456 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
457 if (new_rtx)
458 {
459 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
460 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
461 REG_POINTER (new_rtx) = REG_POINTER (reg);
462 return new_rtx;
463 }
464 }
465
466 return NULL_RTX;
467 }
468
469 /* If possible, replace the register at *LOC with the oldest register
470 in register class CL. Return true if successfully replaced. */
471
472 static bool
473 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
474 struct value_data *vd)
475 {
476 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
477 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
478 {
479 if (DEBUG_INSN_P (insn))
480 {
481 struct queued_debug_insn_change *change;
482
483 if (dump_file)
484 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
485 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
486
487 change = queued_debug_insn_change_pool.allocate ();
488 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
489 change->insn = insn;
490 change->loc = loc;
491 change->new_rtx = new_rtx;
492 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
493 ++vd->n_debug_insn_changes;
494 return true;
495 }
496 if (dump_file)
497 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
498 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
499
500 validate_change (insn, loc, new_rtx, 1);
501 return true;
502 }
503 return false;
504 }
505
506 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
507 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
508 BASE_REG_CLASS depending on how the register is being considered. */
509
510 static bool
511 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
512 machine_mode mode, addr_space_t as,
513 rtx_insn *insn, struct value_data *vd)
514 {
515 rtx x = *loc;
516 RTX_CODE code = GET_CODE (x);
517 const char *fmt;
518 int i, j;
519 bool changed = false;
520
521 switch (code)
522 {
523 case PLUS:
524 if (DEBUG_INSN_P (insn))
525 break;
526
527 {
528 rtx orig_op0 = XEXP (x, 0);
529 rtx orig_op1 = XEXP (x, 1);
530 RTX_CODE code0 = GET_CODE (orig_op0);
531 RTX_CODE code1 = GET_CODE (orig_op1);
532 rtx op0 = orig_op0;
533 rtx op1 = orig_op1;
534 rtx *locI = NULL;
535 rtx *locB = NULL;
536 enum rtx_code index_code = SCRATCH;
537
538 if (GET_CODE (op0) == SUBREG)
539 {
540 op0 = SUBREG_REG (op0);
541 code0 = GET_CODE (op0);
542 }
543
544 if (GET_CODE (op1) == SUBREG)
545 {
546 op1 = SUBREG_REG (op1);
547 code1 = GET_CODE (op1);
548 }
549
550 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
551 || code0 == ZERO_EXTEND || code1 == MEM)
552 {
553 locI = &XEXP (x, 0);
554 locB = &XEXP (x, 1);
555 index_code = GET_CODE (*locI);
556 }
557 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
558 || code1 == ZERO_EXTEND || code0 == MEM)
559 {
560 locI = &XEXP (x, 1);
561 locB = &XEXP (x, 0);
562 index_code = GET_CODE (*locI);
563 }
564 else if (code0 == CONST_INT || code0 == CONST
565 || code0 == SYMBOL_REF || code0 == LABEL_REF)
566 {
567 locB = &XEXP (x, 1);
568 index_code = GET_CODE (XEXP (x, 0));
569 }
570 else if (code1 == CONST_INT || code1 == CONST
571 || code1 == SYMBOL_REF || code1 == LABEL_REF)
572 {
573 locB = &XEXP (x, 0);
574 index_code = GET_CODE (XEXP (x, 1));
575 }
576 else if (code0 == REG && code1 == REG)
577 {
578 int index_op;
579 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
580
581 if (REGNO_OK_FOR_INDEX_P (regno1)
582 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
583 index_op = 1;
584 else if (REGNO_OK_FOR_INDEX_P (regno0)
585 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
586 index_op = 0;
587 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
588 || REGNO_OK_FOR_INDEX_P (regno1))
589 index_op = 1;
590 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
591 index_op = 0;
592 else
593 index_op = 1;
594
595 locI = &XEXP (x, index_op);
596 locB = &XEXP (x, !index_op);
597 index_code = GET_CODE (*locI);
598 }
599 else if (code0 == REG)
600 {
601 locI = &XEXP (x, 0);
602 locB = &XEXP (x, 1);
603 index_code = GET_CODE (*locI);
604 }
605 else if (code1 == REG)
606 {
607 locI = &XEXP (x, 1);
608 locB = &XEXP (x, 0);
609 index_code = GET_CODE (*locI);
610 }
611
612 if (locI)
613 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
614 mode, as, insn, vd);
615 if (locB)
616 changed |= replace_oldest_value_addr (locB,
617 base_reg_class (mode, as, PLUS,
618 index_code),
619 mode, as, insn, vd);
620 return changed;
621 }
622
623 case POST_INC:
624 case POST_DEC:
625 case POST_MODIFY:
626 case PRE_INC:
627 case PRE_DEC:
628 case PRE_MODIFY:
629 return false;
630
631 case MEM:
632 return replace_oldest_value_mem (x, insn, vd);
633
634 case REG:
635 return replace_oldest_value_reg (loc, cl, insn, vd);
636
637 default:
638 break;
639 }
640
641 fmt = GET_RTX_FORMAT (code);
642 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
643 {
644 if (fmt[i] == 'e')
645 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
646 insn, vd);
647 else if (fmt[i] == 'E')
648 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
649 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
650 mode, as, insn, vd);
651 }
652
653 return changed;
654 }
655
656 /* Similar to replace_oldest_value_reg, but X contains a memory. */
657
658 static bool
659 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
660 {
661 enum reg_class cl;
662
663 if (DEBUG_INSN_P (insn))
664 cl = ALL_REGS;
665 else
666 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
667
668 return replace_oldest_value_addr (&XEXP (x, 0), cl,
669 GET_MODE (x), MEM_ADDR_SPACE (x),
670 insn, vd);
671 }
672
673 /* Apply all queued updates for DEBUG_INSNs that change some reg to
674 register REGNO. */
675
676 static void
677 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
678 {
679 struct queued_debug_insn_change *change;
680 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
681
682 for (change = vd->e[regno].debug_insn_changes;
683 change;
684 change = change->next)
685 {
686 if (last_insn != change->insn)
687 {
688 apply_change_group ();
689 last_insn = change->insn;
690 }
691 validate_change (change->insn, change->loc, change->new_rtx, 1);
692 }
693 apply_change_group ();
694 }
695
696 /* Called via note_uses. For every register used in a real insn,
697 apply any queued DEBUG_INSN changes that rewrite some register
698 to that used register. */
699
700 static void
701 cprop_find_used_regs (rtx *loc, void *data)
702 {
703 struct value_data *const vd = (struct value_data *) data;
704 subrtx_iterator::array_type array;
705 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
706 {
707 const_rtx x = *iter;
708 if (REG_P (x))
709 {
710 unsigned int regno = REGNO (x);
711 if (vd->e[regno].debug_insn_changes)
712 {
713 apply_debug_insn_changes (vd, regno);
714 free_debug_insn_changes (vd, regno);
715 }
716 }
717 }
718 }
719
720 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
721
722 static void
723 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
724 {
725 note_stores (PATTERN (insn), kill_clobbered_value, vd);
726
727 if (CALL_P (insn))
728 {
729 rtx exp;
730
731 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
732 {
733 rtx x = XEXP (exp, 0);
734 if (GET_CODE (x) == CLOBBER)
735 kill_value (SET_DEST (x), vd);
736 }
737 }
738 }
739
740 /* Perform the forward copy propagation on basic block BB. */
741
742 static bool
743 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
744 {
745 bool anything_changed = false;
746 rtx_insn *insn, *next;
747
748 for (insn = BB_HEAD (bb); ; insn = next)
749 {
750 int n_ops, i, predicated;
751 bool is_asm, any_replacements;
752 rtx set;
753 rtx link;
754 bool changed = false;
755 struct kill_set_value_data ksvd;
756
757 next = NEXT_INSN (insn);
758 if (!NONDEBUG_INSN_P (insn))
759 {
760 if (DEBUG_BIND_INSN_P (insn))
761 {
762 rtx loc = INSN_VAR_LOCATION_LOC (insn);
763 if (!VAR_LOC_UNKNOWN_P (loc))
764 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
765 ALL_REGS, GET_MODE (loc),
766 ADDR_SPACE_GENERIC, insn, vd);
767 }
768
769 if (insn == BB_END (bb))
770 break;
771 else
772 continue;
773 }
774
775 set = single_set (insn);
776
777 /* Detect noop sets and remove them before processing side effects. */
778 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
779 {
780 unsigned int regno = REGNO (SET_SRC (set));
781 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
782 SET_DEST (set), vd);
783 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
784 SET_SRC (set), vd);
785 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
786 {
787 bool last = insn == BB_END (bb);
788 delete_insn (insn);
789 if (last)
790 break;
791 continue;
792 }
793 }
794
795 extract_constrain_insn (insn);
796 preprocess_constraints (insn);
797 const operand_alternative *op_alt = which_op_alt ();
798 n_ops = recog_data.n_operands;
799 is_asm = asm_noperands (PATTERN (insn)) >= 0;
800
801 /* Simplify the code below by promoting OP_OUT to OP_INOUT
802 in predicated instructions. */
803
804 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
805 for (i = 0; i < n_ops; ++i)
806 {
807 int matches = op_alt[i].matches;
808 if (matches >= 0 || op_alt[i].matched >= 0
809 || (predicated && recog_data.operand_type[i] == OP_OUT))
810 recog_data.operand_type[i] = OP_INOUT;
811 }
812
813 /* Apply changes to earlier DEBUG_INSNs if possible. */
814 if (vd->n_debug_insn_changes)
815 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
816
817 /* For each earlyclobber operand, zap the value data. */
818 for (i = 0; i < n_ops; i++)
819 if (op_alt[i].earlyclobber)
820 kill_value (recog_data.operand[i], vd);
821
822 /* Within asms, a clobber cannot overlap inputs or outputs.
823 I wouldn't think this were true for regular insns, but
824 scan_rtx treats them like that... */
825 kill_clobbered_values (insn, vd);
826
827 /* Kill all auto-incremented values. */
828 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
829 kill_autoinc_value (insn, vd);
830
831 /* Kill all early-clobbered operands. */
832 for (i = 0; i < n_ops; i++)
833 if (op_alt[i].earlyclobber)
834 kill_value (recog_data.operand[i], vd);
835
836 /* If we have dead sets in the insn, then we need to note these as we
837 would clobbers. */
838 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
839 {
840 if (REG_NOTE_KIND (link) == REG_UNUSED)
841 {
842 kill_value (XEXP (link, 0), vd);
843 /* Furthermore, if the insn looked like a single-set,
844 but the dead store kills the source value of that
845 set, then we can no longer use the plain move
846 special case below. */
847 if (set
848 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
849 set = NULL;
850 }
851
852 /* We need to keep CFI info correct, and the same on all paths,
853 so we cannot normally replace the registers REG_CFA_REGISTER
854 refers to. Bail. */
855 if (REG_NOTE_KIND (link) == REG_CFA_REGISTER)
856 goto did_replacement;
857 }
858
859 /* Special-case plain move instructions, since we may well
860 be able to do the move from a different register class. */
861 if (set && REG_P (SET_SRC (set)))
862 {
863 rtx src = SET_SRC (set);
864 unsigned int regno = REGNO (src);
865 machine_mode mode = GET_MODE (src);
866 unsigned int i;
867 rtx new_rtx;
868
869 /* If we are accessing SRC in some mode other than what we
870 set it in, make sure that the replacement is valid. */
871 if (mode != vd->e[regno].mode)
872 {
873 if (REG_NREGS (src)
874 > hard_regno_nregs (regno, vd->e[regno].mode))
875 goto no_move_special_case;
876
877 /* Likewise, if we are narrowing on a big-endian target, the
878 transformation is also invalid. */
879 if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
880 && maybe_ne (subreg_lowpart_offset (mode,
881 vd->e[regno].mode), 0U))
882 goto no_move_special_case;
883 }
884
885 /* If the destination is also a register, try to find a source
886 register in the same class. */
887 if (REG_P (SET_DEST (set)))
888 {
889 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
890 src, vd);
891
892 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
893 {
894 if (dump_file)
895 fprintf (dump_file,
896 "insn %u: replaced reg %u with %u\n",
897 INSN_UID (insn), regno, REGNO (new_rtx));
898 changed = true;
899 goto did_replacement;
900 }
901 /* We need to re-extract as validate_change clobbers
902 recog_data. */
903 extract_constrain_insn (insn);
904 preprocess_constraints (insn);
905 }
906
907 /* Otherwise, try each older copy in the value chain and see if it is valid here. */
908 for (i = vd->e[regno].oldest_regno; i != regno;
909 i = vd->e[i].next_regno)
910 {
911 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
912 mode, i, regno);
913 if (new_rtx != NULL_RTX)
914 {
915 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
916 {
917 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
918 REG_ATTRS (new_rtx) = REG_ATTRS (src);
919 REG_POINTER (new_rtx) = REG_POINTER (src);
920 if (dump_file)
921 fprintf (dump_file,
922 "insn %u: replaced reg %u with %u\n",
923 INSN_UID (insn), regno, REGNO (new_rtx));
924 changed = true;
925 goto did_replacement;
926 }
927 /* We need to re-extract as validate_change clobbers
928 recog_data. */
929 extract_constrain_insn (insn);
930 preprocess_constraints (insn);
931 }
932 }
933 }
934 no_move_special_case:
935
936 any_replacements = false;
937
938 /* For each input operand, replace a hard register with the
939 eldest live copy that's in an appropriate register class. */
940 for (i = 0; i < n_ops; i++)
941 {
942 bool replaced = false;
943
944 /* Don't scan match_operand here, since we've no reg class
945 information to pass down. Any operands that we could
946 substitute in will be represented elsewhere. */
947 if (recog_data.constraints[i][0] == '\0')
948 continue;
949
950 /* Don't replace in asms intentionally referencing hard regs. */
951 if (is_asm && REG_P (recog_data.operand[i])
952 && (REGNO (recog_data.operand[i])
953 == ORIGINAL_REGNO (recog_data.operand[i])))
954 continue;
955
956 if (recog_data.operand_type[i] == OP_IN)
957 {
958 if (op_alt[i].is_address)
959 replaced
960 = replace_oldest_value_addr (recog_data.operand_loc[i],
961 alternative_class (op_alt, i),
962 VOIDmode, ADDR_SPACE_GENERIC,
963 insn, vd);
964 else if (REG_P (recog_data.operand[i]))
965 replaced
966 = replace_oldest_value_reg (recog_data.operand_loc[i],
967 alternative_class (op_alt, i),
968 insn, vd);
969 else if (MEM_P (recog_data.operand[i]))
970 replaced = replace_oldest_value_mem (recog_data.operand[i],
971 insn, vd);
972 }
973 else if (MEM_P (recog_data.operand[i]))
974 replaced = replace_oldest_value_mem (recog_data.operand[i],
975 insn, vd);
976
977 /* If we performed any replacement, update match_dups. */
978 if (replaced)
979 {
980 int j;
981 rtx new_rtx;
982
983 new_rtx = *recog_data.operand_loc[i];
984 recog_data.operand[i] = new_rtx;
985 for (j = 0; j < recog_data.n_dups; j++)
986 if (recog_data.dup_num[j] == i)
987 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
988
989 any_replacements = true;
990 }
991 }
992
993 if (any_replacements)
994 {
995 if (! apply_change_group ())
996 {
997 if (dump_file)
998 fprintf (dump_file,
999 "insn %u: reg replacements not verified\n",
1000 INSN_UID (insn));
1001 }
1002 else
1003 changed = true;
1004 }
1005
1006 did_replacement:
1007 if (changed)
1008 {
1009 anything_changed = true;
1010
1011 /* If something changed, perhaps further changes to earlier
1012 DEBUG_INSNs can be applied. */
1013 if (vd->n_debug_insn_changes)
1014 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1015 }
1016
1017 ksvd.vd = vd;
1018 ksvd.ignore_set_reg = NULL_RTX;
1019
1020 /* Clobber call-clobbered registers. */
1021 if (CALL_P (insn))
1022 {
1023 unsigned int set_regno = INVALID_REGNUM;
1024 unsigned int set_nregs = 0;
1025 unsigned int regno;
1026 rtx exp;
1027 HARD_REG_SET regs_invalidated_by_this_call;
1028
1029 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1030 {
1031 rtx x = XEXP (exp, 0);
1032 if (GET_CODE (x) == SET)
1033 {
1034 rtx dest = SET_DEST (x);
1035 kill_value (dest, vd);
1036 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1037 copy_value (dest, SET_SRC (x), vd);
1038 ksvd.ignore_set_reg = dest;
1039 set_regno = REGNO (dest);
1040 set_nregs = REG_NREGS (dest);
1041 break;
1042 }
1043 }
1044
1045 get_call_reg_set_usage (insn,
1046 &regs_invalidated_by_this_call,
1047 regs_invalidated_by_call);
1048 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1049 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1050 || (targetm.hard_regno_call_part_clobbered
1051 (regno, vd->e[regno].mode)))
1052 && (regno < set_regno || regno >= set_regno + set_nregs))
1053 kill_value_regno (regno, 1, vd);
1054
1055 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1056 of the SET isn't in regs_invalidated_by_call hard reg set,
1057 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1058 assume the value in it is still live. */
1059 if (ksvd.ignore_set_reg)
1060 kill_clobbered_values (insn, vd);
1061 }
1062
1063 bool copy_p = (set
1064 && REG_P (SET_DEST (set))
1065 && REG_P (SET_SRC (set)));
1066 bool noop_p = (copy_p
1067 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1068
1069 /* If a noop move is using narrower mode than we have recorded,
1070 we need to either remove the noop move, or kill_set_value. */
1071 if (noop_p
1072 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1073 vd->e[REGNO (SET_DEST (set))].mode))
1074 {
1075 if (noop_move_p (insn))
1076 {
1077 bool last = insn == BB_END (bb);
1078 delete_insn (insn);
1079 if (last)
1080 break;
1081 }
1082 else
1083 noop_p = false;
1084 }
1085
1086 if (!noop_p)
1087 {
1088 /* Notice stores. */
1089 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1090
1091 /* Notice copies. */
1092 if (copy_p)
1093 copy_value (SET_DEST (set), SET_SRC (set), vd);
1094 }
1095
1096 if (insn == BB_END (bb))
1097 break;
1098 }
1099
1100 return anything_changed;
1101 }
1102
1103 /* Dump the value chain data to stderr. */
1104
1105 DEBUG_FUNCTION void
1106 debug_value_data (struct value_data *vd)
1107 {
1108 HARD_REG_SET set;
1109 unsigned int i, j;
1110
1111 CLEAR_HARD_REG_SET (set);
1112
1113 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1114 if (vd->e[i].oldest_regno == i)
1115 {
1116 if (vd->e[i].mode == VOIDmode)
1117 {
1118 if (vd->e[i].next_regno != INVALID_REGNUM)
1119 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1120 i, vd->e[i].next_regno);
1121 continue;
1122 }
1123
1124 SET_HARD_REG_BIT (set, i);
1125 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1126
1127 for (j = vd->e[i].next_regno;
1128 j != INVALID_REGNUM;
1129 j = vd->e[j].next_regno)
1130 {
1131 if (TEST_HARD_REG_BIT (set, j))
1132 {
1133 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1134 return;
1135 }
1136
1137 if (vd->e[j].oldest_regno != i)
1138 {
1139 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1140 j, vd->e[j].oldest_regno);
1141 return;
1142 }
1143 SET_HARD_REG_BIT (set, j);
1144 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1145 }
1146 fputc ('\n', stderr);
1147 }
1148
1149 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1150 if (! TEST_HARD_REG_BIT (set, i)
1151 && (vd->e[i].mode != VOIDmode
1152 || vd->e[i].oldest_regno != i
1153 || vd->e[i].next_regno != INVALID_REGNUM))
1154 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1155 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1156 vd->e[i].next_regno);
1157 }
1158
1159 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1160 DEBUG_INSNs are skipped since we do not want to involve the DF-related
1161 machinery used by pass_cprop_hardreg::execute.
1162
1163 NOTE: Currently this is only used for shrink-wrapping. Maybe extend it
1164 to handle DEBUG_INSNs for other uses. */
1165
1166 void
1167 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1168 {
1169 struct value_data *vd;
1170 vd = XNEWVEC (struct value_data, 1);
1171 init_value_data (vd);
1172
1173 skip_debug_insn_p = true;
1174 copyprop_hardreg_forward_1 (bb, vd);
1175 free (vd);
1176 skip_debug_insn_p = false;
1177 }
1178
1179 static void
1180 validate_value_data (struct value_data *vd)
1181 {
1182 HARD_REG_SET set;
1183 unsigned int i, j;
1184
1185 CLEAR_HARD_REG_SET (set);
1186
1187 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1188 if (vd->e[i].oldest_regno == i)
1189 {
1190 if (vd->e[i].mode == VOIDmode)
1191 {
1192 if (vd->e[i].next_regno != INVALID_REGNUM)
1193 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1194 i, vd->e[i].next_regno);
1195 continue;
1196 }
1197
1198 SET_HARD_REG_BIT (set, i);
1199
1200 for (j = vd->e[i].next_regno;
1201 j != INVALID_REGNUM;
1202 j = vd->e[j].next_regno)
1203 {
1204 if (TEST_HARD_REG_BIT (set, j))
1205 internal_error ("validate_value_data: Loop in regno chain (%u)",
1206 j);
1207 if (vd->e[j].oldest_regno != i)
1208 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1209 j, vd->e[j].oldest_regno);
1210
1211 SET_HARD_REG_BIT (set, j);
1212 }
1213 }
1214
1215 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1216 if (! TEST_HARD_REG_BIT (set, i)
1217 && (vd->e[i].mode != VOIDmode
1218 || vd->e[i].oldest_regno != i
1219 || vd->e[i].next_regno != INVALID_REGNUM))
1220 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1221 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1222 vd->e[i].next_regno);
1223 }
1224
1225
1226 namespace {
1227
1228 const pass_data pass_data_cprop_hardreg =
1229 {
1230 RTL_PASS, /* type */
1231 "cprop_hardreg", /* name */
1232 OPTGROUP_NONE, /* optinfo_flags */
1233 TV_CPROP_REGISTERS, /* tv_id */
1234 0, /* properties_required */
1235 0, /* properties_provided */
1236 0, /* properties_destroyed */
1237 0, /* todo_flags_start */
1238 TODO_df_finish, /* todo_flags_finish */
1239 };
1240
1241 class pass_cprop_hardreg : public rtl_opt_pass
1242 {
1243 public:
1244 pass_cprop_hardreg (gcc::context *ctxt)
1245 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1246 {}
1247
1248 /* opt_pass methods: */
1249 virtual bool gate (function *)
1250 {
1251 return (optimize > 0 && (flag_cprop_registers));
1252 }
1253
1254 virtual unsigned int execute (function *);
1255
1256 }; // class pass_cprop_hardreg
1257
1258 unsigned int
1259 pass_cprop_hardreg::execute (function *fun)
1260 {
1261 struct value_data *all_vd;
1262 basic_block bb;
1263 bool analyze_called = false;
1264
1265 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1266
1267 auto_sbitmap visited (last_basic_block_for_fn (fun));
1268 bitmap_clear (visited);
1269
1270 FOR_EACH_BB_FN (bb, fun)
1271 {
1272 bitmap_set_bit (visited, bb->index);
1273
1274 /* If a block has a single predecessor that we've already
1275 processed, begin with the value data that was live at
1276 the end of the predecessor block. */
1277 /* ??? Ought to use more intelligent queuing of blocks. */
1278 if (single_pred_p (bb)
1279 && bitmap_bit_p (visited, single_pred (bb)->index)
1280 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1281 {
1282 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1283 if (all_vd[bb->index].n_debug_insn_changes)
1284 {
1285 unsigned int regno;
1286
1287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1288 {
1289 if (all_vd[bb->index].e[regno].debug_insn_changes)
1290 {
1291 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1292 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1293 break;
1294 }
1295 }
1296 }
1297 }
1298 else
1299 init_value_data (all_vd + bb->index);
1300
1301 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1302 }
1303
1304 if (MAY_HAVE_DEBUG_BIND_INSNS)
1305 {
1306 FOR_EACH_BB_FN (bb, fun)
1307 if (bitmap_bit_p (visited, bb->index)
1308 && all_vd[bb->index].n_debug_insn_changes)
1309 {
1310 unsigned int regno;
1311 bitmap live;
1312
1313 if (!analyze_called)
1314 {
1315 df_analyze ();
1316 analyze_called = true;
1317 }
1318 live = df_get_live_out (bb);
1319 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1320 if (all_vd[bb->index].e[regno].debug_insn_changes)
1321 {
1322 if (REGNO_REG_SET_P (live, regno))
1323 apply_debug_insn_changes (all_vd + bb->index, regno);
1324 if (all_vd[bb->index].n_debug_insn_changes == 0)
1325 break;
1326 }
1327 }
1328
1329 queued_debug_insn_change_pool.release ();
1330 }
1331
1332 free (all_vd);
1333 return 0;
1334 }
1335
1336 } // anon namespace
1337
1338 rtl_opt_pass *
1339 make_pass_cprop_hardreg (gcc::context *ctxt)
1340 {
1341 return new pass_cprop_hardreg (ctxt);
1342 }
1343