1 /* Copy propagation on hard registers for the GNU compiler.
2    Copyright (C) 2000-2018 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38 
39 /* The following code does forward propagation of hard register copies.
40    The object is to eliminate as many dependencies as possible, so that
41    we have the most scheduling freedom.  As a side effect, we also clean
42    up some silly register allocation decisions made by reload.  This
43    code may be obsoleted by a new register allocator.  */
44 
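/* As a purely illustrative sketch (the register numbers are hypothetical),
   the propagation performed here rewrites a sequence such as

	(set (reg:SI 1) (reg:SI 0))
	(set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 4)))

   into

	(set (reg:SI 1) (reg:SI 0))
	(set (reg:SI 2) (plus:SI (reg:SI 0) (const_int 4)))

   so that the second insn no longer depends on the copy; the copy itself
   is typically left for later dead-code elimination.  */
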
45 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
46    lifetime of a register and get the DEBUG_INSN subsequently reset.
47    So they are queued instead, and updated only when the register is
48    used in some subsequent real insn before it is set.  */
49 struct queued_debug_insn_change
50 {
51   struct queued_debug_insn_change *next;
52   rtx_insn *insn;
53   rtx *loc;
54   rtx new_rtx;
55 };
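
/* For example (hypothetical): if a DEBUG_INSN refers to register 1 and we
   would like to rewrite that reference to the older copy in register 0,
   applying the change immediately could extend the apparent lifetime of
   register 0; if register 0 were then overwritten before any real use, the
   DEBUG_INSN would have to be reset.  Instead the change is queued on
   register 0's entry, applied by cprop_find_used_regs once a real insn is
   seen to use register 0, and simply discarded if register 0 is killed
   first.  */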
56 
57 /* For each register, we have a list of registers that contain the same
58    value.  The OLDEST_REGNO field points to the head of the list, and
59    the NEXT_REGNO field runs through the list.  The MODE field indicates
60    what mode the data is known to be in; this field is VOIDmode when the
61    register is not known to contain valid data.  */
62 
63 struct value_data_entry
64 {
65   machine_mode mode;
66   unsigned int oldest_regno;
67   unsigned int next_regno;
68   struct queued_debug_insn_change *debug_insn_changes;
69 };
70 
71 struct value_data
72 {
73   struct value_data_entry e[FIRST_PSEUDO_REGISTER];
74   unsigned int max_value_regs;
75   unsigned int n_debug_insn_changes;
76 };
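
/* For illustration only (hypothetical register numbers): after processing

	(set (reg:SI 1) (reg:SI 0))
	(set (reg:SI 2) (reg:SI 1))

   the entries form a single chain rooted at register 0:

	e[0]: oldest_regno = 0, next_regno = 1
	e[1]: oldest_regno = 0, next_regno = 2
	e[2]: oldest_regno = 0, next_regno = INVALID_REGNUM

   so a later use of register 1 or 2 can be rewritten to use register 0,
   subject to register-class and mode checks.  */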
77 
78 static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
79   ("debug insn changes pool");
80 
81 static bool skip_debug_insn_p;
82 
83 static void kill_value_one_regno (unsigned, struct value_data *);
84 static void kill_value_regno (unsigned, unsigned, struct value_data *);
85 static void kill_value (const_rtx, struct value_data *);
86 static void set_value_regno (unsigned, machine_mode, struct value_data *);
87 static void init_value_data (struct value_data *);
88 static void kill_clobbered_value (rtx, const_rtx, void *);
89 static void kill_set_value (rtx, const_rtx, void *);
90 static void copy_value (rtx, rtx, struct value_data *);
91 static bool mode_change_ok (machine_mode, machine_mode,
92 			    unsigned int);
93 static rtx maybe_mode_change (machine_mode, machine_mode,
94 			      machine_mode, unsigned int, unsigned int);
95 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
96 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
97 				      struct value_data *);
98 static bool replace_oldest_value_addr (rtx *, enum reg_class,
99 				       machine_mode, addr_space_t,
100 				       rtx_insn *, struct value_data *);
101 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
102 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
103 extern void debug_value_data (struct value_data *);
104 static void validate_value_data (struct value_data *);
105 
106 /* Free all queued updates for DEBUG_INSNs that change some reg to
107    register REGNO.  */
108 
109 static void
110 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
111 {
112   struct queued_debug_insn_change *cur, *next;
113   for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
114     {
115       next = cur->next;
116       --vd->n_debug_insn_changes;
117       queued_debug_insn_change_pool.remove (cur);
118     }
119   vd->e[regno].debug_insn_changes = NULL;
120 }
121 
122 /* Kill register REGNO.  This involves removing it from any value
123    lists, and resetting the value mode to VOIDmode.  This is only a
124    helper function; it does not handle any hard registers overlapping
125    with REGNO.  */
126 
127 static void
128 kill_value_one_regno (unsigned int regno, struct value_data *vd)
129 {
130   unsigned int i, next;
131 
132   if (vd->e[regno].oldest_regno != regno)
133     {
134       for (i = vd->e[regno].oldest_regno;
135 	   vd->e[i].next_regno != regno;
136 	   i = vd->e[i].next_regno)
137 	continue;
138       vd->e[i].next_regno = vd->e[regno].next_regno;
139     }
140   else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
141     {
142       for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
143 	vd->e[i].oldest_regno = next;
144     }
145 
146   vd->e[regno].mode = VOIDmode;
147   vd->e[regno].oldest_regno = regno;
148   vd->e[regno].next_regno = INVALID_REGNUM;
149   if (vd->e[regno].debug_insn_changes)
150     free_debug_insn_changes (vd, regno);
151 
152   if (flag_checking)
153     validate_value_data (vd);
154 }
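
/* A hypothetical example: given a chain 0 -> 1 -> 2, killing register 1
   merely unlinks it (leaving 0 -> 2), while killing register 0 makes
   register 1 the new oldest_regno of the remaining chain (1 -> 2).  */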
155 
156 /* Kill the value in register REGNO for NREGS, and any other registers
157    whose values overlap.  */
158 
159 static void
160 kill_value_regno (unsigned int regno, unsigned int nregs,
161 		  struct value_data *vd)
162 {
163   unsigned int j;
164 
165   /* Kill the value we're told to kill.  */
166   for (j = 0; j < nregs; ++j)
167     kill_value_one_regno (regno + j, vd);
168 
169   /* Kill everything that overlapped what we're told to kill.  */
170   if (regno < vd->max_value_regs)
171     j = 0;
172   else
173     j = regno - vd->max_value_regs;
174   for (; j < regno; ++j)
175     {
176       unsigned int i, n;
177       if (vd->e[j].mode == VOIDmode)
178 	continue;
179       n = hard_regno_nregs (j, vd->e[j].mode);
180       if (j + n > regno)
181 	for (i = 0; i < n; ++i)
182 	  kill_value_one_regno (j + i, vd);
183     }
184 }
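
/* For example (assuming, hypothetically, 32-bit hard registers): if
   register 0 holds a DImode value occupying registers 0 and 1, killing
   register 1 alone must also invalidate register 0, because part of its
   recorded value is gone.  The second loop above, which scans the
   preceding registers, handles exactly that case.  */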
185 
186 /* Kill X.  This is a convenience function wrapping kill_value_regno
187    so that we mind the mode the register is in.  */
188 
189 static void
190 kill_value (const_rtx x, struct value_data *vd)
191 {
192   if (GET_CODE (x) == SUBREG)
193     {
194       rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
195 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
196       x = tmp ? tmp : SUBREG_REG (x);
197     }
198   if (REG_P (x))
199     kill_value_regno (REGNO (x), REG_NREGS (x), vd);
200 }
201 
202 /* Remember that REGNO is valid in MODE.  */
203 
204 static void
205 set_value_regno (unsigned int regno, machine_mode mode,
206 		 struct value_data *vd)
207 {
208   unsigned int nregs;
209 
210   vd->e[regno].mode = mode;
211 
212   nregs = hard_regno_nregs (regno, mode);
213   if (nregs > vd->max_value_regs)
214     vd->max_value_regs = nregs;
215 }
216 
217 /* Initialize VD such that there are no known relationships between regs.  */
218 
219 static void
220 init_value_data (struct value_data *vd)
221 {
222   int i;
223   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
224     {
225       vd->e[i].mode = VOIDmode;
226       vd->e[i].oldest_regno = i;
227       vd->e[i].next_regno = INVALID_REGNUM;
228       vd->e[i].debug_insn_changes = NULL;
229     }
230   vd->max_value_regs = 0;
231   vd->n_debug_insn_changes = 0;
232 }
233 
234 /* Called through note_stores.  If X is clobbered, kill its value.  */
235 
236 static void
237 kill_clobbered_value (rtx x, const_rtx set, void *data)
238 {
239   struct value_data *const vd = (struct value_data *) data;
240   if (GET_CODE (set) == CLOBBER)
241     kill_value (x, vd);
242 }
243 
244 /* A structure passed as data to kill_set_value through note_stores.  */
245 struct kill_set_value_data
246 {
247   struct value_data *vd;
248   rtx ignore_set_reg;
249 };
250 
251 /* Called through note_stores.  If X is set, not clobbered, kill its
252    current value and install it as the root of its own value list.  */
253 
254 static void
255 kill_set_value (rtx x, const_rtx set, void *data)
256 {
257   struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
258   if (rtx_equal_p (x, ksvd->ignore_set_reg))
259     return;
260   if (GET_CODE (set) != CLOBBER)
261     {
262       kill_value (x, ksvd->vd);
263       if (REG_P (x))
264 	set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
265     }
266 }
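
/* To illustrate the difference (hypothetical example): for
   (set (reg:SI 2) (...)) this kills whatever register 2 previously held
   and then records that register 2 now contains a fresh SImode value,
   i.e. a single-element chain; a (clobber (reg:SI 2)) is ignored here and
   handled by kill_clobbered_value instead, which records no new value.  */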
267 
268 /* Kill any register used in INSN as the base of an auto-increment expression,
269    and install that register as the root of its own value list.  */
270 
271 static void
272 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
273 {
274   subrtx_iterator::array_type array;
275   FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
276     {
277       const_rtx x = *iter;
278       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
279 	{
280 	  x = XEXP (x, 0);
281 	  kill_value (x, vd);
282 	  set_value_regno (REGNO (x), GET_MODE (x), vd);
283 	  iter.skip_subrtxes ();
284 	}
285     }
286 }
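
/* For instance (illustrative only), in

	(set (mem:SI (post_inc:SI (reg:SI 3))) (reg:SI 4))

   register 3 is modified by the post-increment, so any value previously
   recorded for it is discarded, and it is re-recorded as the sole member
   of its own chain.  */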
287 
288 /* Assert that SRC has been copied to DEST.  Adjust the data structures
289    to reflect that SRC contains an older copy of the shared value.  */
290 
291 static void
292 copy_value (rtx dest, rtx src, struct value_data *vd)
293 {
294   unsigned int dr = REGNO (dest);
295   unsigned int sr = REGNO (src);
296   unsigned int dn, sn;
297   unsigned int i;
298 
299   /* ??? At present, it's possible to see noop sets.  It'd be nice if
300      this were cleaned up beforehand...  */
301   if (sr == dr)
302     return;
303 
304   /* Do not propagate copies to the stack pointer, as that can leave
305      memory accesses with no scheduling dependency on the stack update.  */
306   if (dr == STACK_POINTER_REGNUM)
307     return;
308 
309   /* Likewise with the frame pointer, if we're using one.  */
310   if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
311     return;
312 
313    /* Do not propagate copies to fixed or global registers: patterns
314       may rely on seeing a particular fixed register, and users may
315       expect the chosen global register to be used in their asm.  */
316   if (fixed_regs[dr] || global_regs[dr])
317     return;
318 
319   /* If SRC and DEST overlap, don't record anything.  */
320   dn = REG_NREGS (dest);
321   sn = REG_NREGS (src);
322   if ((dr > sr && dr < sr + sn)
323       || (sr > dr && sr < dr + dn))
324     return;
325 
326   /* If SRC had no assigned mode (i.e. we didn't know it was live)
327      assign it now and assume the value came from an input argument
328      or somesuch.  */
329   if (vd->e[sr].mode == VOIDmode)
330     set_value_regno (sr, vd->e[dr].mode, vd);
331 
332   /* If we are narrowing the input to a smaller number of hard regs,
333      and it is in big endian, we are really extracting a high part.
334      Since we generally associate a low part of a value with the value itself,
335      we must not do the same for the high part.
336      Note we can still get low parts for the same mode combination through
337      a two-step copy involving differently sized hard regs.
338      Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
339      (set (reg:DI r0) (reg:DI fr0))
340      (set (reg:SI fr2) (reg:SI r0))
341      loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
342      (set (reg:SI fr2) (reg:SI fr0))
343      loads the high part of (reg:DI fr0) into fr2.
344 
345      We can't properly represent the latter case in our tables, so don't
346      record anything then.  */
347   else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
348 	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
349 					       vd->e[sr].mode), 0U))
350     return;
351 
352   /* If SRC had been assigned a mode narrower than the copy, we can't
353      link DEST into the chain, because not all of the pieces of the
354      copy came from oldest_regno.  */
355   else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
356     return;
357 
358   /* Link DR at the end of the value chain used by SR.  */
359 
360   vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
361 
362   for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
363     continue;
364   vd->e[i].next_regno = dr;
365 
366   if (flag_checking)
367     validate_value_data (vd);
368 }
369 
370 /* Return true if a mode change from ORIG to NEW is allowed for REGNO.  */
371 
372 static bool
373 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
374 		unsigned int regno ATTRIBUTE_UNUSED)
375 {
376   if (partial_subreg_p (orig_mode, new_mode))
377     return false;
378 
379   return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
380 }
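
/* E.g. (illustrative): if a register was only set in SImode and is now
   wanted in DImode, partial_subreg_p (SImode, DImode) is true and the
   change is rejected, since the high part of the DImode value was never
   written.  */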
381 
382 /* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
383    was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
384    in NEW_MODE.
385    Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */
386 
387 static rtx
388 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
389 		   machine_mode new_mode, unsigned int regno,
390 		   unsigned int copy_regno ATTRIBUTE_UNUSED)
391 {
392   if (partial_subreg_p (copy_mode, orig_mode)
393       && partial_subreg_p (copy_mode, new_mode))
394     return NULL_RTX;
395 
396   /* Avoid creating multiple copies of the stack pointer.  Some ports
397      assume there is one and only one stack pointer.
398 
399      It's unclear if we need to do the same for other special registers.  */
400   if (regno == STACK_POINTER_REGNUM)
401     return NULL_RTX;
402 
403   if (orig_mode == new_mode)
404     return gen_raw_REG (new_mode, regno);
405   else if (mode_change_ok (orig_mode, new_mode, regno))
406     {
407       int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
408       int use_nregs = hard_regno_nregs (copy_regno, new_mode);
409       poly_uint64 bytes_per_reg;
410       if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
411 			    copy_nregs, &bytes_per_reg))
412 	return NULL_RTX;
413       poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
414       poly_uint64 offset
415 	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
416 				      GET_MODE_SIZE (orig_mode));
417       regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
418       if (targetm.hard_regno_mode_ok (regno, new_mode))
419 	return gen_raw_REG (new_mode, regno);
420     }
421   return NULL_RTX;
422 }
423 
424 /* Find the oldest copy of the value contained in REGNO that is in
425    register class CL and has mode MODE.  If found, return an rtx
426    of that oldest register, otherwise return NULL.  */
427 
428 static rtx
429 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
430 {
431   unsigned int regno = REGNO (reg);
432   machine_mode mode = GET_MODE (reg);
433   unsigned int i;
434 
435   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
436 
437    /* If we are accessing REG in some mode other than what we set it in,
438      make sure that the replacement is valid.  In particular, consider
439 	(set (reg:DI r11) (...))
440 	(set (reg:SI r9) (reg:SI r11))
441 	(set (reg:SI r10) (...))
442 	(set (...) (reg:DI r9))
443      Replacing r9 with r11 is invalid.  */
444   if (mode != vd->e[regno].mode
445       && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
446     return NULL_RTX;
447 
448   for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
449     {
450       machine_mode oldmode = vd->e[i].mode;
451       rtx new_rtx;
452 
453       if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
454 	continue;
455 
456       new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
457       if (new_rtx)
458 	{
459 	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
460 	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
461 	  REG_POINTER (new_rtx) = REG_POINTER (reg);
462 	  return new_rtx;
463 	}
464     }
465 
466   return NULL_RTX;
467 }
468 
469 /* If possible, replace the register at *LOC with the oldest register
470    in register class CL.  Return true if successfully replaced.  */
471 
472 static bool
473 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
474 			  struct value_data *vd)
475 {
476   rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
477   if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
478     {
479       if (DEBUG_INSN_P (insn))
480 	{
481 	  struct queued_debug_insn_change *change;
482 
483 	  if (dump_file)
484 	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
485 		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
486 
487 	  change = queued_debug_insn_change_pool.allocate ();
488 	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
489 	  change->insn = insn;
490 	  change->loc = loc;
491 	  change->new_rtx = new_rtx;
492 	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
493 	  ++vd->n_debug_insn_changes;
494 	  return true;
495 	}
496       if (dump_file)
497 	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
498 		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
499 
500       validate_change (insn, loc, new_rtx, 1);
501       return true;
502     }
503   return false;
504 }
505 
506 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
507    Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
508    BASE_REG_CLASS depending on how the register is being considered.  */
509 
510 static bool
511 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
512 			   machine_mode mode, addr_space_t as,
513 			   rtx_insn *insn, struct value_data *vd)
514 {
515   rtx x = *loc;
516   RTX_CODE code = GET_CODE (x);
517   const char *fmt;
518   int i, j;
519   bool changed = false;
520 
521   switch (code)
522     {
523     case PLUS:
524       if (DEBUG_INSN_P (insn))
525 	break;
526 
527       {
528 	rtx orig_op0 = XEXP (x, 0);
529 	rtx orig_op1 = XEXP (x, 1);
530 	RTX_CODE code0 = GET_CODE (orig_op0);
531 	RTX_CODE code1 = GET_CODE (orig_op1);
532 	rtx op0 = orig_op0;
533 	rtx op1 = orig_op1;
534 	rtx *locI = NULL;
535 	rtx *locB = NULL;
536 	enum rtx_code index_code = SCRATCH;
537 
538 	if (GET_CODE (op0) == SUBREG)
539 	  {
540 	    op0 = SUBREG_REG (op0);
541 	    code0 = GET_CODE (op0);
542 	  }
543 
544 	if (GET_CODE (op1) == SUBREG)
545 	  {
546 	    op1 = SUBREG_REG (op1);
547 	    code1 = GET_CODE (op1);
548 	  }
549 
550 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
551 	    || code0 == ZERO_EXTEND || code1 == MEM)
552 	  {
553 	    locI = &XEXP (x, 0);
554 	    locB = &XEXP (x, 1);
555 	    index_code = GET_CODE (*locI);
556 	  }
557 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
558 		 || code1 == ZERO_EXTEND || code0 == MEM)
559 	  {
560 	    locI = &XEXP (x, 1);
561 	    locB = &XEXP (x, 0);
562 	    index_code = GET_CODE (*locI);
563 	  }
564 	else if (code0 == CONST_INT || code0 == CONST
565 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
566 	  {
567 	    locB = &XEXP (x, 1);
568 	    index_code = GET_CODE (XEXP (x, 0));
569 	  }
570 	else if (code1 == CONST_INT || code1 == CONST
571 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
572 	  {
573 	    locB = &XEXP (x, 0);
574 	    index_code = GET_CODE (XEXP (x, 1));
575 	  }
576 	else if (code0 == REG && code1 == REG)
577 	  {
578 	    int index_op;
579 	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
580 
581 	    if (REGNO_OK_FOR_INDEX_P (regno1)
582 		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
583 	      index_op = 1;
584 	    else if (REGNO_OK_FOR_INDEX_P (regno0)
585 		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
586 	      index_op = 0;
587 	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
588 		     || REGNO_OK_FOR_INDEX_P (regno1))
589 	      index_op = 1;
590 	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
591 	      index_op = 0;
592 	    else
593 	      index_op = 1;
594 
595 	    locI = &XEXP (x, index_op);
596 	    locB = &XEXP (x, !index_op);
597 	    index_code = GET_CODE (*locI);
598 	  }
599 	else if (code0 == REG)
600 	  {
601 	    locI = &XEXP (x, 0);
602 	    locB = &XEXP (x, 1);
603 	    index_code = GET_CODE (*locI);
604 	  }
605 	else if (code1 == REG)
606 	  {
607 	    locI = &XEXP (x, 1);
608 	    locB = &XEXP (x, 0);
609 	    index_code = GET_CODE (*locI);
610 	  }
611 
612 	if (locI)
613 	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
614 						mode, as, insn, vd);
615 	if (locB)
616 	  changed |= replace_oldest_value_addr (locB,
617 						base_reg_class (mode, as, PLUS,
618 								index_code),
619 						mode, as, insn, vd);
620 	return changed;
621       }
622 
623     case POST_INC:
624     case POST_DEC:
625     case POST_MODIFY:
626     case PRE_INC:
627     case PRE_DEC:
628     case PRE_MODIFY:
629       return false;
630 
631     case MEM:
632       return replace_oldest_value_mem (x, insn, vd);
633 
634     case REG:
635       return replace_oldest_value_reg (loc, cl, insn, vd);
636 
637     default:
638       break;
639     }
640 
641   fmt = GET_RTX_FORMAT (code);
642   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
643     {
644       if (fmt[i] == 'e')
645 	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
646 					      insn, vd);
647       else if (fmt[i] == 'E')
648 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
649 	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
650 						mode, as, insn, vd);
651     }
652 
653   return changed;
654 }
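
/* As a hypothetical example of the PLUS handling above: for the address
   (plus:SI (reg:SI 6) (mult:SI (reg:SI 7) (const_int 4))), the MULT operand
   is treated as the index and the other operand as the base, so register 7
   is only replaced with a copy valid in INDEX_REG_CLASS and register 6 only
   with one valid in the appropriate base register class.  */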
655 
656 /* Similar to replace_oldest_value_reg, but X contains a memory.  */
657 
658 static bool
659 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
660 {
661   enum reg_class cl;
662 
663   if (DEBUG_INSN_P (insn))
664     cl = ALL_REGS;
665   else
666     cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
667 
668   return replace_oldest_value_addr (&XEXP (x, 0), cl,
669 				    GET_MODE (x), MEM_ADDR_SPACE (x),
670 				    insn, vd);
671 }
672 
673 /* Apply all queued updates for DEBUG_INSNs that change some reg to
674    register REGNO.  */
675 
676 static void
677 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
678 {
679   struct queued_debug_insn_change *change;
680   rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
681 
682   for (change = vd->e[regno].debug_insn_changes;
683        change;
684        change = change->next)
685     {
686       if (last_insn != change->insn)
687 	{
688 	  apply_change_group ();
689 	  last_insn = change->insn;
690 	}
691       validate_change (change->insn, change->loc, change->new_rtx, 1);
692     }
693   apply_change_group ();
694 }
695 
696 /* Called via note_uses.  For each register used in a real insn,
697    apply any queued DEBUG_INSN changes that replace some register
698    with that used register.  */
699 
700 static void
701 cprop_find_used_regs (rtx *loc, void *data)
702 {
703   struct value_data *const vd = (struct value_data *) data;
704   subrtx_iterator::array_type array;
705   FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
706     {
707       const_rtx x = *iter;
708       if (REG_P (x))
709 	{
710 	  unsigned int regno = REGNO (x);
711 	  if (vd->e[regno].debug_insn_changes)
712 	    {
713 	      apply_debug_insn_changes (vd, regno);
714 	      free_debug_insn_changes (vd, regno);
715 	    }
716 	}
717     }
718 }
719 
720 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD.  */
721 
722 static void
723 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
724 {
725   note_stores (PATTERN (insn), kill_clobbered_value, vd);
726 
727   if (CALL_P (insn))
728     {
729       rtx exp;
730 
731       for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
732 	{
733 	  rtx x = XEXP (exp, 0);
734 	  if (GET_CODE (x) == CLOBBER)
735 	    kill_value (SET_DEST (x), vd);
736 	}
737     }
738 }
739 
740 /* Perform the forward copy propagation on basic block BB.  */
741 
742 static bool
743 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
744 {
745   bool anything_changed = false;
746   rtx_insn *insn, *next;
747 
748   for (insn = BB_HEAD (bb); ; insn = next)
749     {
750       int n_ops, i, predicated;
751       bool is_asm, any_replacements;
752       rtx set;
753       rtx link;
754       bool changed = false;
755       struct kill_set_value_data ksvd;
756 
757       next = NEXT_INSN (insn);
758       if (!NONDEBUG_INSN_P (insn))
759 	{
760 	  if (DEBUG_BIND_INSN_P (insn))
761 	    {
762 	      rtx loc = INSN_VAR_LOCATION_LOC (insn);
763 	      if (!VAR_LOC_UNKNOWN_P (loc))
764 		replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
765 					   ALL_REGS, GET_MODE (loc),
766 					   ADDR_SPACE_GENERIC, insn, vd);
767 	    }
768 
769 	  if (insn == BB_END (bb))
770 	    break;
771 	  else
772 	    continue;
773 	}
774 
775       set = single_set (insn);
776 
777       /* Detect noop sets and remove them before processing side effects.  */
778       if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
779 	{
780 	  unsigned int regno = REGNO (SET_SRC (set));
781 	  rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
782 					  SET_DEST (set), vd);
783 	  rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
784 					  SET_SRC (set), vd);
785 	  if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
786 	    {
787 	      bool last = insn == BB_END (bb);
788 	      delete_insn (insn);
789 	      if (last)
790 		break;
791 	      continue;
792 	    }
793 	}
794 
795       extract_constrain_insn (insn);
796       preprocess_constraints (insn);
797       const operand_alternative *op_alt = which_op_alt ();
798       n_ops = recog_data.n_operands;
799       is_asm = asm_noperands (PATTERN (insn)) >= 0;
800 
801       /* Simplify the code below by promoting OP_OUT to OP_INOUT
802 	 in predicated instructions.  */
803 
804       predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
805       for (i = 0; i < n_ops; ++i)
806 	{
807 	  int matches = op_alt[i].matches;
808 	  if (matches >= 0 || op_alt[i].matched >= 0
809 	      || (predicated && recog_data.operand_type[i] == OP_OUT))
810 	    recog_data.operand_type[i] = OP_INOUT;
811 	}
812 
813       /* Apply changes to earlier DEBUG_INSNs if possible.  */
814       if (vd->n_debug_insn_changes)
815 	note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
816 
817       /* For each earlyclobber operand, zap the value data.  */
818       for (i = 0; i < n_ops; i++)
819 	if (op_alt[i].earlyclobber)
820 	  kill_value (recog_data.operand[i], vd);
821 
822       /* Within asms, a clobber cannot overlap inputs or outputs.
823 	 I wouldn't think this were true for regular insns, but
824 	 scan_rtx treats them like that...  */
825       kill_clobbered_values (insn, vd);
826 
827       /* Kill all auto-incremented values.  */
828       /* ??? REG_INC is useless, since stack pushes aren't done that way.  */
829       kill_autoinc_value (insn, vd);
830 
831       /* Kill all early-clobbered operands.  */
832       for (i = 0; i < n_ops; i++)
833 	if (op_alt[i].earlyclobber)
834 	  kill_value (recog_data.operand[i], vd);
835 
836       /* If we have dead sets in the insn, then we need to note them just
837 	 as we would note clobbers.  */
838       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
839 	{
840 	  if (REG_NOTE_KIND (link) == REG_UNUSED)
841 	    {
842 	      kill_value (XEXP (link, 0), vd);
843 	      /* Furthermore, if the insn looked like a single-set,
844 		 but the dead store kills the source value of that
845 		 set, then we can no-longer use the plain move
846 		 special case below.  */
847 	      if (set
848 		  && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
849 		set = NULL;
850 	    }
851 	}
852 
853       /* Special-case plain move instructions, since we may well
854 	 be able to do the move from a different register class.  */
855       if (set && REG_P (SET_SRC (set)))
856 	{
857 	  rtx src = SET_SRC (set);
858 	  unsigned int regno = REGNO (src);
859 	  machine_mode mode = GET_MODE (src);
860 	  unsigned int i;
861 	  rtx new_rtx;
862 
863 	  /* If we are accessing SRC in some mode other than what we
864 	     set it in, make sure that the replacement is valid.  */
865 	  if (mode != vd->e[regno].mode)
866 	    {
867 	      if (REG_NREGS (src)
868 		  > hard_regno_nregs (regno, vd->e[regno].mode))
869 		goto no_move_special_case;
870 
871 	      /* And likewise, if we are narrowing on big endian, the
872 		 transformation is also invalid.  */
873 	      if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
874 		  && maybe_ne (subreg_lowpart_offset (mode,
875 						      vd->e[regno].mode), 0U))
876 		goto no_move_special_case;
877 	    }
878 
879 	  /* If the destination is also a register, try to find a source
880 	     register in the same class.  */
881 	  if (REG_P (SET_DEST (set)))
882 	    {
883 	      new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
884 					       src, vd);
885 
886 	      if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
887 		{
888 		  if (dump_file)
889 		    fprintf (dump_file,
890 			     "insn %u: replaced reg %u with %u\n",
891 			     INSN_UID (insn), regno, REGNO (new_rtx));
892 		  changed = true;
893 		  goto did_replacement;
894 		}
895 	      /* We need to re-extract as validate_change clobbers
896 		 recog_data.  */
897 	      extract_constrain_insn (insn);
898 	      preprocess_constraints (insn);
899 	    }
900 
901 	  /* Otherwise, try each register in the chain for a valid replacement.  */
902 	  for (i = vd->e[regno].oldest_regno; i != regno;
903 	       i = vd->e[i].next_regno)
904 	    {
905 	      new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
906 				       mode, i, regno);
907 	      if (new_rtx != NULL_RTX)
908 		{
909 		  if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
910 		    {
911 		      ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
912 		      REG_ATTRS (new_rtx) = REG_ATTRS (src);
913 		      REG_POINTER (new_rtx) = REG_POINTER (src);
914 		      if (dump_file)
915 			fprintf (dump_file,
916 				 "insn %u: replaced reg %u with %u\n",
917 				 INSN_UID (insn), regno, REGNO (new_rtx));
918 		      changed = true;
919 		      goto did_replacement;
920 		    }
921 		  /* We need to re-extract as validate_change clobbers
922 		     recog_data.  */
923 		  extract_constrain_insn (insn);
924 		  preprocess_constraints (insn);
925 		}
926 	    }
927 	}
928       no_move_special_case:
929 
930       any_replacements = false;
931 
932       /* For each input operand, replace a hard register with the
933 	 eldest live copy that's in an appropriate register class.  */
934       for (i = 0; i < n_ops; i++)
935 	{
936 	  bool replaced = false;
937 
938 	  /* Don't scan match_operand here, since we've no reg class
939 	     information to pass down.  Any operands that we could
940 	     substitute in will be represented elsewhere.  */
941 	  if (recog_data.constraints[i][0] == '\0')
942 	    continue;
943 
944 	  /* Don't replace in asms intentionally referencing hard regs.  */
945 	  if (is_asm && REG_P (recog_data.operand[i])
946 	      && (REGNO (recog_data.operand[i])
947 		  == ORIGINAL_REGNO (recog_data.operand[i])))
948 	    continue;
949 
950 	  if (recog_data.operand_type[i] == OP_IN)
951 	    {
952 	      if (op_alt[i].is_address)
953 		replaced
954 		  = replace_oldest_value_addr (recog_data.operand_loc[i],
955 					       alternative_class (op_alt, i),
956 					       VOIDmode, ADDR_SPACE_GENERIC,
957 					       insn, vd);
958 	      else if (REG_P (recog_data.operand[i]))
959 		replaced
960 		  = replace_oldest_value_reg (recog_data.operand_loc[i],
961 					      alternative_class (op_alt, i),
962 					      insn, vd);
963 	      else if (MEM_P (recog_data.operand[i]))
964 		replaced = replace_oldest_value_mem (recog_data.operand[i],
965 						     insn, vd);
966 	    }
967 	  else if (MEM_P (recog_data.operand[i]))
968 	    replaced = replace_oldest_value_mem (recog_data.operand[i],
969 						 insn, vd);
970 
971 	  /* If we performed any replacement, update match_dups.  */
972 	  if (replaced)
973 	    {
974 	      int j;
975 	      rtx new_rtx;
976 
977 	      new_rtx = *recog_data.operand_loc[i];
978 	      recog_data.operand[i] = new_rtx;
979 	      for (j = 0; j < recog_data.n_dups; j++)
980 		if (recog_data.dup_num[j] == i)
981 		  validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
982 
983 	      any_replacements = true;
984 	    }
985 	}
986 
987       if (any_replacements)
988 	{
989 	  if (! apply_change_group ())
990 	    {
991 	      if (dump_file)
992 		fprintf (dump_file,
993 			 "insn %u: reg replacements not verified\n",
994 			 INSN_UID (insn));
995 	    }
996 	  else
997 	    changed = true;
998 	}
999 
1000     did_replacement:
1001       if (changed)
1002 	{
1003 	  anything_changed = true;
1004 
1005 	  /* If something changed, perhaps further changes to earlier
1006 	     DEBUG_INSNs can be applied.  */
1007 	  if (vd->n_debug_insn_changes)
1008 	    note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1009 	}
1010 
1011       ksvd.vd = vd;
1012       ksvd.ignore_set_reg = NULL_RTX;
1013 
1014       /* Clobber call-clobbered registers.  */
1015       if (CALL_P (insn))
1016 	{
1017 	  unsigned int set_regno = INVALID_REGNUM;
1018 	  unsigned int set_nregs = 0;
1019 	  unsigned int regno;
1020 	  rtx exp;
1021 	  HARD_REG_SET regs_invalidated_by_this_call;
1022 
1023 	  for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1024 	    {
1025 	      rtx x = XEXP (exp, 0);
1026 	      if (GET_CODE (x) == SET)
1027 		{
1028 		  rtx dest = SET_DEST (x);
1029 		  kill_value (dest, vd);
1030 		  set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1031 		  copy_value (dest, SET_SRC (x), vd);
1032 		  ksvd.ignore_set_reg = dest;
1033 		  set_regno = REGNO (dest);
1034 		  set_nregs = REG_NREGS (dest);
1035 		  break;
1036 		}
1037 	    }
1038 
1039 	  get_call_reg_set_usage (insn,
1040 				  &regs_invalidated_by_this_call,
1041 				  regs_invalidated_by_call);
1042 	  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1043 	    if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1044 		 || (targetm.hard_regno_call_part_clobbered
1045 		     (regno, vd->e[regno].mode)))
1046 		&& (regno < set_regno || regno >= set_regno + set_nregs))
1047 	      kill_value_regno (regno, 1, vd);
1048 
1049 	  /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1050 	     of the SET isn't in regs_invalidated_by_call hard reg set,
1051 	     but instead among CLOBBERs on the CALL_INSN, we could wrongly
1052 	     assume the value in it is still live.  */
1053 	  if (ksvd.ignore_set_reg)
1054 	    kill_clobbered_values (insn, vd);
1055 	}
1056 
1057       bool copy_p = (set
1058 		     && REG_P (SET_DEST (set))
1059 		     && REG_P (SET_SRC (set)));
1060       bool noop_p = (copy_p
1061 		     && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1062 
1063       /* If a noop move uses a narrower mode than we have recorded,
1064 	 we need to either remove the noop move or call kill_set_value.  */
1065       if (noop_p
1066 	  && partial_subreg_p (GET_MODE (SET_DEST (set)),
1067 			       vd->e[REGNO (SET_DEST (set))].mode))
1068 	{
1069 	  if (noop_move_p (insn))
1070 	    {
1071 	      bool last = insn == BB_END (bb);
1072 	      delete_insn (insn);
1073 	      if (last)
1074 		break;
1075 	    }
1076 	  else
1077 	    noop_p = false;
1078 	}
1079 
1080       if (!noop_p)
1081 	{
1082 	  /* Notice stores.  */
1083 	  note_stores (PATTERN (insn), kill_set_value, &ksvd);
1084 
1085 	  /* Notice copies.  */
1086 	  if (copy_p)
1087 	    copy_value (SET_DEST (set), SET_SRC (set), vd);
1088 	}
1089 
1090       if (insn == BB_END (bb))
1091 	break;
1092     }
1093 
1094   return anything_changed;
1095 }
1096 
1097 /* Dump the value chain data to stderr.  */
1098 
1099 DEBUG_FUNCTION void
1100 debug_value_data (struct value_data *vd)
1101 {
1102   HARD_REG_SET set;
1103   unsigned int i, j;
1104 
1105   CLEAR_HARD_REG_SET (set);
1106 
1107   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1108     if (vd->e[i].oldest_regno == i)
1109       {
1110 	if (vd->e[i].mode == VOIDmode)
1111 	  {
1112 	    if (vd->e[i].next_regno != INVALID_REGNUM)
1113 	      fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1114 		       i, vd->e[i].next_regno);
1115 	    continue;
1116 	  }
1117 
1118 	SET_HARD_REG_BIT (set, i);
1119 	fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1120 
1121 	for (j = vd->e[i].next_regno;
1122 	     j != INVALID_REGNUM;
1123 	     j = vd->e[j].next_regno)
1124 	  {
1125 	    if (TEST_HARD_REG_BIT (set, j))
1126 	      {
1127 		fprintf (stderr, "[%u] Loop in regno chain\n", j);
1128 		return;
1129 	      }
1130 
1131 	    if (vd->e[j].oldest_regno != i)
1132 	      {
1133 		fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1134 			 j, vd->e[j].oldest_regno);
1135 		return;
1136 	      }
1137 	    SET_HARD_REG_BIT (set, j);
1138 	    fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1139 	  }
1140 	fputc ('\n', stderr);
1141       }
1142 
1143   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1144     if (! TEST_HARD_REG_BIT (set, i)
1145 	&& (vd->e[i].mode != VOIDmode
1146 	    || vd->e[i].oldest_regno != i
1147 	    || vd->e[i].next_regno != INVALID_REGNUM))
1148       fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1149 	       i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1150 	       vd->e[i].next_regno);
1151 }
1152 
1153 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1154    DEBUG_INSNs are skipped, since we do not want to involve the DF-related
1155    machinery that pass_cprop_hardreg::execute uses to handle them.
1156 
1157    NOTE: Currently this is only used for shrink-wrapping.  Maybe extend it
1158    to handle DEBUG_INSNs for other uses.  */
1159 
1160 void
1161 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1162 {
1163   struct value_data *vd;
1164   vd = XNEWVEC (struct value_data, 1);
1165   init_value_data (vd);
1166 
1167   skip_debug_insn_p = true;
1168   copyprop_hardreg_forward_1 (bb, vd);
1169   free (vd);
1170   skip_debug_insn_p = false;
1171 }
1172 
1173 static void
1174 validate_value_data (struct value_data *vd)
1175 {
1176   HARD_REG_SET set;
1177   unsigned int i, j;
1178 
1179   CLEAR_HARD_REG_SET (set);
1180 
1181   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1182     if (vd->e[i].oldest_regno == i)
1183       {
1184 	if (vd->e[i].mode == VOIDmode)
1185 	  {
1186 	    if (vd->e[i].next_regno != INVALID_REGNUM)
1187 	      internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1188 			      i, vd->e[i].next_regno);
1189 	    continue;
1190 	  }
1191 
1192 	SET_HARD_REG_BIT (set, i);
1193 
1194 	for (j = vd->e[i].next_regno;
1195 	     j != INVALID_REGNUM;
1196 	     j = vd->e[j].next_regno)
1197 	  {
1198 	    if (TEST_HARD_REG_BIT (set, j))
1199 	      internal_error ("validate_value_data: Loop in regno chain (%u)",
1200 			      j);
1201 	    if (vd->e[j].oldest_regno != i)
1202 	      internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1203 			      j, vd->e[j].oldest_regno);
1204 
1205 	    SET_HARD_REG_BIT (set, j);
1206 	  }
1207       }
1208 
1209   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1210     if (! TEST_HARD_REG_BIT (set, i)
1211 	&& (vd->e[i].mode != VOIDmode
1212 	    || vd->e[i].oldest_regno != i
1213 	    || vd->e[i].next_regno != INVALID_REGNUM))
1214       internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1215 		      i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1216 		      vd->e[i].next_regno);
1217 }
1218 
1219 
1220 namespace {
1221 
1222 const pass_data pass_data_cprop_hardreg =
1223 {
1224   RTL_PASS, /* type */
1225   "cprop_hardreg", /* name */
1226   OPTGROUP_NONE, /* optinfo_flags */
1227   TV_CPROP_REGISTERS, /* tv_id */
1228   0, /* properties_required */
1229   0, /* properties_provided */
1230   0, /* properties_destroyed */
1231   0, /* todo_flags_start */
1232   TODO_df_finish, /* todo_flags_finish */
1233 };
1234 
1235 class pass_cprop_hardreg : public rtl_opt_pass
1236 {
1237 public:
1238   pass_cprop_hardreg (gcc::context *ctxt)
1239     : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1240   {}
1241 
1242   /* opt_pass methods: */
1243   virtual bool gate (function *)
1244     {
1245       return (optimize > 0 && (flag_cprop_registers));
1246     }
1247 
1248   virtual unsigned int execute (function *);
1249 
1250 }; // class pass_cprop_hardreg
1251 
1252 unsigned int
1253 pass_cprop_hardreg::execute (function *fun)
1254 {
1255   struct value_data *all_vd;
1256   basic_block bb;
1257   bool analyze_called = false;
1258 
1259   all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1260 
1261   auto_sbitmap visited (last_basic_block_for_fn (fun));
1262   bitmap_clear (visited);
1263 
1264   FOR_EACH_BB_FN (bb, fun)
1265     {
1266       bitmap_set_bit (visited, bb->index);
1267 
1268       /* If a block has a single predecessor that we've already
1269 	 processed, begin with the value data that was live at
1270 	 the end of the predecessor block.  */
1271       /* ??? Ought to use more intelligent queuing of blocks.  */
1272       if (single_pred_p (bb)
1273 	  && bitmap_bit_p (visited, single_pred (bb)->index)
1274 	  && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1275 	{
1276 	  all_vd[bb->index] = all_vd[single_pred (bb)->index];
1277 	  if (all_vd[bb->index].n_debug_insn_changes)
1278 	    {
1279 	      unsigned int regno;
1280 
1281 	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1282 		{
1283 		  if (all_vd[bb->index].e[regno].debug_insn_changes)
1284 		    {
1285 		      all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1286 		      if (--all_vd[bb->index].n_debug_insn_changes == 0)
1287 			break;
1288 		    }
1289 		}
1290 	    }
1291 	}
1292       else
1293 	init_value_data (all_vd + bb->index);
1294 
1295       copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1296     }
1297 
1298   if (MAY_HAVE_DEBUG_BIND_INSNS)
1299     {
1300       FOR_EACH_BB_FN (bb, fun)
1301 	if (bitmap_bit_p (visited, bb->index)
1302 	    && all_vd[bb->index].n_debug_insn_changes)
1303 	  {
1304 	    unsigned int regno;
1305 	    bitmap live;
1306 
1307 	    if (!analyze_called)
1308 	      {
1309 		df_analyze ();
1310 		analyze_called = true;
1311 	      }
1312 	    live = df_get_live_out (bb);
1313 	    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1314 	      if (all_vd[bb->index].e[regno].debug_insn_changes)
1315 		{
1316 		  if (REGNO_REG_SET_P (live, regno))
1317 		    apply_debug_insn_changes (all_vd + bb->index, regno);
1318 		  if (all_vd[bb->index].n_debug_insn_changes == 0)
1319 		    break;
1320 		}
1321 	  }
1322 
1323       queued_debug_insn_change_pool.release ();
1324     }
1325 
1326   free (all_vd);
1327   return 0;
1328 }
1329 
1330 } // anon namespace
1331 
1332 rtl_opt_pass *
1333 make_pass_cprop_hardreg (gcc::context *ctxt)
1334 {
1335   return new pass_cprop_hardreg (ctxt);
1336 }
1337