1 /* Copy propagation on hard registers for the GNU compiler.
2    Copyright (C) 2000-2019 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38 
39 /* The following code does forward propagation of hard register copies.
40    The object is to eliminate as many dependencies as possible, so that
41    we have the most scheduling freedom.  As a side effect, we also clean
42    up some silly register allocation decisions made by reload.  This
43    code may be obsoleted by a new register allocator.  */
44 
45 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
46    lifetime of a register and get the DEBUG_INSN subsequently reset.
47    So they are queued instead, and updated only when the register is
48    used in some subsequent real insn before it is set.  */
49 struct queued_debug_insn_change
50 {
51   struct queued_debug_insn_change *next;
52   rtx_insn *insn;
53   rtx *loc;
54   rtx new_rtx;
55 };
56 
57 /* For each register, we have a list of registers that contain the same
58    value.  The OLDEST_REGNO field points to the head of the list, and
59    the NEXT_REGNO field runs through the list.  The MODE field indicates
60    what mode the data is known to be in; this field is VOIDmode when the
61    register is not known to contain valid data.  */
62 
63 struct value_data_entry
64 {
65   machine_mode mode;
66   unsigned int oldest_regno;
67   unsigned int next_regno;
68   struct queued_debug_insn_change *debug_insn_changes;
69 };
70 
71 struct value_data
72 {
73   struct value_data_entry e[FIRST_PSEUDO_REGISTER];
74   unsigned int max_value_regs;
75   unsigned int n_debug_insn_changes;
76 };
77 
78 static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
79   ("debug insn changes pool");
80 
81 static bool skip_debug_insn_p;
82 
83 static void kill_value_one_regno (unsigned, struct value_data *);
84 static void kill_value_regno (unsigned, unsigned, struct value_data *);
85 static void kill_value (const_rtx, struct value_data *);
86 static void set_value_regno (unsigned, machine_mode, struct value_data *);
87 static void init_value_data (struct value_data *);
88 static void kill_clobbered_value (rtx, const_rtx, void *);
89 static void kill_set_value (rtx, const_rtx, void *);
90 static void copy_value (rtx, rtx, struct value_data *);
91 static bool mode_change_ok (machine_mode, machine_mode,
92 			    unsigned int);
93 static rtx maybe_mode_change (machine_mode, machine_mode,
94 			      machine_mode, unsigned int, unsigned int);
95 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
96 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
97 				      struct value_data *);
98 static bool replace_oldest_value_addr (rtx *, enum reg_class,
99 				       machine_mode, addr_space_t,
100 				       rtx_insn *, struct value_data *);
101 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
102 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
103 extern void debug_value_data (struct value_data *);
104 static void validate_value_data (struct value_data *);
105 
106 /* Free all queued updates for DEBUG_INSNs that change some reg to
107    register REGNO.  */
108 
109 static void
110 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
111 {
112   struct queued_debug_insn_change *cur, *next;
113   for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
114     {
115       next = cur->next;
116       --vd->n_debug_insn_changes;
117       queued_debug_insn_change_pool.remove (cur);
118     }
119   vd->e[regno].debug_insn_changes = NULL;
120 }
121 
122 /* Kill register REGNO.  This involves removing it from any value
123    lists, and resetting the value mode to VOIDmode.  This is only a
124    helper function; it does not handle any hard registers overlapping
125    with REGNO.  */
126 
127 static void
128 kill_value_one_regno (unsigned int regno, struct value_data *vd)
129 {
130   unsigned int i, next;
131 
132   if (vd->e[regno].oldest_regno != regno)
133     {
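      /* REGNO is not the oldest register in its chain: find the chain
	 member whose NEXT_REGNO is REGNO and splice REGNO out.  */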
134       for (i = vd->e[regno].oldest_regno;
135 	   vd->e[i].next_regno != regno;
136 	   i = vd->e[i].next_regno)
137 	continue;
138       vd->e[i].next_regno = vd->e[regno].next_regno;
139     }
140   else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
141     {
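      /* REGNO was the oldest register in its chain: its successor NEXT
	 becomes the new oldest register for all remaining members.  */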
142       for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
143 	vd->e[i].oldest_regno = next;
144     }
145 
146   vd->e[regno].mode = VOIDmode;
147   vd->e[regno].oldest_regno = regno;
148   vd->e[regno].next_regno = INVALID_REGNUM;
149   if (vd->e[regno].debug_insn_changes)
150     free_debug_insn_changes (vd, regno);
151 
152   if (flag_checking)
153     validate_value_data (vd);
154 }
155 
156 /* Kill the value in register REGNO for NREGS, and any other registers
157    whose values overlap.  */
158 
159 static void
160 kill_value_regno (unsigned int regno, unsigned int nregs,
161 		  struct value_data *vd)
162 {
163   unsigned int j;
164 
165   /* Kill the value we're told to kill.  */
166   for (j = 0; j < nregs; ++j)
167     kill_value_one_regno (regno + j, vd);
168 
169   /* Kill everything that overlapped what we're told to kill.  */
170   if (regno < vd->max_value_regs)
171     j = 0;
172   else
173     j = regno - vd->max_value_regs;
174   for (; j < regno; ++j)
175     {
176       unsigned int i, n;
177       if (vd->e[j].mode == VOIDmode)
178 	continue;
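      /* If the value that starts in register J spans far enough to reach
	 REGNO, it overlaps the killed range, so kill all of its pieces.  */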
179       n = hard_regno_nregs (j, vd->e[j].mode);
180       if (j + n > regno)
181 	for (i = 0; i < n; ++i)
182 	  kill_value_one_regno (j + i, vd);
183     }
184 }
185 
186 /* Kill X.  This is a convenience function wrapping kill_value_regno
187    so that we mind the mode the register is in.  */
188 
189 static void
190 kill_value (const_rtx x, struct value_data *vd)
191 {
192   if (GET_CODE (x) == SUBREG)
193     {
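      /* For a SUBREG of a hard register, try to fold the SUBREG to the
	 hard register that actually holds the accessed part; if that
	 fails, fall back to killing the whole inner register.  */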
194       rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
195 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
196       x = tmp ? tmp : SUBREG_REG (x);
197     }
198   if (REG_P (x))
199     kill_value_regno (REGNO (x), REG_NREGS (x), vd);
200 }
201 
202 /* Remember that REGNO is valid in MODE.  */
203 
204 static void
205 set_value_regno (unsigned int regno, machine_mode mode,
206 		 struct value_data *vd)
207 {
208   unsigned int nregs;
209 
210   vd->e[regno].mode = mode;
211 
212   nregs = hard_regno_nregs (regno, mode);
213   if (nregs > vd->max_value_regs)
214     vd->max_value_regs = nregs;
215 }
216 
217 /* Initialize VD such that there are no known relationships between regs.  */
218 
219 static void
220 init_value_data (struct value_data *vd)
221 {
222   int i;
223   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
224     {
225       vd->e[i].mode = VOIDmode;
226       vd->e[i].oldest_regno = i;
227       vd->e[i].next_regno = INVALID_REGNUM;
228       vd->e[i].debug_insn_changes = NULL;
229     }
230   vd->max_value_regs = 0;
231   vd->n_debug_insn_changes = 0;
232 }
233 
234 /* Called through note_stores.  If X is clobbered, kill its value.  */
235 
236 static void
237 kill_clobbered_value (rtx x, const_rtx set, void *data)
238 {
239   struct value_data *const vd = (struct value_data *) data;
240   gcc_assert (GET_CODE (set) != CLOBBER_HIGH || REG_P (x));
241 
242   if (GET_CODE (set) == CLOBBER
243       || (GET_CODE (set) == CLOBBER_HIGH
244 	  && reg_is_clobbered_by_clobber_high (x, XEXP (set, 0))))
245     kill_value (x, vd);
246 }
247 
248 /* A structure passed as data to kill_set_value through note_stores.  */
249 struct kill_set_value_data
250 {
251   struct value_data *vd;
252   rtx ignore_set_reg;
253 };
254 
255 /* Called through note_stores.  If X is set, not clobbered, kill its
256    current value and install it as the root of its own value list.  */
257 
258 static void
259 kill_set_value (rtx x, const_rtx set, void *data)
260 {
261   struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
262   if (rtx_equal_p (x, ksvd->ignore_set_reg))
263     return;
264 
265   gcc_assert (GET_CODE (set) != CLOBBER_HIGH || REG_P (x));
266   if (GET_CODE (set) != CLOBBER && GET_CODE (set) != CLOBBER_HIGH)
267     {
268       kill_value (x, ksvd->vd);
269       if (REG_P (x))
270 	set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
271     }
272 }
273 
274 /* Kill any register used in X as the base of an auto-increment expression,
275    and install that register as the root of its own value list.  */
276 
277 static void
278 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
279 {
280   subrtx_iterator::array_type array;
281   FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
282     {
283       const_rtx x = *iter;
284       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
285 	{
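	  /* The address register of an auto-increment is both read and
	     written here, so its old value dies and it becomes the root
	     of a new value chain of its own.  */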
286 	  x = XEXP (x, 0);
287 	  kill_value (x, vd);
288 	  set_value_regno (REGNO (x), GET_MODE (x), vd);
289 	  iter.skip_subrtxes ();
290 	}
291     }
292 }
293 
294 /* Assert that SRC has been copied to DEST.  Adjust the data structures
295    to reflect that SRC contains an older copy of the shared value.  */
296 
297 static void
298 copy_value (rtx dest, rtx src, struct value_data *vd)
299 {
300   unsigned int dr = REGNO (dest);
301   unsigned int sr = REGNO (src);
302   unsigned int dn, sn;
303   unsigned int i;
304 
305   /* ??? At present, it's possible to see noop sets.  It'd be nice if
306      this were cleaned up beforehand...  */
307   if (sr == dr)
308     return;
309 
310   /* Do not propagate copies to the stack pointer, as that can leave
311      memory accesses with no scheduling dependency on the stack update.  */
312   if (dr == STACK_POINTER_REGNUM)
313     return;
314 
315   /* Likewise with the frame pointer, if we're using one.  */
316   if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
317     return;
318 
319   /* Do not propagate copies to fixed or global registers: patterns
320      may rely on seeing a particular fixed register, and users can
321      expect asm statements to use the chosen global register.  */
322   if (fixed_regs[dr] || global_regs[dr])
323     return;
324 
325   /* If SRC and DEST overlap, don't record anything.  */
326   dn = REG_NREGS (dest);
327   sn = REG_NREGS (src);
328   if ((dr > sr && dr < sr + sn)
329       || (sr > dr && sr < dr + dn))
330     return;
331 
332   /* If SRC had no assigned mode (i.e. we didn't know it was live)
333      assign it now and assume the value came from an input argument
334      or somesuch.  */
335   if (vd->e[sr].mode == VOIDmode)
336     set_value_regno (sr, vd->e[dr].mode, vd);
337 
338   /* If we are narrowing the input to a smaller number of hard regs,
339      and it is in big endian, we are really extracting a high part.
340      Since we generally associate a low part of a value with the value itself,
341      we must not do the same for the high part.
342      Note we can still get low parts for the same mode combination through
343      a two-step copy involving differently sized hard regs.
344      Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
345      (set (reg:DI r0) (reg:DI fr0))
346      (set (reg:SI fr2) (reg:SI r0))
347      loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
348      (set (reg:SI fr2) (reg:SI fr0))
349      loads the high part of (reg:DI fr0) into fr2.
350 
351      We can't properly represent the latter case in our tables, so don't
352      record anything then.  */
353   else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
354 	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
355 					       vd->e[sr].mode), 0U))
356     return;
357 
358   /* If SRC had been assigned a mode narrower than the copy, we can't
359      link DEST into the chain, because not all of the pieces of the
360      copy came from oldest_regno.  */
361   else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
362     return;
363 
364   /* Link DR at the end of the value chain used by SR.  */
365 
366   vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
367 
368   for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
369     continue;
370   vd->e[i].next_regno = dr;
371 
372   if (flag_checking)
373     validate_value_data (vd);
374 }
375 
376 /* Return true if a mode change from ORIG to NEW is allowed for REGNO.  */
377 
378 static bool
379 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
380 		unsigned int regno ATTRIBUTE_UNUSED)
381 {
382   if (partial_subreg_p (orig_mode, new_mode))
383     return false;
384 
385   return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
386 }
387 
388 /* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
389    was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
390    in NEW_MODE.
391    Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */
392 
393 static rtx
394 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
395 		   machine_mode new_mode, unsigned int regno,
396 		   unsigned int copy_regno ATTRIBUTE_UNUSED)
397 {
398   if (partial_subreg_p (copy_mode, orig_mode)
399       && partial_subreg_p (copy_mode, new_mode))
400     return NULL_RTX;
401 
402   /* Avoid creating multiple copies of the stack pointer.  Some ports
403      assume there is one and only one stack pointer.
404 
405      It's unclear if we need to do the same for other special registers.  */
406   if (regno == STACK_POINTER_REGNUM)
407     return NULL_RTX;
408 
409   if (orig_mode == new_mode)
410     return gen_raw_REG (new_mode, regno);
411   else if (mode_change_ok (orig_mode, new_mode, regno))
412     {
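      /* Work out which hard register within the ORIG_MODE value holds the
	 NEW_MODE part that was accessed through COPY_REGNO, and check that
	 this register is allowed to hold NEW_MODE.  */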
413       int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
414       int use_nregs = hard_regno_nregs (copy_regno, new_mode);
415       poly_uint64 bytes_per_reg;
416       if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
417 			    copy_nregs, &bytes_per_reg))
418 	return NULL_RTX;
419       poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
420       poly_uint64 offset
421 	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
422 				      GET_MODE_SIZE (orig_mode));
423       regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
424       if (targetm.hard_regno_mode_ok (regno, new_mode))
425 	return gen_raw_REG (new_mode, regno);
426     }
427   return NULL_RTX;
428 }
429 
430 /* Find the oldest copy of the value contained in REGNO that is in
431    register class CL and has mode MODE.  If found, return an rtx
432    of that oldest register, otherwise return NULL.  */
433 
434 static rtx
435 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
436 {
437   unsigned int regno = REGNO (reg);
438   machine_mode mode = GET_MODE (reg);
439   unsigned int i;
440 
441   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
442 
443   /* If we are accessing REG in some mode other than the one we set it in,
444      make sure that the replacement is valid.  In particular, consider
445 	(set (reg:DI r11) (...))
446 	(set (reg:SI r9) (reg:SI r11))
447 	(set (reg:SI r10) (...))
448 	(set (...) (reg:DI r9))
449      Replacing r9 with r11 is invalid.  */
450   if (mode != vd->e[regno].mode
451       && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
452     return NULL_RTX;
453 
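  /* Walk the chain from the oldest register towards REGNO and return the
     first member that is in class CL and can supply the value in MODE.  */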
454   for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
455     {
456       machine_mode oldmode = vd->e[i].mode;
457       rtx new_rtx;
458 
459       if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
460 	continue;
461 
462       new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
463       if (new_rtx)
464 	{
465 	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
466 	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
467 	  REG_POINTER (new_rtx) = REG_POINTER (reg);
468 	  return new_rtx;
469 	}
470     }
471 
472   return NULL_RTX;
473 }
474 
475 /* If possible, replace the register at *LOC with the oldest register
476    in register class CL.  Return true if successfully replaced.  */
477 
478 static bool
479 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
480 			  struct value_data *vd)
481 {
482   rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
483   if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
484     {
485       if (DEBUG_INSN_P (insn))
486 	{
487 	  struct queued_debug_insn_change *change;
488 
489 	  if (dump_file)
490 	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
491 		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
492 
493 	  change = queued_debug_insn_change_pool.allocate ();
494 	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
495 	  change->insn = insn;
496 	  change->loc = loc;
497 	  change->new_rtx = new_rtx;
498 	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
499 	  ++vd->n_debug_insn_changes;
500 	  return true;
501 	}
502       if (dump_file)
503 	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
504 		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
505 
506       validate_change (insn, loc, new_rtx, 1);
507       return true;
508     }
509   return false;
510 }
511 
512 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
513    Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
514    BASE_REG_CLASS depending on how the register is being considered.  */
515 
516 static bool
517 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
518 			   machine_mode mode, addr_space_t as,
519 			   rtx_insn *insn, struct value_data *vd)
520 {
521   rtx x = *loc;
522   RTX_CODE code = GET_CODE (x);
523   const char *fmt;
524   int i, j;
525   bool changed = false;
526 
527   switch (code)
528     {
529     case PLUS:
530       if (DEBUG_INSN_P (insn))
531 	break;
532 
533       {
534 	rtx orig_op0 = XEXP (x, 0);
535 	rtx orig_op1 = XEXP (x, 1);
536 	RTX_CODE code0 = GET_CODE (orig_op0);
537 	RTX_CODE code1 = GET_CODE (orig_op1);
538 	rtx op0 = orig_op0;
539 	rtx op1 = orig_op1;
540 	rtx *locI = NULL;
541 	rtx *locB = NULL;
542 	enum rtx_code index_code = SCRATCH;
543 
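	/* Decide which operand of the PLUS acts as the index and which as
	   the base, mirroring find_reloads_address_1, so that each one is
	   checked against the appropriate register class below.  */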
544 	if (GET_CODE (op0) == SUBREG)
545 	  {
546 	    op0 = SUBREG_REG (op0);
547 	    code0 = GET_CODE (op0);
548 	  }
549 
550 	if (GET_CODE (op1) == SUBREG)
551 	  {
552 	    op1 = SUBREG_REG (op1);
553 	    code1 = GET_CODE (op1);
554 	  }
555 
556 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
557 	    || code0 == ZERO_EXTEND || code1 == MEM)
558 	  {
559 	    locI = &XEXP (x, 0);
560 	    locB = &XEXP (x, 1);
561 	    index_code = GET_CODE (*locI);
562 	  }
563 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
564 		 || code1 == ZERO_EXTEND || code0 == MEM)
565 	  {
566 	    locI = &XEXP (x, 1);
567 	    locB = &XEXP (x, 0);
568 	    index_code = GET_CODE (*locI);
569 	  }
570 	else if (code0 == CONST_INT || code0 == CONST
571 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
572 	  {
573 	    locB = &XEXP (x, 1);
574 	    index_code = GET_CODE (XEXP (x, 0));
575 	  }
576 	else if (code1 == CONST_INT || code1 == CONST
577 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
578 	  {
579 	    locB = &XEXP (x, 0);
580 	    index_code = GET_CODE (XEXP (x, 1));
581 	  }
582 	else if (code0 == REG && code1 == REG)
583 	  {
584 	    int index_op;
585 	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
586 
587 	    if (REGNO_OK_FOR_INDEX_P (regno1)
588 		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
589 	      index_op = 1;
590 	    else if (REGNO_OK_FOR_INDEX_P (regno0)
591 		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
592 	      index_op = 0;
593 	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
594 		     || REGNO_OK_FOR_INDEX_P (regno1))
595 	      index_op = 1;
596 	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
597 	      index_op = 0;
598 	    else
599 	      index_op = 1;
600 
601 	    locI = &XEXP (x, index_op);
602 	    locB = &XEXP (x, !index_op);
603 	    index_code = GET_CODE (*locI);
604 	  }
605 	else if (code0 == REG)
606 	  {
607 	    locI = &XEXP (x, 0);
608 	    locB = &XEXP (x, 1);
609 	    index_code = GET_CODE (*locI);
610 	  }
611 	else if (code1 == REG)
612 	  {
613 	    locI = &XEXP (x, 1);
614 	    locB = &XEXP (x, 0);
615 	    index_code = GET_CODE (*locI);
616 	  }
617 
618 	if (locI)
619 	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
620 						mode, as, insn, vd);
621 	if (locB)
622 	  changed |= replace_oldest_value_addr (locB,
623 						base_reg_class (mode, as, PLUS,
624 								index_code),
625 						mode, as, insn, vd);
626 	return changed;
627       }
628 
629     case POST_INC:
630     case POST_DEC:
631     case POST_MODIFY:
632     case PRE_INC:
633     case PRE_DEC:
634     case PRE_MODIFY:
635       return false;
636 
637     case MEM:
638       return replace_oldest_value_mem (x, insn, vd);
639 
640     case REG:
641       return replace_oldest_value_reg (loc, cl, insn, vd);
642 
643     default:
644       break;
645     }
646 
647   fmt = GET_RTX_FORMAT (code);
648   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
649     {
650       if (fmt[i] == 'e')
651 	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
652 					      insn, vd);
653       else if (fmt[i] == 'E')
654 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
655 	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
656 						mode, as, insn, vd);
657     }
658 
659   return changed;
660 }
661 
662 /* Similar to replace_oldest_value_reg, but X contains a memory.  */
663 
664 static bool
665 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
666 {
667   enum reg_class cl;
668 
669   if (DEBUG_INSN_P (insn))
670     cl = ALL_REGS;
671   else
672     cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
673 
674   return replace_oldest_value_addr (&XEXP (x, 0), cl,
675 				    GET_MODE (x), MEM_ADDR_SPACE (x),
676 				    insn, vd);
677 }
678 
679 /* Apply all queued updates for DEBUG_INSNs that change some reg to
680    register REGNO.  */
681 
682 static void
683 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
684 {
685   struct queued_debug_insn_change *change;
686   rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
687 
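  /* Queue all replacements belonging to one DEBUG_INSN and commit them as
     a group before moving on to the changes for the next insn.  */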
688   for (change = vd->e[regno].debug_insn_changes;
689        change;
690        change = change->next)
691     {
692       if (last_insn != change->insn)
693 	{
694 	  apply_change_group ();
695 	  last_insn = change->insn;
696 	}
697       validate_change (change->insn, change->loc, change->new_rtx, 1);
698     }
699   apply_change_group ();
700 }
701 
702 /* Called via note_uses.  For every register used in a real insn,
703    apply any queued DEBUG_INSN changes that would replace some
704    register with that used register.  */
705 
706 static void
707 cprop_find_used_regs (rtx *loc, void *data)
708 {
709   struct value_data *const vd = (struct value_data *) data;
710   subrtx_iterator::array_type array;
711   FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
712     {
713       const_rtx x = *iter;
714       if (REG_P (x))
715 	{
716 	  unsigned int regno = REGNO (x);
717 	  if (vd->e[regno].debug_insn_changes)
718 	    {
719 	      apply_debug_insn_changes (vd, regno);
720 	      free_debug_insn_changes (vd, regno);
721 	    }
722 	}
723     }
724 }
725 
726 /* Apply clobbers of INSN in PATTERN and CALL_INSN_FUNCTION_USAGE to value_data VD.  */
727 
728 static void
729 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
730 {
731   note_stores (PATTERN (insn), kill_clobbered_value, vd);
732 
733   if (CALL_P (insn))
734     {
735       rtx exp;
736 
737       for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
738 	{
739 	  rtx x = XEXP (exp, 0);
740 	  if (GET_CODE (x) == CLOBBER)
741 	    kill_value (SET_DEST (x), vd);
742 	}
743     }
744 }
745 
746 /* Perform the forward copy propagation on basic block BB.  */
747 
748 static bool
749 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
750 {
751   bool anything_changed = false;
752   rtx_insn *insn, *next;
753 
754   for (insn = BB_HEAD (bb); ; insn = next)
755     {
756       int n_ops, i, predicated;
757       bool is_asm, any_replacements;
758       rtx set;
759       rtx link;
760       bool changed = false;
761       struct kill_set_value_data ksvd;
762 
763       next = NEXT_INSN (insn);
764       if (!NONDEBUG_INSN_P (insn))
765 	{
766 	  if (DEBUG_BIND_INSN_P (insn))
767 	    {
768 	      rtx loc = INSN_VAR_LOCATION_LOC (insn);
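	      /* Propagate into the location expression of the debug bind.
		 ALL_REGS is used because a DEBUG_INSN imposes no constraint
		 on the register class of the replacement.  */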
769 	      if (!VAR_LOC_UNKNOWN_P (loc))
770 		replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
771 					   ALL_REGS, GET_MODE (loc),
772 					   ADDR_SPACE_GENERIC, insn, vd);
773 	    }
774 
775 	  if (insn == BB_END (bb))
776 	    break;
777 	  else
778 	    continue;
779 	}
780 
781       set = single_set (insn);
782 
783       /* Detect noop sets and remove them before processing side effects.  */
784       if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
785 	{
786 	  unsigned int regno = REGNO (SET_SRC (set));
787 	  rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
788 					  SET_DEST (set), vd);
789 	  rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
790 					  SET_SRC (set), vd);
791 	  if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
792 	    {
793 	      bool last = insn == BB_END (bb);
794 	      delete_insn (insn);
795 	      if (last)
796 		break;
797 	      continue;
798 	    }
799 	}
800 
801       /* Detect obviously dead sets (via REG_UNUSED notes) and remove them.  */
802       if (set
803 	  && !RTX_FRAME_RELATED_P (insn)
804 	  && !may_trap_p (set)
805 	  && find_reg_note (insn, REG_UNUSED, SET_DEST (set))
806 	  && !side_effects_p (SET_SRC (set))
807 	  && !side_effects_p (SET_DEST (set)))
808 	{
809 	  bool last = insn == BB_END (bb);
810 	  delete_insn (insn);
811 	  if (last)
812 	    break;
813 	  continue;
814 	}
815 
816 
817       extract_constrain_insn (insn);
818       preprocess_constraints (insn);
819       const operand_alternative *op_alt = which_op_alt ();
820       n_ops = recog_data.n_operands;
821       is_asm = asm_noperands (PATTERN (insn)) >= 0;
822 
823       /* Simplify the code below by promoting OP_OUT to OP_INOUT
824 	 in predicated instructions.  */
825 
826       predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
827       for (i = 0; i < n_ops; ++i)
828 	{
829 	  int matches = op_alt[i].matches;
830 	  if (matches >= 0 || op_alt[i].matched >= 0
831 	      || (predicated && recog_data.operand_type[i] == OP_OUT))
832 	    recog_data.operand_type[i] = OP_INOUT;
833 	}
834 
835       /* Apply changes to earlier DEBUG_INSNs if possible.  */
836       if (vd->n_debug_insn_changes)
837 	note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
838 
839       /* For each earlyclobber operand, zap the value data.  */
840       for (i = 0; i < n_ops; i++)
841 	if (op_alt[i].earlyclobber)
842 	  kill_value (recog_data.operand[i], vd);
843 
844       /* Within asms, a clobber cannot overlap inputs or outputs.
845 	 I wouldn't think this were true for regular insns, but
846 	 scan_rtx treats them like that...  */
847       kill_clobbered_values (insn, vd);
848 
849       /* Kill all auto-incremented values.  */
850       /* ??? REG_INC is useless, since stack pushes aren't done that way.  */
851       kill_autoinc_value (insn, vd);
852 
853       /* Kill all early-clobbered operands.  */
854       for (i = 0; i < n_ops; i++)
855 	if (op_alt[i].earlyclobber)
856 	  kill_value (recog_data.operand[i], vd);
857 
858       /* If we have dead sets in the insn, then we need to note them
859 	 just as we would note clobbers.  */
860       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
861 	{
862 	  if (REG_NOTE_KIND (link) == REG_UNUSED)
863 	    {
864 	      kill_value (XEXP (link, 0), vd);
865 	      /* Furthermore, if the insn looked like a single-set,
866 		 but the dead store kills the source value of that
867 		 set, then we can no longer use the plain move
868 		 special case below.  */
869 	      if (set
870 		  && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
871 		set = NULL;
872 	    }
873 
874 	  /* We need to keep CFI info correct, and the same on all paths,
875 	     so we cannot normally replace the registers REG_CFA_REGISTER
876 	     refers to.  Bail.  */
877 	  if (REG_NOTE_KIND (link) == REG_CFA_REGISTER)
878 	    goto did_replacement;
879 	}
880 
881       /* Special-case plain move instructions, since we may well
882 	 be able to do the move from a different register class.  */
883       if (set && REG_P (SET_SRC (set)))
884 	{
885 	  rtx src = SET_SRC (set);
886 	  unsigned int regno = REGNO (src);
887 	  machine_mode mode = GET_MODE (src);
888 	  unsigned int i;
889 	  rtx new_rtx;
890 
891 	  /* If we are accessing SRC in some mode other than the one we
892 	     set it in, make sure that the replacement is valid.  */
893 	  if (mode != vd->e[regno].mode)
894 	    {
895 	      if (REG_NREGS (src)
896 		  > hard_regno_nregs (regno, vd->e[regno].mode))
897 		goto no_move_special_case;
898 
899 	      /* Likewise, if we are narrowing on a big-endian target, the
900 		 transformation is also invalid.  */
901 	      if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
902 		  && maybe_ne (subreg_lowpart_offset (mode,
903 						      vd->e[regno].mode), 0U))
904 		goto no_move_special_case;
905 	    }
906 
907 	  /* If the destination is also a register, try to find a source
908 	     register in the same class.  */
909 	  if (REG_P (SET_DEST (set)))
910 	    {
911 	      new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
912 					       src, vd);
913 
914 	      if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
915 		{
916 		  if (dump_file)
917 		    fprintf (dump_file,
918 			     "insn %u: replaced reg %u with %u\n",
919 			     INSN_UID (insn), regno, REGNO (new_rtx));
920 		  changed = true;
921 		  goto did_replacement;
922 		}
923 	      /* We need to re-extract as validate_change clobbers
924 		 recog_data.  */
925 	      extract_constrain_insn (insn);
926 	      preprocess_constraints (insn);
927 	    }
928 
929 	  /* Otherwise, try each register in the value chain and see if it gives a valid replacement.  */
930 	  for (i = vd->e[regno].oldest_regno; i != regno;
931 	       i = vd->e[i].next_regno)
932 	    {
933 	      new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
934 				       mode, i, regno);
935 	      if (new_rtx != NULL_RTX)
936 		{
937 		  if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
938 		    {
939 		      ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
940 		      REG_ATTRS (new_rtx) = REG_ATTRS (src);
941 		      REG_POINTER (new_rtx) = REG_POINTER (src);
942 		      if (dump_file)
943 			fprintf (dump_file,
944 				 "insn %u: replaced reg %u with %u\n",
945 				 INSN_UID (insn), regno, REGNO (new_rtx));
946 		      changed = true;
947 		      goto did_replacement;
948 		    }
949 		  /* We need to re-extract as validate_change clobbers
950 		     recog_data.  */
951 		  extract_constrain_insn (insn);
952 		  preprocess_constraints (insn);
953 		}
954 	    }
955 	}
956       no_move_special_case:
957 
958       any_replacements = false;
959 
960       /* For each input operand, replace a hard register with the
961 	 eldest live copy that's in an appropriate register class.  */
962       for (i = 0; i < n_ops; i++)
963 	{
964 	  bool replaced = false;
965 
966 	  /* Don't scan match_operand here, since we've no reg class
967 	     information to pass down.  Any operands that we could
968 	     substitute in will be represented elsewhere.  */
969 	  if (recog_data.constraints[i][0] == '\0')
970 	    continue;
971 
972 	  /* Don't replace in asms intentionally referencing hard regs.  */
973 	  if (is_asm && REG_P (recog_data.operand[i])
974 	      && (REGNO (recog_data.operand[i])
975 		  == ORIGINAL_REGNO (recog_data.operand[i])))
976 	    continue;
977 
978 	  if (recog_data.operand_type[i] == OP_IN)
979 	    {
980 	      if (op_alt[i].is_address)
981 		replaced
982 		  = replace_oldest_value_addr (recog_data.operand_loc[i],
983 					       alternative_class (op_alt, i),
984 					       VOIDmode, ADDR_SPACE_GENERIC,
985 					       insn, vd);
986 	      else if (REG_P (recog_data.operand[i]))
987 		replaced
988 		  = replace_oldest_value_reg (recog_data.operand_loc[i],
989 					      alternative_class (op_alt, i),
990 					      insn, vd);
991 	      else if (MEM_P (recog_data.operand[i]))
992 		replaced = replace_oldest_value_mem (recog_data.operand[i],
993 						     insn, vd);
994 	    }
995 	  else if (MEM_P (recog_data.operand[i]))
996 	    replaced = replace_oldest_value_mem (recog_data.operand[i],
997 						 insn, vd);
998 
999 	  /* If we performed any replacement, update match_dups.  */
1000 	  if (replaced)
1001 	    {
1002 	      int j;
1003 	      rtx new_rtx;
1004 
1005 	      new_rtx = *recog_data.operand_loc[i];
1006 	      recog_data.operand[i] = new_rtx;
1007 	      for (j = 0; j < recog_data.n_dups; j++)
1008 		if (recog_data.dup_num[j] == i)
1009 		  validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
1010 
1011 	      any_replacements = true;
1012 	    }
1013 	}
1014 
1015       if (any_replacements)
1016 	{
1017 	  if (! apply_change_group ())
1018 	    {
1019 	      if (dump_file)
1020 		fprintf (dump_file,
1021 			 "insn %u: reg replacements not verified\n",
1022 			 INSN_UID (insn));
1023 	    }
1024 	  else
1025 	    changed = true;
1026 	}
1027 
1028     did_replacement:
1029       if (changed)
1030 	{
1031 	  anything_changed = true;
1032 
1033 	  /* If something changed, perhaps further changes to earlier
1034 	     DEBUG_INSNs can be applied.  */
1035 	  if (vd->n_debug_insn_changes)
1036 	    note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1037 	  df_insn_rescan (insn);
1038 	}
1039 
1040       ksvd.vd = vd;
1041       ksvd.ignore_set_reg = NULL_RTX;
1042 
1043       /* Clobber call-clobbered registers.  */
1044       if (CALL_P (insn))
1045 	{
1046 	  unsigned int set_regno = INVALID_REGNUM;
1047 	  unsigned int set_nregs = 0;
1048 	  unsigned int regno;
1049 	  rtx exp;
1050 	  HARD_REG_SET regs_invalidated_by_this_call;
1051 
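	  /* A SET in CALL_INSN_FUNCTION_USAGE records that the call leaves
	     its destination equal to the given source (for example a return
	     value known to equal one of the arguments).  Record it as a copy
	     and remember to ignore it when noting stores below.  */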
1052 	  for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1053 	    {
1054 	      rtx x = XEXP (exp, 0);
1055 	      if (GET_CODE (x) == SET)
1056 		{
1057 		  rtx dest = SET_DEST (x);
1058 		  kill_value (dest, vd);
1059 		  set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1060 		  copy_value (dest, SET_SRC (x), vd);
1061 		  ksvd.ignore_set_reg = dest;
1062 		  set_regno = REGNO (dest);
1063 		  set_nregs = REG_NREGS (dest);
1064 		  break;
1065 		}
1066 	    }
1067 
1068 	  get_call_reg_set_usage (insn,
1069 				  &regs_invalidated_by_this_call,
1070 				  regs_invalidated_by_call);
1071 	  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1072 	    if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1073 		 || (targetm.hard_regno_call_part_clobbered
1074 		     (insn, regno, vd->e[regno].mode)))
1075 		&& (regno < set_regno || regno >= set_regno + set_nregs))
1076 	      kill_value_regno (regno, 1, vd);
1077 
1078 	  /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1079 	     of the SET isn't in regs_invalidated_by_call hard reg set,
1080 	     but instead among CLOBBERs on the CALL_INSN, we could wrongly
1081 	     assume the value in it is still live.  */
1082 	  if (ksvd.ignore_set_reg)
1083 	    kill_clobbered_values (insn, vd);
1084 	}
1085 
1086       bool copy_p = (set
1087 		     && REG_P (SET_DEST (set))
1088 		     && REG_P (SET_SRC (set)));
1089       bool noop_p = (copy_p
1090 		     && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1091 
1092       /* If a noop move uses a narrower mode than we have recorded,
1093 	 we need to either remove the noop move or fall back to kill_set_value.  */
1094       if (noop_p
1095 	  && partial_subreg_p (GET_MODE (SET_DEST (set)),
1096 			       vd->e[REGNO (SET_DEST (set))].mode))
1097 	{
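	  /* If the insn is a recognizable no-op move, just delete it;
	     otherwise clear NOOP_P so that the narrower set is recorded
	     by kill_set_value below.  */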
1098 	  if (noop_move_p (insn))
1099 	    {
1100 	      bool last = insn == BB_END (bb);
1101 	      delete_insn (insn);
1102 	      if (last)
1103 		break;
1104 	    }
1105 	  else
1106 	    noop_p = false;
1107 	}
1108 
1109       if (!noop_p)
1110 	{
1111 	  /* Notice stores.  */
1112 	  note_stores (PATTERN (insn), kill_set_value, &ksvd);
1113 
1114 	  /* Notice copies.  */
1115 	  if (copy_p)
1116 	    {
1117 	      df_insn_rescan (insn);
1118 	      copy_value (SET_DEST (set), SET_SRC (set), vd);
1119 	    }
1120 	}
1121 
1122       if (insn == BB_END (bb))
1123 	break;
1124     }
1125 
1126   return anything_changed;
1127 }
1128 
1129 /* Dump the value chain data to stderr.  */
1130 
1131 DEBUG_FUNCTION void
1132 debug_value_data (struct value_data *vd)
1133 {
1134   HARD_REG_SET set;
1135   unsigned int i, j;
1136 
1137   CLEAR_HARD_REG_SET (set);
1138 
1139   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1140     if (vd->e[i].oldest_regno == i)
1141       {
1142 	if (vd->e[i].mode == VOIDmode)
1143 	  {
1144 	    if (vd->e[i].next_regno != INVALID_REGNUM)
1145 	      fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1146 		       i, vd->e[i].next_regno);
1147 	    continue;
1148 	  }
1149 
1150 	SET_HARD_REG_BIT (set, i);
1151 	fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1152 
1153 	for (j = vd->e[i].next_regno;
1154 	     j != INVALID_REGNUM;
1155 	     j = vd->e[j].next_regno)
1156 	  {
1157 	    if (TEST_HARD_REG_BIT (set, j))
1158 	      {
1159 		fprintf (stderr, "[%u] Loop in regno chain\n", j);
1160 		return;
1161 	      }
1162 
1163 	    if (vd->e[j].oldest_regno != i)
1164 	      {
1165 		fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1166 			 j, vd->e[j].oldest_regno);
1167 		return;
1168 	      }
1169 	    SET_HARD_REG_BIT (set, j);
1170 	    fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1171 	  }
1172 	fputc ('\n', stderr);
1173       }
1174 
1175   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1176     if (! TEST_HARD_REG_BIT (set, i)
1177 	&& (vd->e[i].mode != VOIDmode
1178 	    || vd->e[i].oldest_regno != i
1179 	    || vd->e[i].next_regno != INVALID_REGNUM))
1180       fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1181 	       i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1182 	       vd->e[i].next_regno);
1183 }
1184 
1185 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1186    DEBUG_INSNs are skipped, since we do not want to involve the DF-related
1187    machinery that pass_cprop_hardreg::execute uses to handle them.
1188 
1189    NOTE: Currently this is only used for shrink-wrapping.  Maybe extend it
1190    to handle DEBUG_INSNs for other uses.  */
1191 
1192 void
1193 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1194 {
1195   struct value_data *vd;
1196   vd = XNEWVEC (struct value_data, 1);
1197   init_value_data (vd);
1198 
1199   skip_debug_insn_p = true;
1200   copyprop_hardreg_forward_1 (bb, vd);
1201   free (vd);
1202   skip_debug_insn_p = false;
1203 }
1204 
1205 static void
1206 validate_value_data (struct value_data *vd)
1207 {
1208   HARD_REG_SET set;
1209   unsigned int i, j;
1210 
1211   CLEAR_HARD_REG_SET (set);
1212 
1213   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1214     if (vd->e[i].oldest_regno == i)
1215       {
1216 	if (vd->e[i].mode == VOIDmode)
1217 	  {
1218 	    if (vd->e[i].next_regno != INVALID_REGNUM)
1219 	      internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1220 			      i, vd->e[i].next_regno);
1221 	    continue;
1222 	  }
1223 
1224 	SET_HARD_REG_BIT (set, i);
1225 
1226 	for (j = vd->e[i].next_regno;
1227 	     j != INVALID_REGNUM;
1228 	     j = vd->e[j].next_regno)
1229 	  {
1230 	    if (TEST_HARD_REG_BIT (set, j))
1231 	      internal_error ("validate_value_data: Loop in regno chain (%u)",
1232 			      j);
1233 	    if (vd->e[j].oldest_regno != i)
1234 	      internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1235 			      j, vd->e[j].oldest_regno);
1236 
1237 	    SET_HARD_REG_BIT (set, j);
1238 	  }
1239       }
1240 
1241   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1242     if (! TEST_HARD_REG_BIT (set, i)
1243 	&& (vd->e[i].mode != VOIDmode
1244 	    || vd->e[i].oldest_regno != i
1245 	    || vd->e[i].next_regno != INVALID_REGNUM))
1246       internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1247 		      i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1248 		      vd->e[i].next_regno);
1249 }
1250 
1251 
1252 namespace {
1253 
1254 const pass_data pass_data_cprop_hardreg =
1255 {
1256   RTL_PASS, /* type */
1257   "cprop_hardreg", /* name */
1258   OPTGROUP_NONE, /* optinfo_flags */
1259   TV_CPROP_REGISTERS, /* tv_id */
1260   0, /* properties_required */
1261   0, /* properties_provided */
1262   0, /* properties_destroyed */
1263   0, /* todo_flags_start */
1264   TODO_df_finish, /* todo_flags_finish */
1265 };
1266 
1267 class pass_cprop_hardreg : public rtl_opt_pass
1268 {
1269 public:
1270   pass_cprop_hardreg (gcc::context *ctxt)
1271     : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1272   {}
1273 
1274   /* opt_pass methods: */
1275   virtual bool gate (function *)
1276     {
1277       return (optimize > 0 && (flag_cprop_registers));
1278     }
1279 
1280   virtual unsigned int execute (function *);
1281 
1282 }; // class pass_cprop_hardreg
1283 
1284 static bool
1285 cprop_hardreg_bb (basic_block bb, struct value_data *all_vd, sbitmap visited)
1286 {
1287   bitmap_set_bit (visited, bb->index);
1288 
1289   /* If a block has a single predecessor that we've already
1290      processed, begin with the value data that was live at
1291      the end of the predecessor block.  */
1292   /* ??? Ought to use more intelligent queuing of blocks.  */
1293   if (single_pred_p (bb)
1294       && bitmap_bit_p (visited, single_pred (bb)->index)
1295       && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1296     {
1297       all_vd[bb->index] = all_vd[single_pred (bb)->index];
1298       if (all_vd[bb->index].n_debug_insn_changes)
1299 	{
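	  /* The structure copy above shares the predecessor's queued
	     DEBUG_INSN change lists; drop those references so this block
	     neither applies nor frees them a second time.  */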
1300 	  unsigned int regno;
1301 
1302 	  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1303 	    {
1304 	      if (all_vd[bb->index].e[regno].debug_insn_changes)
1305 		{
1306 		  struct queued_debug_insn_change *cur;
1307 		  for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1308 		       cur; cur = cur->next)
1309 		    --all_vd[bb->index].n_debug_insn_changes;
1310 		  all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1311 		  if (all_vd[bb->index].n_debug_insn_changes == 0)
1312 		    break;
1313 		}
1314 	    }
1315 	}
1316     }
1317   else
1318     init_value_data (all_vd + bb->index);
1319 
1320   return copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1321 }
1322 
1323 static void
1324 cprop_hardreg_debug (function *fun, struct value_data *all_vd)
1325 {
1326   basic_block bb;
1327 
1328   FOR_EACH_BB_FN (bb, fun)
1329     if (all_vd[bb->index].n_debug_insn_changes)
1330       {
1331 	unsigned int regno;
1332 	bitmap live;
1333 
1334 	live = df_get_live_out (bb);
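	/* Apply queued DEBUG_INSN changes only for registers that are
	   still live on exit from the block; the rest are discarded.  */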
1335 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1336 	  if (all_vd[bb->index].e[regno].debug_insn_changes)
1337 	    {
1338 	      if (REGNO_REG_SET_P (live, regno))
1339 		apply_debug_insn_changes (all_vd + bb->index, regno);
1340 
1341 	      struct queued_debug_insn_change *cur;
1342 	      for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1343 		   cur; cur = cur->next)
1344 		--all_vd[bb->index].n_debug_insn_changes;
1345 	      all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1346 	      if (all_vd[bb->index].n_debug_insn_changes == 0)
1347 		break;
1348 	    }
1349       }
1350 
1351   queued_debug_insn_change_pool.release ();
1352 }
1353 
1354 unsigned int
1355 pass_cprop_hardreg::execute (function *fun)
1356 {
1357   struct value_data *all_vd;
1358   basic_block bb;
1359 
1360   all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1361 
1362   auto_sbitmap visited (last_basic_block_for_fn (fun));
1363   bitmap_clear (visited);
1364 
1365   auto_vec<int> worklist;
1366   bool any_debug_changes = false;
1367 
1368   /* We need accurate notes.  Earlier passes such as if-conversion may
1369      leave notes in an inconsistent state.  */
1370   df_note_add_problem ();
1371   df_analyze ();
1372 
1373   /* It is tempting to set DF_LR_RUN_DCE, but DCE may choose to delete
1374      an insn and this pass would not have visibility into the removal.
1375      This pass would then potentially use the source of that
1376      INSN for propagation purposes, generating invalid code.
1377 
1378      So we just ask for updated notes and handle trivial deletions
1379      within this pass, where we can update this pass's internal
1380      data structures appropriately.  */
1381   df_set_flags (DF_DEFER_INSN_RESCAN);
1382 
1383   FOR_EACH_BB_FN (bb, fun)
1384     {
1385       if (cprop_hardreg_bb (bb, all_vd, visited))
1386 	worklist.safe_push (bb->index);
1387       if (all_vd[bb->index].n_debug_insn_changes)
1388 	any_debug_changes = true;
1389     }
1390 
1391   /* We must call df_analyze here unconditionally to ensure that the
1392      REG_UNUSED and REG_DEAD notes are consistent with and without -g.  */
1393   df_analyze ();
1394 
1395   if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
1396     cprop_hardreg_debug (fun, all_vd);
1397 
1398   /* If anything changed, run a second pass, but only over the basic
1399      blocks in which something actually changed.  */
1400   if (!worklist.is_empty ())
1401     {
1402       unsigned int i;
1403       int index;
1404 
1405       any_debug_changes = false;
1406       bitmap_clear (visited);
1407       FOR_EACH_VEC_ELT (worklist, i, index)
1408 	{
1409 	  bb = BASIC_BLOCK_FOR_FN (fun, index);
1410 	  cprop_hardreg_bb (bb, all_vd, visited);
1411 	  if (all_vd[bb->index].n_debug_insn_changes)
1412 	    any_debug_changes = true;
1413 	}
1414 
1415       df_analyze ();
1416       if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
1417 	cprop_hardreg_debug (fun, all_vd);
1418     }
1419 
1420   free (all_vd);
1421   return 0;
1422 }
1423 
1424 } // anon namespace
1425 
1426 rtl_opt_pass *
1427 make_pass_cprop_hardreg (gcc::context *ctxt)
1428 {
1429   return new pass_cprop_hardreg (ctxt);
1430 }
1431