1 /* RTL dead code elimination.
2    Copyright (C) 2005-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "predict.h"
27 #include "df.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
31 #include "cfgrtl.h"
32 #include "cfgbuild.h"
33 #include "cfgcleanup.h"
34 #include "dce.h"
35 #include "valtrack.h"
36 #include "tree-pass.h"
37 #include "dbgcnt.h"
38 #include "rtl-iter.h"
39 
40 
41 /* -------------------------------------------------------------------------
42    Core mark/delete routines
43    ------------------------------------------------------------------------- */
44 
45 /* True if we are invoked while the df engine is running; in this case,
46    we don't want to reenter it.  */
47 static bool df_in_progress = false;
48 
49 /* True if we are allowed to alter the CFG in this pass.  */
50 static bool can_alter_cfg = false;
51 
52 /* Instructions that have been marked but whose dependencies have not
53    yet been processed.  */
54 static vec<rtx_insn *> worklist;
55 
56 /* Bitmap of instructions marked as needed indexed by INSN_UID.  */
57 static sbitmap marked;
58 
59 /* Bitmap obstacks used for block processing by the fast algorithm.  */
60 static bitmap_obstack dce_blocks_bitmap_obstack;
61 static bitmap_obstack dce_tmp_bitmap_obstack;
62 
63 static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);
64 
65 /* A subroutine for which BODY is part of the instruction being tested;
66    either the top-level pattern, or an element of a PARALLEL.  The
67    instruction is known not to be a bare USE or CLOBBER.  */
68 
69 static bool
deletable_insn_p_1(rtx body)70 deletable_insn_p_1 (rtx body)
71 {
72   switch (GET_CODE (body))
73     {
74     case PREFETCH:
75     case TRAP_IF:
76       /* The UNSPEC case was added here because the ia-64 claims that
77 	 USEs do not work after reload and generates UNSPECS rather
78 	 than USEs.  Since dce is run after reload we need to avoid
79 	 deleting these even if they are dead.  If it turns out that
80 	 USEs really do work after reload, the ia-64 should be
81 	 changed, and the UNSPEC case can be removed.  */
82     case UNSPEC:
83       return false;
84 
85     default:
86       return !volatile_refs_p (body);
87     }
88 }
89 
/* Don't delete calls that may throw if we cannot do so.  Return true
   if the call INSN is safe to delete with respect to exceptions and
   abnormal control flow.  */

static bool
can_delete_call (rtx_insn *insn)
{
  /* If this function may delete dead exceptions and the CFG may be
     altered, even a throwing call can go.  */
  if (cfun->can_delete_dead_exceptions && can_alter_cfg)
    return true;
  /* Otherwise a call that might throw must stay.  */
  if (!insn_nothrow_p (insn))
    return false;
  if (can_alter_cfg)
    return true;
  /* If we can't alter cfg, even when the call can't throw exceptions, it
     might have EDGE_ABNORMAL_CALL edges and so we shouldn't delete such
     calls.  */
  gcc_assert (CALL_P (insn));
  if (BLOCK_FOR_INSN (insn) && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    {
      edge e;
      edge_iterator ei;

      /* Only a call ending its block can have outgoing abnormal call
	 edges; scan them.  */
      FOR_EACH_EDGE (e, ei, BLOCK_FOR_INSN (insn)->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL) != 0)
	  return false;
    }
  return true;
}
116 
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST selects the cheaper CLOBBER handling used by the
   fast algorithm.  ARG_STORES, if non-NULL, collects the UIDs of stack
   stores feeding a deletable const/pure call (see
   find_call_stack_args).  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
      /* Don't delete calls that may throw if we cannot do so.  */
      && can_delete_call (insn))
    /* A call is deletable only if all of its argument stores can be
       identified too.  */
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
				 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
	&& global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
	     && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  /* Callee-save restores are needed.  */
  if (RTX_FRAME_RELATED_P (insn)
      && crtl->shrink_wrapped_separate
      && find_reg_note (insn, REG_CFA_RESTORE, NULL))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      /* A PARALLEL is deletable only if every one of its elements is.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
201 
202 
203 /* Return true if INSN has been marked as needed.  */
204 
205 static inline int
marked_insn_p(rtx_insn * insn)206 marked_insn_p (rtx_insn *insn)
207 {
208   /* Artificial defs are always needed and they do not have an insn.
209      We should never see them here.  */
210   gcc_assert (insn);
211   return bitmap_bit_p (marked, INSN_UID (insn));
212 }
213 
214 
/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  FAST selects the fast algorithm, which does not use
   the worklist (dependencies are handled by block-local simulation
   instead).  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
	worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
	fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      /* A needed const/pure call also needs its argument stores kept;
	 the condition here mirrors the call test in deletable_insn_p.  */
      if (CALL_P (insn)
	  && !df_in_progress
	  && !SIBLING_CALL_P (insn)
	  && (RTL_CONST_OR_PURE_CALL_P (insn)
	      && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
	  && can_delete_call (insn))
	find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}
237 
238 
239 /* A note_stores callback used by mark_nonreg_stores.  DATA is the
240    instruction containing DEST.  */
241 
242 static void
mark_nonreg_stores_1(rtx dest,const_rtx pattern,void * data)243 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
244 {
245   if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
246     mark_insn ((rtx_insn *) data, true);
247 }
248 
249 
250 /* A note_stores callback used by mark_nonreg_stores.  DATA is the
251    instruction containing DEST.  */
252 
253 static void
mark_nonreg_stores_2(rtx dest,const_rtx pattern,void * data)254 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
255 {
256   if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
257     mark_insn ((rtx_insn *) data, false);
258 }
259 
260 
261 /* Mark INSN if it stores to a non-register destination.  */
262 
263 static void
mark_nonreg_stores(rtx_insn * insn,bool fast)264 mark_nonreg_stores (rtx_insn *insn, bool fast)
265 {
266   if (fast)
267     note_stores (insn, mark_nonreg_stores_1, insn);
268   else
269     note_stores (insn, mark_nonreg_stores_2, insn);
270 }
271 
272 
273 /* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer,
274    is a call argument store, and clear corresponding bits from SP_BYTES
275    bitmap if it is.  */
276 
277 static bool
check_argument_store(HOST_WIDE_INT size,HOST_WIDE_INT off,HOST_WIDE_INT min_sp_off,HOST_WIDE_INT max_sp_off,bitmap sp_bytes)278 check_argument_store (HOST_WIDE_INT size, HOST_WIDE_INT off,
279 		      HOST_WIDE_INT min_sp_off, HOST_WIDE_INT max_sp_off,
280 		      bitmap sp_bytes)
281 {
282   HOST_WIDE_INT byte;
283   for (byte = off; byte < off + size; byte++)
284     {
285       if (byte < min_sp_off
286 	  || byte >= max_sp_off
287 	  || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
288 	return false;
289     }
290   return true;
291 }
292 
/* If MEM has sp address, return 0, if it has sp + const address,
   return that const, if it has reg address where reg is set to sp + const
   and FAST is false, return const, otherwise return
   INTTYPE_MINIMUM (HOST_WIDE_INT) as a failure marker.  */

static HOST_WIDE_INT
sp_based_mem_offset (rtx_call_insn *call_insn, const_rtx mem, bool fast)
{
  HOST_WIDE_INT off = 0;
  rtx addr = XEXP (mem, 0);
  /* Peel off a constant displacement, remembering it in OFF.  */
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0))
      && CONST_INT_P (XEXP (addr, 1)))
    {
      off = INTVAL (XEXP (addr, 1));
      addr = XEXP (addr, 0);
    }
  if (addr == stack_pointer_rtx)
    return off;

  if (!REG_P (addr) || fast)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  /* If not fast, use chains to see if addr wasn't set to sp + offset.  */
  df_ref use;
  FOR_EACH_INSN_USE (use, call_insn)
  if (rtx_equal_p (addr, DF_REF_REG (use)))
    break;

  /* USE is NULL here iff the loop above finished without finding a
     matching use of ADDR in the call.  */
  if (use == NULL)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  /* Find the first real (non-artificial) definition reaching the use.  */
  struct df_link *defs;
  for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
    if (! DF_REF_IS_ARTIFICIAL (defs->ref))
      break;

  if (defs == NULL)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  rtx set = single_set (DF_REF_INSN (defs->ref));
  if (!set)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  /* Only a definition of the form reg = sp + const is usable.  */
  if (GET_CODE (SET_SRC (set)) != PLUS
      || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
      || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  off += INTVAL (XEXP (SET_SRC (set), 1));
  return off;
}
345 
/* Data for check_argument_load called via note_uses.  */
struct check_argument_load_data {
  /* Bytes, relative to sp + MIN_SP_OFF, whose argument stores have not
     yet been located.  */
  bitmap sp_bytes;
  /* Stack-offset range occupied by the call's arguments.  */
  HOST_WIDE_INT min_sp_off, max_sp_off;
  /* The const/pure call whose argument stores are being sought.  */
  rtx_call_insn *call_insn;
  /* Whether the fast variant of the pass is running.  */
  bool fast;
  /* Set when a load from a not-yet-matched argument byte is found.  */
  bool load_found;
};
354 
/* Helper function for find_call_stack_args.  Check if there are
   any loads from the argument slots in between the const/pure call
   and store to the argument slot, set LOAD_FOUND if any is found.
   LOC is a use location within an insn; DATA is a
   check_argument_load_data describing the search.  */

static void
check_argument_load (rtx *loc, void *data)
{
  struct check_argument_load_data *d
    = (struct check_argument_load_data *) data;
  subrtx_iterator::array_type array;
  /* Walk every sub-rtx of the use looking for a constant-sized MEM.  */
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx mem = *iter;
      HOST_WIDE_INT size;
      if (MEM_P (mem)
	  && MEM_SIZE_KNOWN_P (mem)
	  && MEM_SIZE (mem).is_constant (&size))
	{
	  HOST_WIDE_INT off = sp_based_mem_offset (d->call_insn, mem, d->fast);
	  /* A load overlapping [min_sp_off, max_sp_off) only counts if
	     it touches a byte still set in SP_BYTES, i.e. one whose
	     argument store has not yet been located.  */
	  if (off != INTTYPE_MINIMUM (HOST_WIDE_INT)
	      && off < d->max_sp_off
	      && off + size > d->min_sp_off)
	    for (HOST_WIDE_INT byte = MAX (off, d->min_sp_off);
		 byte < MIN (off + size, d->max_sp_off); byte++)
	      if (bitmap_bit_p (d->sp_bytes, byte - d->min_sp_off))
		{
		  d->load_found = true;
		  return;
		}
	}
    }
}
387 
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.
   With DO_MARK false the found stores are recorded in ARG_STORES
   (by UID); with DO_MARK true they are marked via mark_insn.  */

static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  The argument slots appear as USEs of MEMs in
     CALL_INSN_FUNCTION_USAGE.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0);
	HOST_WIDE_INT size;
	/* Give up on any slot whose size or sp-offset is unknown.  */
	if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size))
	  return false;
	HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
	if (off == INTTYPE_MINIMUM (HOST_WIDE_INT))
	  return false;
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + size);
      }

  /* No stack arguments at all: trivially safe.  */
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0);
	/* Checked in the previous iteration.  */
	HOST_WIDE_INT size = MEM_SIZE (mem).to_constant ();
	HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
	gcc_checking_assert (off != INTTYPE_MINIMUM (HOST_WIDE_INT));
	/* Overlapping argument slots would indicate corrupt usage
	   info, hence the unreachable on an already-set bit.  */
	for (HOST_WIDE_INT byte = off; byte < off + size; byte++)
	  if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	    gcc_unreachable ();
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment, memory store other than
     argument store or a read from an argument stack slot.  */
  struct check_argument_load_data data
    = { sp_bytes, min_sp_off, max_sp_off, call_insn, fast, false };
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      /* Stay within the call's basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* Multi-set insns and sp adjustments end the search.  */
      rtx set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      /* A read from a still-unmatched argument slot also ends it.  */
      note_uses (&PATTERN (insn), check_argument_load, &data);
      if (data.load_found)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      rtx mem = SET_DEST (set);
      HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
      if (off == INTTYPE_MINIMUM (HOST_WIDE_INT))
	break;

      /* The store must exactly cover outstanding argument bytes
	 (check_argument_store clears them from SP_BYTES).  */
      HOST_WIDE_INT size;
      if (!MEM_SIZE_KNOWN_P (mem)
	  || !MEM_SIZE (mem).is_constant (&size)
	  || !check_argument_store (size, off, min_sp_off,
				    max_sp_off, sp_bytes))
	break;

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* All argument bytes accounted for: success.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  /* On failure discard any partially recorded stores.  */
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
519 
520 
/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  Called before deleting INSN so that no dangling notes
   referencing its destinations remain elsewhere.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}
532 
/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  Such debug insns would otherwise refer
   to registers about to be deleted.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
	{
	  df_ref use;

	  FOR_EACH_INSN_USE (use, insn)
	    {
	      struct df_link *defs;
	      /* Look for a reaching definition from an unmarked insn;
		 the inner loop breaks (leaving DEFS non-NULL) when one
		 is found.  */
	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		{
		  rtx_insn *ref_insn;
		  if (DF_REF_IS_ARTIFICIAL (defs->ref))
		    continue;
		  ref_insn = DF_REF_INSN (defs->ref);
		  if (!marked_insn_p (ref_insn))
		    break;
		}
	      /* All defs of this use are kept; check the next use.  */
	      if (!defs)
		continue;
	      /* ??? FIXME could we propagate the values assigned to
		 each of the DEFs?  */
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      df_insn_rescan_debug_internal (insn);
	      break;
	    }
	}
}
570 
/* Delete every instruction that hasn't been marked.  No-op moves are
   deleted (or turned into USEs) even if marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
	{
	  rtx turn_into_use = NULL_RTX;

	  /* Always delete no-op moves.  */
	  if (noop_move_p (insn)
	      /* Unless the no-op move can throw and we are not allowed
		 to alter cfg.  */
	      && (!cfun->can_throw_non_call_exceptions
		  || (cfun->can_delete_dead_exceptions && can_alter_cfg)
		  || insn_nothrow_p (insn)))
	    {
	      /* A frame-related no-op move carrying a REG_CFA_RESTORE
		 note must be kept as a USE; remember the register.
		 Control then falls through to the deletion code.  */
	      if (RTX_FRAME_RELATED_P (insn))
		turn_into_use
		  = find_reg_note (insn, REG_CFA_RESTORE, NULL);
	      if (turn_into_use && REG_P (XEXP (turn_into_use, 0)))
		turn_into_use = XEXP (turn_into_use, 0);
	      else
		turn_into_use = NULL_RTX;
	    }

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     these insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  if (turn_into_use)
	    {
	      /* Don't remove frame related noop moves if they carry
		 REG_CFA_RESTORE note, while we don't need to emit any code,
		 we need it to emit the CFI restore note.  */
	      PATTERN (insn)
		= gen_rtx_USE (GET_MODE (turn_into_use), turn_into_use);
	      INSN_CODE (insn) = -1;
	      df_insn_rescan (insn);
	    }
	  else
	    /* Now delete the insn.  */
	    must_clean |= delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    {
      gcc_assert (can_alter_cfg);
      delete_unreachable_blocks ();
      free_dominance_info (CDI_DOMINATORS);
    }
}
658 
659 
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  FAST selects the fast variant of the
   pass.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  /* Argument stores of const/pure calls are only tracked when stack
     arguments accumulate and we are not inside the df engine.  */
  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (NONDEBUG_INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
703 
704 
705 /* UD-based DSE routines. */
706 
/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  /* For every artificial use in every block, mark each real insn that
     provides a reaching definition.  */
  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (!DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
}
723 
724 
/* Mark every instruction that defines a register value that INSN uses.
   Debug insns are skipped: they must not keep otherwise-dead insns
   alive.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "Processing use of ");
	  print_simple_rtl (dump_file, DF_REF_REG (use));
	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
	}
      /* Mark every real (non-artificial) reaching definition.  */
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	if (! DF_REF_IS_ARTIFICIAL (defs->ref))
	  mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
749 
750 
751 /* Initialize global variables for a new DCE pass.  */
752 
753 static void
init_dce(bool fast)754 init_dce (bool fast)
755 {
756   if (!df_in_progress)
757     {
758       if (!fast)
759 	{
760 	  df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
761 	  df_chain_add_problem (DF_UD_CHAIN);
762 	}
763       df_analyze ();
764     }
765 
766   if (dump_file)
767     df_dump (dump_file);
768 
769   if (fast)
770     {
771       bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
772       bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
773       can_alter_cfg = false;
774     }
775   else
776     can_alter_cfg = true;
777 
778   marked = sbitmap_alloc (get_max_uid () + 1);
779   bitmap_clear (marked);
780 }
781 
782 
783 /* Free the data allocated by init_dce.  */
784 
785 static void
fini_dce(bool fast)786 fini_dce (bool fast)
787 {
788   sbitmap_free (marked);
789 
790   if (fast)
791     {
792       bitmap_obstack_release (&dce_blocks_bitmap_obstack);
793       bitmap_obstack_release (&dce_tmp_bitmap_obstack);
794     }
795 }
796 
797 
798 /* UD-chain based DCE.  */
799 
800 static unsigned int
rest_of_handle_ud_dce(void)801 rest_of_handle_ud_dce (void)
802 {
803   rtx_insn *insn;
804 
805   init_dce (false);
806 
807   prescan_insns_for_dce (false);
808   mark_artificial_uses ();
809   while (worklist.length () > 0)
810     {
811       insn = worklist.pop ();
812       mark_reg_dependencies (insn);
813     }
814   worklist.release ();
815 
816   if (MAY_HAVE_DEBUG_BIND_INSNS)
817     reset_unmarked_insns_debug_uses ();
818 
819   /* Before any insns are deleted, we must remove the chains since
820      they are not bidirectional.  */
821   df_remove_problem (df_chain);
822   delete_unmarked_insns ();
823 
824   fini_dce (false);
825   return 0;
826 }
827 
828 
namespace {

/* Pass descriptor for the UD-chain based RTL DCE pass.  */
const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass wrapper around rest_of_handle_ud_dce.  */
class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* Run at -O2 and above with -fdce, subject to the dce_ud debug
     counter.  */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace
865 
/* Create an instance of the UD-chain based RTL DCE pass.  */

rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}
871 
872 
873 /* -------------------------------------------------------------------------
874    Fast DCE functions
875    ------------------------------------------------------------------------- */
876 
/* Process basic block BB.  Return true if the live_in set has
   changed. REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global substitution
   of uses of dead pseudos in debug insns using GLOBAL_DEBUG.  This is
   the word-level variant: liveness is tracked per word, with two bits
   (2*REGNO and 2*REGNO+1) per double-word pseudo.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
			struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

  /* Simulate liveness backwards through the block.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record debug uses of double-word pseudos both of whose words
	   are dead, so a debug temp can be substituted later.  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
	      && known_eq (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use))),
			   2 * UNITS_PER_WORD)
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool any_changed;

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	any_changed = df_word_lr_simulate_defs (insn, local_live);
	if (any_changed)
	  mark_insn (insn, true);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_word_lr_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  {
	    df_ref def;

	    FOR_EACH_INSN_DEF (def, insn)
	      dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				      marked_insn_p (insn)
				      && !control_flow_insn_p (insn)
				      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				      : DEBUG_TEMP_BEFORE_WITH_VALUE);
	  }

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_word_regset (dump_file, local_live);
	  }
      }

  /* Propagate the recomputed live-in set; the caller reiterates while
     any block's live-in changes.  */
  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}
973 
974 
/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
		   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if one of
	 the succs of this block has had a change in its live in
	 set.  Recompute it by confluence over all outgoing edges.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  /* Simulate liveness backwards starting from the live-out set.  */
  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
	/* Record uses in debug insns of regs that are dead (and not
	   artificially live) at this point, so that debug temps can be
	   substituted for them when their setters are processed below.
	   Debug insns themselves never contribute to liveness.  */
	df_ref use;
	FOR_EACH_INSN_USE (use, insn)
	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
	      && !bitmap_bit_p (au, DF_REF_REGNO (use)))
	    dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
	bool needed = marked_insn_p (insn);

	/* The insn is needed if there is someone who uses the output.  */
	if (!needed)
	  FOR_EACH_INSN_DEF (def, insn)
	    if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
		|| bitmap_bit_p (au, DF_REF_REGNO (def)))
	      {
		needed = true;
		mark_insn (insn, true);
		break;
	      }

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (needed)
	  df_simulate_uses (insn, local_live);

	/* Insert debug temps for dead REGs used in subsequent debug
	   insns.  We may have to emit a debug temp even if the insn
	   was marked, in case the debug use was after the point of
	   death.  */
	if (debug.used && !bitmap_empty_p (debug.used))
	  FOR_EACH_INSN_DEF (def, insn)
	    dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
				    needed && !control_flow_insn_p (insn)
				    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
				    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  /* Report (and record) whether the live-in set changed; the caller
     uses this to decide whether another iteration is required.  */
  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
1070 
1071 
/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  /* Seed the worklist with the insns that are needed a priori.  */
  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  /* Iterate the per-block backwards simulation to a fixed point,
     i.e. until no block's live-in set changes any more.  */
  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
	  bool local_changed;

	  /* The fixed blocks contain no insns to process; just mark
	     them as seen.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					&global_debug);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au,
				   &global_debug);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have to redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  bitmap_clear (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  /* Restore the RUN_DCE flag if it was set on entry.  */
	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  dead_debug_global_finish (&global_debug, NULL);

  /* Fixed point reached; delete everything never marked as needed.  */
  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
1186 
1187 
1188 /* Fast register level DCE.  */
1189 
1190 static unsigned int
rest_of_handle_fast_dce(void)1191 rest_of_handle_fast_dce (void)
1192 {
1193   init_dce (true);
1194   fast_dce (false);
1195   fini_dce (true);
1196   return 0;
1197 }
1198 
1199 
1200 /* Fast byte level DCE.  */
1201 
1202 void
run_word_dce(void)1203 run_word_dce (void)
1204 {
1205   int old_flags;
1206 
1207   if (!flag_dce)
1208     return;
1209 
1210   timevar_push (TV_DCE);
1211   old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1212   df_word_lr_add_problem ();
1213   init_dce (true);
1214   fast_dce (true);
1215   fini_dce (true);
1216   df_set_flags (old_flags);
1217   timevar_pop (TV_DCE);
1218 }
1219 
1220 
1221 /* This is an internal call that is used by the df live register
1222    problem to run fast dce as a side effect of creating the live
1223    information.  The stack is organized so that the lr problem is run,
1224    this pass is run, which updates the live info and the df scanning
1225    info, and then returns to allow the rest of the problems to be run.
1226 
1227    This can be called by elsewhere but it will not update the bit
1228    vectors for any other problems than LR.  */
1229 
1230 void
run_fast_df_dce(void)1231 run_fast_df_dce (void)
1232 {
1233   if (flag_dce)
1234     {
1235       /* If dce is able to delete something, it has to happen
1236 	 immediately.  Otherwise there will be problems handling the
1237 	 eq_notes.  */
1238       int old_flags =
1239 	df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1240 
1241       df_in_progress = true;
1242       rest_of_handle_fast_dce ();
1243       df_in_progress = false;
1244 
1245       df_set_flags (old_flags);
1246     }
1247 }
1248 
1249 
1250 /* Run a fast DCE pass.  */
1251 
1252 void
run_fast_dce(void)1253 run_fast_dce (void)
1254 {
1255   if (flag_dce)
1256     rest_of_handle_fast_dce ();
1257 }
1258 
1259 
1260 namespace {
1261 
1262 const pass_data pass_data_fast_rtl_dce =
1263 {
1264   RTL_PASS, /* type */
1265   "rtl_dce", /* name */
1266   OPTGROUP_NONE, /* optinfo_flags */
1267   TV_DCE, /* tv_id */
1268   0, /* properties_required */
1269   0, /* properties_provided */
1270   0, /* properties_destroyed */
1271   0, /* todo_flags_start */
1272   TODO_df_finish, /* todo_flags_finish */
1273 };
1274 
1275 class pass_fast_rtl_dce : public rtl_opt_pass
1276 {
1277 public:
pass_fast_rtl_dce(gcc::context * ctxt)1278   pass_fast_rtl_dce (gcc::context *ctxt)
1279     : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
1280   {}
1281 
1282   /* opt_pass methods: */
gate(function *)1283   virtual bool gate (function *)
1284     {
1285       return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
1286     }
1287 
execute(function *)1288   virtual unsigned int execute (function *)
1289     {
1290       return rest_of_handle_fast_dce ();
1291     }
1292 
1293 }; // class pass_fast_rtl_dce
1294 
1295 } // anon namespace
1296 
1297 rtl_opt_pass *
make_pass_fast_rtl_dce(gcc::context * ctxt)1298 make_pass_fast_rtl_dce (gcc::context *ctxt)
1299 {
1300   return new pass_fast_rtl_dce (ctxt);
1301 }
1302