/* Dead store elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.
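
   For example (an illustrative source-level sketch, not from any
   particular testcase):

     p->x = 1;     <- dead: overwritten below with no intervening load
     p->x = 2;
     return p->x;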

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads, and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later store will also execute before the
   function exits.
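
   In GIMPLE virtual-operand form the example above looks roughly like
   this (an illustrative sketch; the .MEM names are made up):

     # .MEM_2 = VDEF <.MEM_1>
     p->x = 1;
     # .MEM_3 = VDEF <.MEM_2>    <- sole immediate use of .MEM_2
     p->x = 2;

   The store defining .MEM_2 has exactly one virtual use, namely the
   second store, which clobbers the same location and post-dominates
   the first, so the first store is dead.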

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores to the same location, then the earlier one is
   redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */


/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_enter_block (struct dom_walk_data *, basic_block);


/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if such a statement was found and stored in *USE_STMT,
   otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory STMT stores to, with no intermediate statement that may use
     part of the memory STMT stores to.  That is, find a store that may
     prove STMT to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
	return false;

      if (gimple_code (temp) == GIMPLE_PHI)
	defvar = PHI_RESULT (temp);
      else
	defvar = gimple_vdef (temp);
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  cnt++;

	  /* If we ever reach our DSE candidate stmt again, fail.  We
	     cannot handle dead stores in loops.  */
	  if (use_stmt == stmt)
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      if (temp
		  /* Make sure we are not in a loop latch block.  */
		  || gimple_bb (stmt) == gimple_bb (use_stmt)
		  || dominated_by_p (CDI_DOMINATORS,
				     gimple_bb (stmt), gimple_bb (use_stmt))
		  /* We can look through PHIs to regions post-dominating
		     the DSE candidate stmt.  */
		  || !dominated_by_p (CDI_POST_DOMINATORS,
				      gimple_bb (stmt), gimple_bb (use_stmt)))
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	  /* If the statement may use the memory STMT stores to, then
	     the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt,
					     gimple_assign_lhs (stmt)))
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it, or bail out if we have
	     multiple ones (they will then be in different parts of
	     the CFG).  */
	  else if (gimple_vdef (use_stmt))
	    {
	      if (temp)
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	}

      if (fail)
	return false;

      /* If we didn't find any definition, the store is dead if it
	 isn't a store to globally reachable memory.  In that case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
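      /* For instance (an illustrative sketch), a store to a local
	 object whose virtual definition has no further uses before
	 function exit,

	   struct S s;
	   s.x = 1;     <- no later load or clobber of s.x
	   return 0;

	 is dead unless the stored-to memory may be visible outside
	 the function, which is what is_hidden_global_store checks.  */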
      if (!temp)
	{
	  if (is_hidden_global_store (stmt))
	    return false;

	  temp = stmt;
	  break;
	}
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we could just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}


/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  If this is a call to a
     known function, bail out; only plain assignments are handled
     below.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  if (gimple_has_volatile_ops (stmt))
    return;

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
	return;

      /* If we have precisely one immediate use at this point and the
	 stores are to the same memory location, or USE_STMT is known
	 to kill the memory location STMT stores to, then we may have
	 found a redundant store.  */
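      /* For instance (an illustrative sketch), the later statement can
	 kill the earlier store even when the two left-hand sides are
	 not syntactically equal:

	   p->a.x = 1;  <- dead, covered by the whole-object store below
	   *p = b;

	 stmt_kills_ref_p detects that the second store overwrites all
	 of *p, including p->a.x.  */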
      if ((gimple_has_lhs (use_stmt)
	   && (operand_equal_p (gimple_assign_lhs (stmt),
				gimple_get_lhs (use_stmt), 0)))
	  || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
	{
	  /* If use_stmt is or might be a nop assignment, e.g. for
	     struct S { ... } a, b, *p; ...
	     b = a; b = b;
	     or
	     b = a; b = *p; where p might be &b,
	     or
	     *p = a; *p = b; where p might be &b,
	     or
	     *p = *u; *p = *v; where p might be v, then USE_STMT
	     acts as a use as well as definition, so store in STMT
	     is not dead.  */
	  if (stmt != use_stmt
	      && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
	    return;

	  if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

	  /* Then we need to fix the operand of the consuming stmt.  */
	  unlink_stmt_vdef (stmt);

	  bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);

	  /* Remove the dead store.  */
	  gsi_remove (&gsi, true);

	  /* And release any SSA_NAMEs set in this statement back to the
	     SSA_NAME manager.  */
	  release_defs (stmt);
	}
    }
}

static void
dse_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
		 basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi); gsi_prev (&gsi))
    dse_optimize_stmt (gsi);
}

/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  struct dom_walk_data walk_data;

  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  walk_data.dom_direction = CDI_POST_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dse_enter_block;
  walk_data.after_dom_children = NULL;

  walk_data.block_local_data_size = 0;
  walk_data.global_data = NULL;

  /* Initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  /* Recursively walk the dominator tree.  */
  walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);

  /* Finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}

struct gimple_opt_pass pass_dse =
{
 {
  GIMPLE_PASS,
  "dse",			/* name */
  gate_dse,			/* gate */
  tree_ssa_dse,			/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_DSE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
    | TODO_verify_ssa		/* todo_flags_finish */
 }
};