1 /* Dead and redundant store elimination
2    Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "gimple-iterator.h"
32 #include "tree-cfg.h"
33 #include "tree-dfa.h"
34 #include "domwalk.h"
35 #include "tree-cfgcleanup.h"
36 #include "alias.h"
37 #include "tree-ssa-loop.h"
38 #include "tree-ssa-dse.h"
39 #include "builtins.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 
43 /* This file implements dead store elimination.
44 
45    A dead store is a store into a memory location which will later be
46    overwritten by another store without any intervening loads.  In this
47    case the earlier store can be deleted or trimmed if the store
48    was partially dead.
49 
50    A redundant store is a store into a memory location which stores
51    the exact same value as a prior store to the same memory location.
52    While this can often be handled by dead store elimination, removing
53    the redundant store is often better than removing or trimming the
54    dead store.
55 
56    In our SSA + virtual operand world we use immediate uses of virtual
57    operands to detect these cases.  If a store's virtual definition
58    is used precisely once by a later store to the same location which
59    post dominates the first store, then the first store is dead.  If
60    the data stored is the same, then the second store is redundant.
61 
62    The single use of the store's virtual definition ensures that
63    there are no intervening aliased loads and the requirement that
64    the second store post dominates the first ensures that if the earlier
65    store executes, then the later stores will execute before the function
66    exits.
67 
68    It may help to think of this as first moving the earlier store to
69    the point immediately before the later store.  Again, the single
70    use of the virtual definition and the post-dominance relationship
71    ensure that such movement would be safe.  Clearly if there are
72    back-to-back stores, then the second makes the first dead.  If
73    the second store stores the same value, then the second store is
74    redundant.
75 
76    Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
77    may also help in understanding this code since it discusses the
78    relationship between dead store and redundant load elimination.  In
79    fact, they are the same transformation applied to different views of
80    the CFG.  */
81 
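/* As an illustrative sketch (a hypothetical example, not taken from the
   testsuite), consider:

     void
     example (int *p, int *q)
     {
       *p = 1;     <-- dead: overwritten below with no intervening read
       *p = 2;
       *q = 0;
       *q = 0;     <-- redundant: stores the same value (zero) again
     }

   The first store to *p is deleted as dead.  For the stores to *q the
   pass prefers to delete the second store as redundant rather than treat
   the first as dead; note that the redundant-store handling below
   currently only triggers for zero-storing statements (zero assignments,
   empty CONSTRUCTORs, memset of 0, calloc).  */
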
82 static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
83 
84 /* Bitmap of blocks that have had EH statements cleaned.  We should
85    remove their dead edges eventually.  */
86 static bitmap need_eh_cleanup;
87 
88 /* STMT is a statement that may write into memory.  Analyze it and
89    initialize WRITE to describe how STMT affects memory.
90 
91    Return TRUE if the statement was analyzed, FALSE otherwise.
92 
93    It is always safe to return FALSE.  But typically better optimization
94    can be achieved by analyzing more statements.  */
95 
96 static bool
97 initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
98 {
99   /* It's advantageous to handle certain mem* functions.  */
100   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
101     {
102       switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
103 	{
104 	case BUILT_IN_MEMCPY:
105 	case BUILT_IN_MEMMOVE:
106 	case BUILT_IN_MEMSET:
107 	case BUILT_IN_MEMCPY_CHK:
108 	case BUILT_IN_MEMMOVE_CHK:
109 	case BUILT_IN_MEMSET_CHK:
110 	case BUILT_IN_STRNCPY:
111 	case BUILT_IN_STRNCPY_CHK:
112 	  {
113 	    tree size = gimple_call_arg (stmt, 2);
114 	    tree ptr = gimple_call_arg (stmt, 0);
115 	    ao_ref_init_from_ptr_and_size (write, ptr, size);
116 	    return true;
117 	  }
118 
119 	/* A calloc call can never be dead, but it can make
120 	   subsequent stores redundant if they store 0 into
121 	   the same memory locations.  */
122 	case BUILT_IN_CALLOC:
123 	  {
124 	    tree nelem = gimple_call_arg (stmt, 0);
125 	    tree selem = gimple_call_arg (stmt, 1);
126 	    tree lhs;
127 	    if (TREE_CODE (nelem) == INTEGER_CST
128 		&& TREE_CODE (selem) == INTEGER_CST
129 		&& (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
130 	      {
131 		tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
132 					 nelem, selem);
133 		ao_ref_init_from_ptr_and_size (write, lhs, size);
134 		return true;
135 	      }
136 	  }
137 
138 	default:
139 	  break;
140 	}
141     }
142   else if (is_gimple_assign (stmt))
143     {
144       ao_ref_init (write, gimple_assign_lhs (stmt));
145       return true;
146     }
147   return false;
148 }
149 
150 /* Given REF from the alias oracle, return TRUE if it is a valid
151    memory reference for dead store elimination, false otherwise.
152 
153    In particular, the reference must have a known base, known maximum
154    size, start at a byte offset and have a size that is one or more
155    bytes.  */
156 
157 static bool
158 valid_ao_ref_for_dse (ao_ref *ref)
159 {
160   return (ao_ref_base (ref)
161 	  && known_size_p (ref->max_size)
162 	  && maybe_ne (ref->size, 0)
163 	  && known_eq (ref->max_size, ref->size)
164 	  && known_ge (ref->offset, 0)
165 	  && multiple_p (ref->offset, BITS_PER_UNIT)
166 	  && multiple_p (ref->size, BITS_PER_UNIT));
167 }
168 
169 /* Try to normalize COPY (an ao_ref) relative to REF.  Essentially when we are
170    done COPY will only refer to bytes found within REF.  Return true if COPY
171    is known to intersect at least one byte of REF.  */
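
/* For example (a worked sketch; offsets and sizes are in bits): if REF
   covers bits [32, 96) of some base and COPY covers bits [0, 64), then
   after normalization COPY covers bits [32, 64) and the function returns
   true.  If COPY instead covered bits [96, 128) it would not intersect
   REF and the function would return false.  */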
172 
173 static bool
174 normalize_ref (ao_ref *copy, ao_ref *ref)
175 {
176   if (!ordered_p (copy->offset, ref->offset))
177     return false;
178 
179   /* If COPY starts before REF, then reset the beginning of
180      COPY to match REF and decrease the size of COPY by the
181      number of bytes removed from COPY.  */
182   if (maybe_lt (copy->offset, ref->offset))
183     {
184       poly_int64 diff = ref->offset - copy->offset;
185       if (maybe_le (copy->size, diff))
186 	return false;
187       copy->size -= diff;
188       copy->offset = ref->offset;
189     }
190 
191   poly_int64 diff = copy->offset - ref->offset;
192   if (maybe_le (ref->size, diff))
193     return false;
194 
195   /* If COPY extends beyond REF, chop off its size appropriately.  */
196   poly_int64 limit = ref->size - diff;
197   if (!ordered_p (limit, copy->size))
198     return false;
199 
200   if (maybe_gt (copy->size, limit))
201     copy->size = limit;
202   return true;
203 }
204 
205 /* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
206    address written by STMT must match the one found in REF, which must
207    have its base address previously initialized.
208 
209    This routine must be conservative.  If we don't know the offset or
210    actual size written, assume nothing was written.  */
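
/* For example (a hypothetical sketch): if REF describes a 16 byte object
   and STMT writes bytes 4-7 of the same base, bits 4-7 are cleared in
   LIVE_BYTES and the rest are left alone.  If STMT writes through a
   different base, or its offset or size is not constant, LIVE_BYTES is
   not changed at all.  */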
211 
212 static void
213 clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
214 {
215   ao_ref write;
216   if (!initialize_ao_ref_for_dse (stmt, &write))
217     return;
218 
219   /* Verify we have the same base memory address, the write
220      has a known size and overlaps with REF.  */
221   HOST_WIDE_INT start, size;
222   if (valid_ao_ref_for_dse (&write)
223       && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
224       && known_eq (write.size, write.max_size)
225       && normalize_ref (&write, ref)
226       && (write.offset - ref->offset).is_constant (&start)
227       && write.size.is_constant (&size))
228     bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
229 			size / BITS_PER_UNIT);
230 }
231 
232 /* REF is a memory write.  Extract relevant information from it and
233    initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
234    Otherwise return FALSE.  */
235 
236 static bool
237 setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
238 {
239   HOST_WIDE_INT const_size;
240   if (valid_ao_ref_for_dse (ref)
241       && ref->size.is_constant (&const_size)
242       && (const_size / BITS_PER_UNIT
243 	  <= param_dse_max_object_size))
244     {
245       bitmap_clear (live_bytes);
246       bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
247       return true;
248     }
249   return false;
250 }
251 
252 /* Compute the number of elements that we can trim from the head and
253    tail of ORIG resulting in a bitmap that is a superset of LIVE.
254 
255    Store the number of elements trimmed from the head and tail in
256    TRIM_HEAD and TRIM_TAIL.
257 
258    STMT is the statement being trimmed and is used for debugging dump
259    output only.  */
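
/* For example (a hypothetical sketch): for a 16 byte store where only
   bytes 8-11 remain live, *TRIM_HEAD is 8 and *TRIM_TAIL is 4.  When the
   live range spans more than a word the head trim is rounded down to a
   word boundary, so with bytes 2-13 live on a 64-bit target the head
   trim of 2 becomes 0 while the tail trim stays 2.  */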
260 
261 static void
262 compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
263 	       gimple *stmt)
264 {
265   /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
266      extends through ref->size.  So we know that in the original bitmap
267      bits 0..ref->size were true.  We don't actually need the bitmap, just
268      the REF to compute the trims.  */
269 
270   /* Now identify how much, if any of the tail we can chop off.  */
271   HOST_WIDE_INT const_size;
272   int last_live = bitmap_last_set_bit (live);
273   if (ref->size.is_constant (&const_size))
274     {
275       int last_orig = (const_size / BITS_PER_UNIT) - 1;
276       /* We can leave inconvenient amounts on the tail as
277 	 residual handling in mem* and str* functions is usually
278 	 reasonably efficient.  */
279       *trim_tail = last_orig - last_live;
280 
281       /* But don't trim away out of bounds accesses, as this defeats
282 	 proper warnings.
283 
284 	 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
285 	 where TYPE_SIZE_UNIT is not a constant.  */
286       if (*trim_tail
287 	  && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
288 	  && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
289 	  && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
290 			       last_orig) <= 0)
291 	*trim_tail = 0;
292     }
293   else
294     *trim_tail = 0;
295 
296   /* Identify how much, if any of the head we can chop off.  */
297   int first_orig = 0;
298   int first_live = bitmap_first_set_bit (live);
299   *trim_head = first_live - first_orig;
300 
301   /* If more than a word remains, then make sure to keep the
302      starting point at least word aligned.  */
303   if (last_live - first_live > UNITS_PER_WORD)
304     *trim_head &= ~(UNITS_PER_WORD - 1);
305 
306   if ((*trim_head || *trim_tail)
307       && dump_file && (dump_flags & TDF_DETAILS))
308     {
309       fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
310 	       *trim_head, *trim_tail);
311       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
312       fprintf (dump_file, "\n");
313     }
314 }
315 
316 /* STMT initializes an object from COMPLEX_CST where one or more of the
317    bytes written may be dead stores.  REF is a representation of the
318    memory written.  LIVE is the bitmap of stores that are actually live.
319 
320    Attempt to rewrite STMT so that only the real or imaginary part of
321    the object is actually stored.  */
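
/* For example (a hypothetical sketch; "use" is a placeholder function):

     _Complex float c;
     c = __builtin_complex (1.0f, 2.0f);
     __imag__ c = 3.0f;
     use (&c);

   The second assignment kills the imaginary half of the COMPLEX_CST
   store, so the first store is rewritten to store only the real part,
   roughly REALPART_EXPR <c> = 1.0e+0.  */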
322 
323 static void
324 maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
325 {
326   int trim_head, trim_tail;
327   compute_trims (ref, live, &trim_head, &trim_tail, stmt);
328 
329   /* The amount of data trimmed from the head or tail must be at
330      least half the size of the object to ensure we're trimming
331      the entire real or imaginary half.  By writing things this
332      way we avoid more O(n) bitmap operations.  */
333   if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
334     {
335       /* TREE_REALPART is live */
336       tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
337       tree y = gimple_assign_lhs (stmt);
338       y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
339       gimple_assign_set_lhs (stmt, y);
340       gimple_assign_set_rhs1 (stmt, x);
341     }
342   else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
343     {
344       /* TREE_IMAGPART is live */
345       tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
346       tree y = gimple_assign_lhs (stmt);
347       y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
348       gimple_assign_set_lhs (stmt, y);
349       gimple_assign_set_rhs1 (stmt, x);
350     }
351 
352   /* Other cases indicate parts of both the real and imag subobjects
353      are live.  We do not try to optimize those cases.  */
354 }
355 
356 /* STMT initializes an object using a CONSTRUCTOR where one or more of the
357    bytes written are dead stores.  REF describes the memory written by
358    STMT.  LIVE is the bitmap of bytes that are actually live.
359 
360    Attempt to rewrite STMT so that it only writes the bytes that are
361    still live.
362 
363    The most common case for getting here is a CONSTRUCTOR with no elements
364    being used to zero initialize an object.  We do not try to handle other
365    cases as those would force us to fully cover the object with the
366    CONSTRUCTOR node except for the components that are dead.  */
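
/* For example (a hypothetical sketch; "use" is a placeholder function):

     char buf[16] = { };
     memcpy (buf, "ABCDEFG", 8);
     use (buf);

   The memcpy makes the first eight bytes of the zero-initialization
   dead, so the empty-CONSTRUCTOR store is narrowed to roughly
   MEM <char[8]> [(char *)&buf + 8B] = { }, clearing only the tail that
   is still live.  */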
367 
368 static void
369 maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
370 {
371   tree ctor = gimple_assign_rhs1 (stmt);
372 
373   /* This is the only case we currently handle.  It actually seems to
374      catch most cases of actual interest.  */
375   gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);
376 
377   int head_trim = 0;
378   int tail_trim = 0;
379   compute_trims (ref, live, &head_trim, &tail_trim, stmt);
380 
381   /* Now we want to replace the constructor initializer
382      with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
383   if (head_trim || tail_trim)
384     {
385       /* We want &lhs for the MEM_REF expression.  */
386       tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
387 
388       if (! is_gimple_min_invariant (lhs_addr))
389 	return;
390 
391       /* The number of bytes for the new constructor.  */
392       poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
393       poly_int64 count = ref_bytes - head_trim - tail_trim;
394 
395       /* And the new type for the CONSTRUCTOR.  Essentially it's just
396 	 a char array large enough to cover the non-trimmed parts of
397 	 the original CONSTRUCTOR.  Note we want explicit bounds here
398 	 so that we know how many bytes to clear when expanding the
399 	 CONSTRUCTOR.  */
400       tree type = build_array_type_nelts (char_type_node, count);
401 
402       /* Build a suitable alias type rather than using alias set zero
403 	 to avoid pessimizing.  */
404       tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
405 
406       /* Build a MEM_REF representing the whole accessed area, starting
407 	 at the first byte not trimmed.  */
408       tree exp = fold_build2 (MEM_REF, type, lhs_addr,
409 			      build_int_cst (alias_type, head_trim));
410 
411       /* Now update STMT with a new RHS and LHS.  */
412       gimple_assign_set_lhs (stmt, exp);
413       gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
414     }
415 }
416 
417 /* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
418    copied/set by DECREMENT.  */
419 static void
420 decrement_count (gimple *stmt, int decrement)
421 {
422   tree *countp = gimple_call_arg_ptr (stmt, 2);
423   gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
424   *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
425 						    - decrement));
426 }
427 
428 static void
429 increment_start_addr (gimple *stmt, tree *where, int increment)
430 {
431   if (tree lhs = gimple_call_lhs (stmt))
432     if (where == gimple_call_arg_ptr (stmt, 0))
433       {
434 	gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
435 	gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
436 	gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
437 	gimple_call_set_lhs (stmt, NULL_TREE);
438 	update_stmt (stmt);
439       }
440 
441   if (TREE_CODE (*where) == SSA_NAME)
442     {
443       tree tem = make_ssa_name (TREE_TYPE (*where));
444       gassign *newop
445 	= gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
446 			       build_int_cst (sizetype, increment));
447       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
448       gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
449       *where = tem;
450       update_stmt (stmt);
451       return;
452     }
453 
454   *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
455 					      *where,
456 					      build_int_cst (ptr_type_node,
457 							     increment)));
458 }
459 
460 /* STMT is a builtin call that writes the bytes described by REF.  The
461    bytes not set in LIVE are dead and need not be stored.  Try to rewrite
462    STMT to reduce the amount of data it actually writes.
463 
464    Right now we only support trimming from the head or the tail of the
465    memory region.  In theory we could split the mem* call, but it's
466    likely of marginal value.  */
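
/* For example (a hypothetical sketch; "use" is a placeholder function
   and the memcpy source is a literal so it cannot alias BUF):

     char buf[32];
     memset (buf, 0, 32);
     memcpy (buf, "0123456789abcdef", 16);
     use (buf);

   The memcpy makes the first sixteen bytes written by the memset dead,
   so the memset is rewritten to roughly memset (&buf[16], 0, 16): the
   start address is bumped by the head trim and the length reduced.  */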
467 
468 static void
469 maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
470 {
471   int head_trim, tail_trim;
472   switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
473     {
474     case BUILT_IN_STRNCPY:
475     case BUILT_IN_STRNCPY_CHK:
476       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
477       if (head_trim)
478 	{
479 	  /* Head trimming of strncpy is only possible if we can
480 	     prove all bytes we would trim are non-zero (or we could
481 	     turn the strncpy into memset if there must be zero
482 	     among the head trimmed bytes).  If we don't know anything
483 	     about those bytes, the presence or absence of '\0' bytes
484 	     in there will affect whether it acts for the non-trimmed
485 	     bytes as memset or memcpy/strncpy.  */
486 	  c_strlen_data lendata = { };
487 	  int orig_head_trim = head_trim;
488 	  tree srcstr = gimple_call_arg (stmt, 1);
489 	  if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
490 	      || !tree_fits_uhwi_p (lendata.minlen))
491 	    head_trim = 0;
492 	  else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
493 	    {
494 	      head_trim = tree_to_uhwi (lendata.minlen);
495 	      if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
496 		head_trim &= ~(UNITS_PER_WORD - 1);
497 	    }
498 	  if (orig_head_trim != head_trim
499 	      && dump_file
500 	      && (dump_flags & TDF_DETAILS))
501 	    fprintf (dump_file,
502 		     "  Adjusting strncpy trimming to (head = %d,"
503 		     " tail = %d)\n", head_trim, tail_trim);
504 	}
505       goto do_memcpy;
506 
507     case BUILT_IN_MEMCPY:
508     case BUILT_IN_MEMMOVE:
509     case BUILT_IN_MEMCPY_CHK:
510     case BUILT_IN_MEMMOVE_CHK:
511       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
512 
513     do_memcpy:
514       /* Tail trimming is easy, we can just reduce the count.  */
515       if (tail_trim)
516 	decrement_count (stmt, tail_trim);
517 
518       /* Head trimming requires adjusting all the arguments.  */
519       if (head_trim)
520 	{
521 	  /* For __*_chk need to adjust also the last argument.  */
522 	  if (gimple_call_num_args (stmt) == 4)
523 	    {
524 	      tree size = gimple_call_arg (stmt, 3);
525 	      if (!tree_fits_uhwi_p (size))
526 		break;
527 	      if (!integer_all_onesp (size))
528 		{
529 		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
530 		  if (sz < (unsigned) head_trim)
531 		    break;
532 		  tree arg = wide_int_to_tree (TREE_TYPE (size),
533 					       sz - head_trim);
534 		  gimple_call_set_arg (stmt, 3, arg);
535 		}
536 	    }
537 	  tree *dst = gimple_call_arg_ptr (stmt, 0);
538 	  increment_start_addr (stmt, dst, head_trim);
539 	  tree *src = gimple_call_arg_ptr (stmt, 1);
540 	  increment_start_addr (stmt, src, head_trim);
541 	  decrement_count (stmt, head_trim);
542 	}
543       break;
544 
545     case BUILT_IN_MEMSET:
546     case BUILT_IN_MEMSET_CHK:
547       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
548 
549       /* Tail trimming is easy, we can just reduce the count.  */
550       if (tail_trim)
551 	decrement_count (stmt, tail_trim);
552 
553       /* Head trimming requires adjusting all the arguments.  */
554       if (head_trim)
555 	{
556 	  /* For __*_chk need to adjust also the last argument.  */
557 	  if (gimple_call_num_args (stmt) == 4)
558 	    {
559 	      tree size = gimple_call_arg (stmt, 3);
560 	      if (!tree_fits_uhwi_p (size))
561 		break;
562 	      if (!integer_all_onesp (size))
563 		{
564 		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
565 		  if (sz < (unsigned) head_trim)
566 		    break;
567 		  tree arg = wide_int_to_tree (TREE_TYPE (size),
568 					       sz - head_trim);
569 		  gimple_call_set_arg (stmt, 3, arg);
570 		}
571 	    }
572 	  tree *dst = gimple_call_arg_ptr (stmt, 0);
573 	  increment_start_addr (stmt, dst, head_trim);
574 	  decrement_count (stmt, head_trim);
575 	}
576       break;
577 
578     default:
579       break;
580     }
581 }
582 
583 /* STMT is a memory write where one or more bytes written are dead
584    stores.  REF describes the memory written by STMT.  LIVE is the
585    bitmap of bytes that are actually live.
586 
587    Attempt to rewrite STMT so that it writes fewer memory locations.  Right
588    now we only support trimming at the start or end of the memory region.
589    It's not clear how much there is to be gained by trimming from the middle
590    of the region.  */
591 
592 static void
593 maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
594 {
595   if (is_gimple_assign (stmt)
596       && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
597     {
598       switch (gimple_assign_rhs_code (stmt))
599 	{
600 	case CONSTRUCTOR:
601 	  maybe_trim_constructor_store (ref, live, stmt);
602 	  break;
603 	case COMPLEX_CST:
604 	  maybe_trim_complex_store (ref, live, stmt);
605 	  break;
606 	default:
607 	  break;
608 	}
609     }
610 }
611 
612 /* Return TRUE if USE_REF reads bytes from LIVE, where LIVE is
613    derived from REF, a write reference.
614 
615    While this routine may modify USE_REF, it's passed by value, not
616    location.  So callers do not see those modifications.  */
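
/* For example (a hypothetical sketch): if REF describes a 16 byte store
   of which only bytes 8-15 are still live, a 4 byte read of bytes 0-3
   from the same base touches only dead bytes, so this returns false and
   the caller may ignore that read.  A read of bytes 4-11 overlaps live
   bytes and returns true.  */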
617 
618 static bool
619 live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
620 {
621   /* We have already verified that USE_REF and REF hit the same object.
622      Now verify that there's actually an overlap between USE_REF and REF.  */
623   HOST_WIDE_INT start, size;
624   if (normalize_ref (&use_ref, ref)
625       && (use_ref.offset - ref->offset).is_constant (&start)
626       && use_ref.size.is_constant (&size))
627     {
628       /* If USE_REF covers all of REF, then it will hit one or more
629 	 live bytes.   This avoids useless iteration over the bitmap
630 	 below.  */
631       if (start == 0 && known_eq (size, ref->size))
632 	return true;
633 
634       /* Now check if any of the remaining bits in use_ref are set in LIVE.  */
635       return bitmap_bit_in_range_p (live, start / BITS_PER_UNIT,
636 				    (start + size - 1) / BITS_PER_UNIT);
637     }
638   return true;
639 }
640 
641 /* Callback for dse_classify_store calling for_each_index.  Verify that
642    indices are invariant in the loop with backedge PHI in basic-block DATA.  */
643 
644 static bool
645 check_name (tree, tree *idx, void *data)
646 {
647   basic_block phi_bb = (basic_block) data;
648   if (TREE_CODE (*idx) == SSA_NAME
649       && !SSA_NAME_IS_DEFAULT_DEF (*idx)
650       && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
651 			 phi_bb))
652     return false;
653   return true;
654 }
655 
656 /* STMT stores the value 0 into one or more memory locations
657    (via memset, empty constructor, calloc call, etc).
658 
659    See if there is a subsequent store of the value 0 to one
660    or more of the same memory location(s).  If so, the subsequent
661    store is redundant and can be removed.
662 
663    The subsequent stores could be via memset, empty constructors,
664    simple MEM stores, etc.  */
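
/* For example (a hypothetical sketch):

     int *p = calloc (4, sizeof (int));
     memset (p, 0, 16);

   The calloc already zeroed the 16 bytes the memset writes, so the
   memset is removed as a redundant store.  The same applies when the
   later zero store is a plain assignment or an empty CONSTRUCTOR.  */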
665 
666 static void
667 dse_optimize_redundant_stores (gimple *stmt)
668 {
669   int cnt = 0;
670 
671   /* TBAA state of STMT, if it is a call it is effectively alias-set zero.  */
672   alias_set_type earlier_set = 0;
673   alias_set_type earlier_base_set = 0;
674   if (is_gimple_assign (stmt))
675     {
676       ao_ref lhs_ref;
677       ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
678       earlier_set = ao_ref_alias_set (&lhs_ref);
679       earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
680     }
681 
682   /* We could do something fairly complex and look through PHIs
683      like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
684      the effort.
685 
686      Look at all the immediate uses of the VDEF (which are obviously
687      dominated by STMT).   See if one or more stores 0 into the same
688      memory locations as STMT; if so, remove those immediate use statements.  */
689   tree defvar = gimple_vdef (stmt);
690   imm_use_iterator ui;
691   gimple *use_stmt;
692   FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
693     {
694       /* Limit stmt walking.  */
695       if (++cnt > param_dse_max_alias_queries_per_store)
696 	BREAK_FROM_IMM_USE_STMT (ui);
697 
698       /* If USE_STMT stores 0 into one or more of the same locations
699 	 as STMT and STMT would kill USE_STMT, then we can just remove
700 	 USE_STMT.  */
701       tree fndecl;
702       if ((is_gimple_assign (use_stmt)
703 	   && gimple_vdef (use_stmt)
704 	   && (gimple_assign_single_p (use_stmt)
705 	       && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
706 	  || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
707 	      && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
708 	      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
709 		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
710 	      && integer_zerop (gimple_call_arg (use_stmt, 1))))
711 	{
712 	  ao_ref write;
713 
714 	  if (!initialize_ao_ref_for_dse (use_stmt, &write))
715 	    BREAK_FROM_IMM_USE_STMT (ui)
716 
717 	  if (valid_ao_ref_for_dse (&write)
718 	      && stmt_kills_ref_p (stmt, &write))
719 	    {
720 	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
721 	      if (is_gimple_assign (use_stmt))
722 		{
723 		  ao_ref lhs_ref;
724 		  ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
725 		  if ((earlier_set == ao_ref_alias_set (&lhs_ref)
726 		       || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
727 					       earlier_set))
728 		      && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
729 			  || alias_set_subset_of
730 			       (ao_ref_base_alias_set (&lhs_ref),
731 						  earlier_base_set)))
732 		    delete_dead_or_redundant_assignment (&gsi, "redundant",
733 							 need_eh_cleanup);
734 		}
735 	      else if (is_gimple_call (use_stmt))
736 		{
737 		  if ((earlier_set == 0
738 		       || alias_set_subset_of (0, earlier_set))
739 		      && (earlier_base_set == 0
740 			  || alias_set_subset_of (0, earlier_base_set)))
741 		  delete_dead_or_redundant_call (&gsi, "redundant");
742 		}
743 	      else
744 		gcc_unreachable ();
745 	    }
746 	}
747     }
748 }
749 
750 /* A helper of dse_optimize_stmt.
751    Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
752    according to downstream uses and defs.  Sets *BY_CLOBBER_P to true
753    if only clobber statements influenced the classification result.
754    Returns the classification.  */
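
/* For example (a hypothetical sketch; "cond" is a placeholder):

     *p = 5;
     if (cond)
       *p = 1;
     else
       *p = 2;

   Both immediate uses of the first store's virtual definition are stores
   that kill it, so it is classified DSE_STORE_DEAD.  If one of the paths
   instead read *p, the walk would fail and the store would be classified
   DSE_STORE_LIVE, or DSE_STORE_MAYBE_PARTIAL_DEAD when byte tracking is
   enabled, giving the caller a chance to trim it.  */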
755 
756 dse_store_status
757 dse_classify_store (ao_ref *ref, gimple *stmt,
758 		    bool byte_tracking_enabled, sbitmap live_bytes,
759 		    bool *by_clobber_p, tree stop_at_vuse)
760 {
761   gimple *temp;
762   int cnt = 0;
763   auto_bitmap visited;
764 
765   if (by_clobber_p)
766     *by_clobber_p = true;
767 
768   /* Find the first dominated statement that clobbers (part of) the
769      memory stmt stores to, with no intermediate statement that may use
770      part of that memory.  That is, find a store that may
771      prove stmt to be a dead store.  */
772   temp = stmt;
773   do
774     {
775       gimple *use_stmt;
776       imm_use_iterator ui;
777       bool fail = false;
778       tree defvar;
779 
780       if (gimple_code (temp) == GIMPLE_PHI)
781 	{
782 	  /* If we visit this PHI by following a backedge then we have to
783 	     make sure ref->ref only refers to SSA names that are invariant
784 	     with respect to the loop represented by this PHI node.  */
785 	  if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
786 			      gimple_bb (temp))
787 	      && !for_each_index (ref->ref ? &ref->ref : &ref->base,
788 				  check_name, gimple_bb (temp)))
789 	    return DSE_STORE_LIVE;
790 	  defvar = PHI_RESULT (temp);
791 	  bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
792 	}
793       else
794 	defvar = gimple_vdef (temp);
795 
796       /* If we're instructed to stop walking at region boundary, do so.  */
797       if (defvar == stop_at_vuse)
798 	return DSE_STORE_LIVE;
799 
800       auto_vec<gimple *, 10> defs;
801       gimple *phi_def = NULL;
802       FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
803 	{
804 	  /* Limit stmt walking.  */
805 	  if (++cnt > param_dse_max_alias_queries_per_store)
806 	    {
807 	      fail = true;
808 	      BREAK_FROM_IMM_USE_STMT (ui);
809 	    }
810 
811 	  /* We have visited ourselves already so ignore STMT for the
812 	     purpose of chaining.  */
813 	  if (use_stmt == stmt)
814 	    ;
815 	  /* In simple cases we can look through PHI nodes, but we
816 	     have to be careful with loops and with memory references
817 	     containing operands that are also operands of PHI nodes.
818 	     See gcc.c-torture/execute/20051110-*.c.  */
819 	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
820 	    {
821 	      /* If we already visited this PHI ignore it for further
822 		 processing.  */
823 	      if (!bitmap_bit_p (visited,
824 				 SSA_NAME_VERSION (PHI_RESULT (use_stmt))))
825 		{
826 		  defs.safe_push (use_stmt);
827 		  phi_def = use_stmt;
828 		}
829 	    }
830 	  /* If the statement is a use the store is not dead.  */
831 	  else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
832 	    {
833 	      /* Handle common cases where we can easily build an ao_ref
834 		 structure for USE_STMT and in doing so we find that the
835 		 references hit non-live bytes and thus can be ignored.  */
836 	      if (byte_tracking_enabled
837 		  && is_gimple_assign (use_stmt))
838 		{
839 		  ao_ref use_ref;
840 		  ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
841 		  if (valid_ao_ref_for_dse (&use_ref)
842 		      && use_ref.base == ref->base
843 		      && known_eq (use_ref.size, use_ref.max_size)
844 		      && !live_bytes_read (use_ref, ref, live_bytes))
845 		    {
846 		      /* If this is a store, remember it as we possibly
847 			 need to walk the defs uses.  */
848 		      if (gimple_vdef (use_stmt))
849 			defs.safe_push (use_stmt);
850 		      continue;
851 		    }
852 		}
853 
854 	      fail = true;
855 	      BREAK_FROM_IMM_USE_STMT (ui);
856 	    }
857 	  /* If this is a store, remember it as we possibly need to walk the
858 	     defs uses.  */
859 	  else if (gimple_vdef (use_stmt))
860 	    defs.safe_push (use_stmt);
861 	}
862 
863       if (fail)
864 	{
865 	  /* STMT might be partially dead and we may be able to reduce
866 	     how many memory locations it stores into.  */
867 	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
868 	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
869 	  return DSE_STORE_LIVE;
870 	}
871 
872       /* If we didn't find any definition this means the store is dead
873          if it isn't a store to globally reachable memory.  In this case
874 	 just pretend the stmt makes itself dead.  Otherwise fail.  */
875       if (defs.is_empty ())
876 	{
877 	  if (ref_may_alias_global_p (ref))
878 	    return DSE_STORE_LIVE;
879 
880 	  if (by_clobber_p)
881 	    *by_clobber_p = false;
882 	  return DSE_STORE_DEAD;
883 	}
884 
885       /* Process defs and remove those we need not process further.  */
886       for (unsigned i = 0; i < defs.length ();)
887 	{
888 	  gimple *def = defs[i];
889 	  gimple *use_stmt;
890 	  use_operand_p use_p;
891 	  /* If the path to check starts with a kill we do not need to
892 	     process it further.
893 	     ???  With byte tracking we need only kill the bytes currently
894 	     live.  */
895 	  if (stmt_kills_ref_p (def, ref))
896 	    {
897 	      if (by_clobber_p && !gimple_clobber_p (def))
898 		*by_clobber_p = false;
899 	      defs.unordered_remove (i);
900 	    }
901 	  /* In addition to kills we can remove defs whose only use
902 	     is another def in defs.  That can only ever be PHIs, of which
903 	     we track a single one for simplicity (we fail for multiple
904 	     PHIs anyway).  We can also ignore defs that feed only into
905 	     already visited PHIs.  */
906 	  else if (gimple_code (def) != GIMPLE_PHI
907 		   && single_imm_use (gimple_vdef (def), &use_p, &use_stmt)
908 		   && (use_stmt == phi_def
909 		       || (gimple_code (use_stmt) == GIMPLE_PHI
910 			   && bitmap_bit_p (visited,
911 					    SSA_NAME_VERSION
912 					      (PHI_RESULT (use_stmt))))))
913 	    defs.unordered_remove (i);
914 	  else
915 	    ++i;
916 	}
917 
918       /* If all defs kill the ref we are done.  */
919       if (defs.is_empty ())
920 	return DSE_STORE_DEAD;
921       /* If more than one def survives fail.  */
922       if (defs.length () > 1)
923 	{
924 	  /* STMT might be partially dead and we may be able to reduce
925 	     how many memory locations it stores into.  */
926 	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
927 	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
928 	  return DSE_STORE_LIVE;
929 	}
930       temp = defs[0];
931 
932       /* Track partial kills.  */
933       if (byte_tracking_enabled)
934 	{
935 	  clear_bytes_written_by (live_bytes, temp, ref);
936 	  if (bitmap_empty_p (live_bytes))
937 	    {
938 	      if (by_clobber_p && !gimple_clobber_p (temp))
939 		*by_clobber_p = false;
940 	      return DSE_STORE_DEAD;
941 	    }
942 	}
943     }
944   /* Continue walking until there are no more live bytes.  */
945   while (1);
946 }
947 
948 
949 class dse_dom_walker : public dom_walker
950 {
951 public:
952   dse_dom_walker (cdi_direction direction)
953     : dom_walker (direction),
954     m_live_bytes (param_dse_max_object_size),
955     m_byte_tracking_enabled (false) {}
956 
957   virtual edge before_dom_children (basic_block);
958 
959 private:
960   auto_sbitmap m_live_bytes;
961   bool m_byte_tracking_enabled;
962   void dse_optimize_stmt (gimple_stmt_iterator *);
963 };
964 
965 /* Delete a dead call at GSI, which is a mem* call of some kind.  */
966 static void
967 delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
968 {
969   gimple *stmt = gsi_stmt (*gsi);
970   if (dump_file && (dump_flags & TDF_DETAILS))
971     {
972       fprintf (dump_file, "  Deleted %s call: ", type);
973       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
974       fprintf (dump_file, "\n");
975     }
976 
977   tree lhs = gimple_call_lhs (stmt);
978   if (lhs)
979     {
980       tree ptr = gimple_call_arg (stmt, 0);
981       gimple *new_stmt = gimple_build_assign (lhs, ptr);
982       unlink_stmt_vdef (stmt);
983       if (gsi_replace (gsi, new_stmt, true))
984         bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
985     }
986   else
987     {
988       /* Then we need to fix the operand of the consuming stmt.  */
989       unlink_stmt_vdef (stmt);
990 
991       /* Remove the dead store.  */
992       if (gsi_remove (gsi, true))
993 	bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
994       release_defs (stmt);
995     }
996 }
997 
998 /* Delete a dead store at GSI, which is a gimple assignment. */
999 
1000 void
1001 delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi, const char *type,
1002 				     bitmap need_eh_cleanup)
1003 {
1004   gimple *stmt = gsi_stmt (*gsi);
1005   if (dump_file && (dump_flags & TDF_DETAILS))
1006     {
1007       fprintf (dump_file, "  Deleted %s store: ", type);
1008       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1009       fprintf (dump_file, "\n");
1010     }
1011 
1012   /* Then we need to fix the operand of the consuming stmt.  */
1013   unlink_stmt_vdef (stmt);
1014 
1015   /* Remove the dead store.  */
1016   basic_block bb = gimple_bb (stmt);
1017   if (gsi_remove (gsi, true) && need_eh_cleanup)
1018     bitmap_set_bit (need_eh_cleanup, bb->index);
1019 
1020   /* And release any SSA_NAMEs set in this statement back to the
1021      SSA_NAME manager.  */
1022   release_defs (stmt);
1023 }
1024 
1025 /* Attempt to eliminate dead stores in the statement referenced by GSI.
1026 
1027    A dead store is a store into a memory location which will later be
1028    overwritten by another store without any intervening loads.  In this
1029    case the earlier store can be deleted.
1030 
1031    In our SSA + virtual operand world we use immediate uses of virtual
1032    operands to detect dead stores.  If a store's virtual definition
1033    is used precisely once by a later store to the same location which
1034    post dominates the first store, then the first store is dead.  */
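
/* For example (a hypothetical sketch):

     memcpy (p, q, 0);
     memset (p, 0, 16);
     memset (p, 0, 16);

   The zero-length memcpy is trivially dead and is deleted outright,
   while the first memset stores zero and so gives
   dse_optimize_redundant_stores a chance to delete the second memset as
   redundant.  */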
1035 
1036 void
1037 dse_dom_walker::dse_optimize_stmt (gimple_stmt_iterator *gsi)
1038 {
1039   gimple *stmt = gsi_stmt (*gsi);
1040 
1041   /* If this statement has no virtual defs, then there is nothing
1042      to do.  */
1043   if (!gimple_vdef (stmt))
1044     return;
1045 
1046   /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
1047   if (gimple_has_volatile_ops (stmt)
1048       && (!gimple_clobber_p (stmt)
1049 	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1050     return;
1051 
1052   ao_ref ref;
1053   if (!initialize_ao_ref_for_dse (stmt, &ref))
1054     return;
1055 
1056   /* We know we have virtual definitions.  We can handle assignments and
1057      some builtin calls.  */
1058   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1059     {
1060       tree fndecl = gimple_call_fndecl (stmt);
1061       switch (DECL_FUNCTION_CODE (fndecl))
1062 	{
1063 	case BUILT_IN_MEMCPY:
1064 	case BUILT_IN_MEMMOVE:
1065 	case BUILT_IN_STRNCPY:
1066 	case BUILT_IN_MEMSET:
1067 	case BUILT_IN_MEMCPY_CHK:
1068 	case BUILT_IN_MEMMOVE_CHK:
1069 	case BUILT_IN_STRNCPY_CHK:
1070 	case BUILT_IN_MEMSET_CHK:
1071 	  {
1072 	    /* Occasionally calls with an explicit length of zero
1073 	       show up in the IL.  It's pointless to do analysis
1074 	       on them, they're trivially dead.  */
1075 	    tree size = gimple_call_arg (stmt, 2);
1076 	    if (integer_zerop (size))
1077 	      {
1078 		delete_dead_or_redundant_call (gsi, "dead");
1079 		return;
1080 	      }
1081 
1082 	    /* If this is a memset call that initializes an object
1083 	       to zero, it may be redundant with an earlier memset
1084 	       or empty CONSTRUCTOR of a larger object.  */
1085 	    if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1086 		 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1087 		&& integer_zerop (gimple_call_arg (stmt, 1)))
1088 	      dse_optimize_redundant_stores (stmt);
1089 
1090 	    enum dse_store_status store_status;
1091 	    m_byte_tracking_enabled
1092 	      = setup_live_bytes_from_ref (&ref, m_live_bytes);
1093 	    store_status = dse_classify_store (&ref, stmt,
1094 					       m_byte_tracking_enabled,
1095 					       m_live_bytes);
1096 	    if (store_status == DSE_STORE_LIVE)
1097 	      return;
1098 
1099 	    if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1100 	      {
1101 		maybe_trim_memstar_call (&ref, m_live_bytes, stmt);
1102 		return;
1103 	      }
1104 
1105 	    if (store_status == DSE_STORE_DEAD)
1106 	      delete_dead_or_redundant_call (gsi, "dead");
1107 	    return;
1108 	  }
1109 
1110 	case BUILT_IN_CALLOC:
1111 	  /* We already know the arguments are integer constants.  */
1112 	  dse_optimize_redundant_stores (stmt);
1113 	  return;
1114 
1115 	default:
1116 	  return;
1117 	}
1118     }
1119 
1120   if (is_gimple_assign (stmt))
1121     {
1122       bool by_clobber_p = false;
1123 
1124       /* Check if this statement stores zero to a memory location,
1125 	 and if there is a subsequent store of zero to the same
1126 	 memory location.  If so, remove the subsequent store.  */
1127       if (gimple_assign_single_p (stmt)
1128 	  && initializer_zerop (gimple_assign_rhs1 (stmt)))
1129 	dse_optimize_redundant_stores (stmt);
1130 
1131       /* Self-assignments are zombies.  */
1132       if (operand_equal_p (gimple_assign_rhs1 (stmt),
1133 			   gimple_assign_lhs (stmt), 0))
1134 	;
1135       else
1136 	{
1137 	  m_byte_tracking_enabled
1138 	    = setup_live_bytes_from_ref (&ref, m_live_bytes);
1139 	  enum dse_store_status store_status;
1140 	  store_status = dse_classify_store (&ref, stmt,
1141 					     m_byte_tracking_enabled,
1142 					     m_live_bytes, &by_clobber_p);
1143 	  if (store_status == DSE_STORE_LIVE)
1144 	    return;
1145 
1146 	  if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1147 	    {
1148 	      maybe_trim_partially_dead_store (&ref, m_live_bytes, stmt);
1149 	      return;
1150 	    }
1151 	}
1152 
1153       /* Now we know that use_stmt kills the LHS of stmt.  */
1154 
1155       /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1156 	 another clobber stmt.  */
1157       if (gimple_clobber_p (stmt)
1158 	  && !by_clobber_p)
1159 	return;
1160 
1161       delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup);
1162     }
1163 }
1164 
1165 edge
1166 dse_dom_walker::before_dom_children (basic_block bb)
1167 {
1168   gimple_stmt_iterator gsi;
1169 
1170   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1171     {
1172       dse_optimize_stmt (&gsi);
1173       if (gsi_end_p (gsi))
1174 	gsi = gsi_last_bb (bb);
1175       else
1176 	gsi_prev (&gsi);
1177     }
1178   return NULL;
1179 }
1180 
1181 namespace {
1182 
1183 const pass_data pass_data_dse =
1184 {
1185   GIMPLE_PASS, /* type */
1186   "dse", /* name */
1187   OPTGROUP_NONE, /* optinfo_flags */
1188   TV_TREE_DSE, /* tv_id */
1189   ( PROP_cfg | PROP_ssa ), /* properties_required */
1190   0, /* properties_provided */
1191   0, /* properties_destroyed */
1192   0, /* todo_flags_start */
1193   0, /* todo_flags_finish */
1194 };
1195 
1196 class pass_dse : public gimple_opt_pass
1197 {
1198 public:
1199   pass_dse (gcc::context *ctxt)
1200     : gimple_opt_pass (pass_data_dse, ctxt)
1201   {}
1202 
1203   /* opt_pass methods: */
1204   opt_pass * clone () { return new pass_dse (m_ctxt); }
1205   virtual bool gate (function *) { return flag_tree_dse != 0; }
1206   virtual unsigned int execute (function *);
1207 
1208 }; // class pass_dse
1209 
1210 unsigned int
1211 pass_dse::execute (function *fun)
1212 {
1213   need_eh_cleanup = BITMAP_ALLOC (NULL);
1214 
1215   renumber_gimple_stmt_uids (cfun);
1216 
1217   /* We might consider making this a property of each pass so that it
1218      can be [re]computed on an as-needed basis.  Particularly since
1219      this pass could be seen as an extension of DCE which needs post
1220      dominators.  */
1221   calculate_dominance_info (CDI_POST_DOMINATORS);
1222   calculate_dominance_info (CDI_DOMINATORS);
1223 
1224   /* Dead store elimination is fundamentally a walk of the post-dominator
1225      tree and a backwards walk of statements within each block.  */
1226   dse_dom_walker (CDI_POST_DOMINATORS).walk (fun->cfg->x_exit_block_ptr);
1227 
1228   /* Removal of stores may make some EH edges dead.  Purge such edges from
1229      the CFG as needed.  */
1230   if (!bitmap_empty_p (need_eh_cleanup))
1231     {
1232       gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1233       cleanup_tree_cfg ();
1234     }
1235 
1236   BITMAP_FREE (need_eh_cleanup);
1237 
1238   /* For now, just wipe the post-dominator information.  */
1239   free_dominance_info (CDI_POST_DOMINATORS);
1240   return 0;
1241 }
1242 
1243 } // anon namespace
1244 
1245 gimple_opt_pass *
1246 make_pass_dse (gcc::context *ctxt)
1247 {
1248   return new pass_dse (ctxt);
1249 }
1250