1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any
12 later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 /* Dead code elimination.
25
26 References:
27
28 Building an Optimizing Compiler,
29 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
30
31 Advanced Compiler Design and Implementation,
32 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
33
34 Dead-code elimination is the removal of statements which have no
35 impact on the program's output. "Dead statements" contribute nothing
36 to the output and may be removed, while "necessary statements" may
37 affect the output and must be preserved.
38
39 The algorithm consists of three phases:
40 1. Marking as necessary all statements known to be necessary,
41 e.g. most function calls, writing a value to memory, etc;
42 2. Propagating necessary statements, e.g., the statements
43 giving values to operands in necessary statements; and
44 3. Removing dead statements. */
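/* As an illustrative sketch only (the variable names below are
   hypothetical), consider this GIMPLE-like sequence:

	a_1 = x_2 + 1;
	b_3 = a_1 * 2;
	*p_4 = a_1;
	return a_1;

   Phase 1 marks "*p_4 = a_1" (a store to memory) and "return a_1" as
   obviously necessary.  Phase 2 marks "a_1 = x_2 + 1" because a_1 feeds
   two necessary statements.  Phase 3 deletes "b_3 = a_1 * 2", since b_3
   never reaches a necessary statement.  */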
45
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "ggc.h"
51
52 /* These RTL headers are needed for basic-block.h. */
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "hard-reg-set.h"
56 #include "obstack.h"
57 #include "basic-block.h"
58
59 #include "tree.h"
60 #include "diagnostic.h"
61 #include "tree-flow.h"
62 #include "tree-gimple.h"
63 #include "tree-dump.h"
64 #include "tree-pass.h"
65 #include "timevar.h"
66 #include "flags.h"
67 #include "cfgloop.h"
68 #include "tree-scalar-evolution.h"
69
70 static struct stmt_stats
71 {
72 int total;
73 int total_phis;
74 int removed;
75 int removed_phis;
76 } stats;
77
78 static VEC(tree,heap) *worklist;
79
80 /* Vector indicating an SSA name has already been processed and marked
81 as necessary. */
82 static sbitmap processed;
83
84 /* Vector indicating that the last_stmt of a basic block has already
85 been marked as necessary. */
86 static sbitmap last_stmt_necessary;
87
88 /* Before we can determine whether a control branch is dead, we need to
89 compute which blocks are control dependent on which edges.
90
91 We expect each block to be control dependent on very few edges so we
92 use a bitmap for each block recording its edges. An array holds the
93 bitmap. The Ith bit in the bitmap is set if that block is dependent
94 on the Ith edge. */
95 static bitmap *control_dependence_map;
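/* As a sketch, consider a hypothetical diamond-shaped CFG:

	     bb0
	    /   \
	  bb1   bb2
	    \   /
	     bb3

   Here bb1 is control dependent on the edge bb0->bb1 and bb2 on the edge
   bb0->bb2, while bb3 (the immediate post-dominator of bb0) is control
   dependent on neither.  So control_dependence_map[1] would have the bit
   for the edge bb0->bb1 set, and control_dependence_map[2] the bit for
   bb0->bb2; the actual bit positions are the edge indices assigned by the
   edge list created in perform_tree_ssa_dce.  */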
96
97 /* Vector indicating that a basic block has already had all the edges
98 processed that it is control dependent on. */
99 static sbitmap visited_control_parents;
100
101 /* TRUE if this pass alters the CFG (by removing control statements).
102 FALSE otherwise.
103
104 If this pass alters the CFG, then it will arrange for the dominators
105 to be recomputed. */
106 static bool cfg_altered;
107
108 /* Execute code that follows the macro for each edge (given number
109 EDGE_NUMBER within the CODE) for which the block with index N is
110 control dependent. */
111 #define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER) \
112 EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0, \
113 (EDGE_NUMBER), (BI))
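/* Typical use, mirroring mark_control_dependent_edges_necessary below:

	bitmap_iterator bi;
	unsigned edge_number;

	EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
	  {
	    basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
	    ...
	  }
*/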
114
115 /* Local function prototypes. */
116 static inline void set_control_dependence_map_bit (basic_block, int);
117 static inline void clear_control_dependence_bitmap (basic_block);
118 static void find_all_control_dependences (struct edge_list *);
119 static void find_control_dependence (struct edge_list *, int);
120 static inline basic_block find_pdom (basic_block);
121
122 static inline void mark_stmt_necessary (tree, bool);
123 static inline void mark_operand_necessary (tree, bool);
124
125 static void mark_stmt_if_obviously_necessary (tree, bool);
126 static void find_obviously_necessary_stmts (struct edge_list *);
127
128 static void mark_control_dependent_edges_necessary (basic_block, struct edge_list *);
129 static void propagate_necessity (struct edge_list *);
130
131 static void eliminate_unnecessary_stmts (void);
132 static void remove_dead_phis (basic_block);
133 static void remove_dead_stmt (block_stmt_iterator *, basic_block);
134
135 static void print_stats (void);
136 static void tree_dce_init (bool);
137 static void tree_dce_done (bool);
138
139 /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */
140 static inline void
141 set_control_dependence_map_bit (basic_block bb, int edge_index)
142 {
143 if (bb == ENTRY_BLOCK_PTR)
144 return;
145 gcc_assert (bb != EXIT_BLOCK_PTR);
146 bitmap_set_bit (control_dependence_map[bb->index], edge_index);
147 }
148
149 /* Clear all control dependences for block BB. */
150 static inline void
151 clear_control_dependence_bitmap (basic_block bb)
152 {
153 bitmap_clear (control_dependence_map[bb->index]);
154 }
155
156 /* Record all blocks' control dependences on all edges in the edge
157 list EL, ala Morgan, Section 3.6. */
158
159 static void
160 find_all_control_dependences (struct edge_list *el)
161 {
162 int i;
163
164 for (i = 0; i < NUM_EDGES (el); ++i)
165 find_control_dependence (el, i);
166 }
167
168 /* Determine all blocks' control dependences on the edge with index
169 EDGE_INDEX in edge list EL, ala Morgan, Section 3.6. */
170
171 static void
172 find_control_dependence (struct edge_list *el, int edge_index)
173 {
174 basic_block current_block;
175 basic_block ending_block;
176
177 gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);
178
179 if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
180 ending_block = single_succ (ENTRY_BLOCK_PTR);
181 else
182 ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));
183
184 for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
185 current_block != ending_block && current_block != EXIT_BLOCK_PTR;
186 current_block = find_pdom (current_block))
187 {
188 edge e = INDEX_EDGE (el, edge_index);
189
190 /* For abnormal edges, we don't make current_block control
191 dependent because instructions that throw are always necessary
192 anyway. */
193 if (e->flags & EDGE_ABNORMAL)
194 continue;
195
196 set_control_dependence_map_bit (current_block, edge_index);
197 }
198 }
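/* To make the walk above concrete: for an edge E from block B to block S,
   the loop starts at S and follows immediate post-dominators until it
   reaches find_pdom (B), marking every block visited as control dependent
   on E.  In the diamond sketched above control_dependence_map, the edge
   bb0->bb1 marks only bb1, because find_pdom (bb0) is bb3.  */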
199
200 /* Find the immediate postdominator PDOM of the specified basic block BLOCK.
201 This function is necessary because some blocks have negative numbers. */
202
203 static inline basic_block
204 find_pdom (basic_block block)
205 {
206 gcc_assert (block != ENTRY_BLOCK_PTR);
207
208 if (block == EXIT_BLOCK_PTR)
209 return EXIT_BLOCK_PTR;
210 else
211 {
212 basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
213 if (! bb)
214 return EXIT_BLOCK_PTR;
215 return bb;
216 }
217 }
218
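/* A scratch mark bit for this pass: we reuse the statement's
   asm_written_flag to record whether a statement or PHI node has been
   marked necessary.  The flag is cleared for every statement and PHI in
   find_obviously_necessary_stmts before the marking phases run.  */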
219 #define NECESSARY(stmt) stmt->common.asm_written_flag
220
221 /* If STMT is not already marked necessary, mark it, and add it to the
222 worklist if ADD_TO_WORKLIST is true. */
223 static inline void
224 mark_stmt_necessary (tree stmt, bool add_to_worklist)
225 {
226 gcc_assert (stmt);
227 gcc_assert (!DECL_P (stmt));
228
229 if (NECESSARY (stmt))
230 return;
231
232 if (dump_file && (dump_flags & TDF_DETAILS))
233 {
234 fprintf (dump_file, "Marking useful stmt: ");
235 print_generic_stmt (dump_file, stmt, TDF_SLIM);
236 fprintf (dump_file, "\n");
237 }
238
239 NECESSARY (stmt) = 1;
240 if (add_to_worklist)
241 VEC_safe_push (tree, heap, worklist, stmt);
242 }
243
244 /* Mark the statement defining operand OP as necessary. PHIONLY is true
245 if we should only mark it necessary if it is a phi node. */
246
247 static inline void
248 mark_operand_necessary (tree op, bool phionly)
249 {
250 tree stmt;
251 int ver;
252
253 gcc_assert (op);
254
255 ver = SSA_NAME_VERSION (op);
256 if (TEST_BIT (processed, ver))
257 return;
258 SET_BIT (processed, ver);
259
260 stmt = SSA_NAME_DEF_STMT (op);
261 gcc_assert (stmt);
262
263 if (NECESSARY (stmt)
264 || IS_EMPTY_STMT (stmt)
265 || (phionly && TREE_CODE (stmt) != PHI_NODE))
266 return;
267
268 NECESSARY (stmt) = 1;
269 VEC_safe_push (tree, heap, worklist, stmt);
270 }
271
272
273 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
274 it can make other statements necessary.
275
276 If AGGRESSIVE is false, control statements are conservatively marked as
277 necessary. */
278
279 static void
280 mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
281 {
282 stmt_ann_t ann;
283 tree op;
284
285 /* With non-call exceptions, we have to assume that all statements could
286 throw. If a statement may throw, it is inherently necessary. */
287 if (flag_non_call_exceptions
288 && tree_could_throw_p (stmt))
289 {
290 mark_stmt_necessary (stmt, true);
291 return;
292 }
293
294 /* Statements that are implicitly live. Most function calls, asm and return
295 statements are required. Labels and BIND_EXPR nodes are kept because
296 they are control flow, and we have no way of knowing whether they can be
297 removed. DCE can eliminate all the other statements in a block, and CFG
298 can then remove the block and labels. */
299 switch (TREE_CODE (stmt))
300 {
301 case BIND_EXPR:
302 case LABEL_EXPR:
303 case CASE_LABEL_EXPR:
304 mark_stmt_necessary (stmt, false);
305 return;
306
307 case ASM_EXPR:
308 case RESX_EXPR:
309 case RETURN_EXPR:
310 mark_stmt_necessary (stmt, true);
311 return;
312
313 case CALL_EXPR:
314 /* Most, but not all, function calls are required. Function calls that
315 produce no result and have no side effects (i.e. const and pure
316 functions) are unnecessary. */
317 if (TREE_SIDE_EFFECTS (stmt))
318 mark_stmt_necessary (stmt, true);
319 return;
320
321 case MODIFY_EXPR:
322 op = get_call_expr_in (stmt);
323 if (op && TREE_SIDE_EFFECTS (op))
324 {
325 mark_stmt_necessary (stmt, true);
326 return;
327 }
328
329 /* These values are mildly magic bits of the EH runtime. We can't
330 see the entire lifetime of these values until landing pads are
331 generated. */
332 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == EXC_PTR_EXPR
333 || TREE_CODE (TREE_OPERAND (stmt, 0)) == FILTER_EXPR)
334 {
335 mark_stmt_necessary (stmt, true);
336 return;
337 }
338 break;
339
340 case GOTO_EXPR:
341 gcc_assert (!simple_goto_p (stmt));
342 mark_stmt_necessary (stmt, true);
343 return;
344
345 case COND_EXPR:
346 gcc_assert (EDGE_COUNT (bb_for_stmt (stmt)->succs) == 2);
347 /* Fall through. */
348
349 case SWITCH_EXPR:
350 if (! aggressive)
351 mark_stmt_necessary (stmt, true);
352 break;
353
354 default:
355 break;
356 }
357
358 ann = stmt_ann (stmt);
359
360 /* If the statement has volatile operands, it needs to be preserved.
361 Same for statements that can alter control flow in unpredictable
362 ways. */
363 if (ann->has_volatile_ops || is_ctrl_altering_stmt (stmt))
364 {
365 mark_stmt_necessary (stmt, true);
366 return;
367 }
368
369 if (is_hidden_global_store (stmt))
370 {
371 mark_stmt_necessary (stmt, true);
372 return;
373 }
374
375 return;
376 }
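/* For illustration (hypothetical GIMPLE), given

	bar ();			<-- marked: a call with side effects
	x_1 = y_2 + 1;		<-- not marked here
	return x_1;		<-- marked: RETURN_EXPR

   only the marked statements seed the worklist; "x_1 = y_2 + 1" becomes
   necessary only when propagate_necessity follows the use of x_1 in the
   return statement.  */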
377
378 /* Find obviously necessary statements. These are things like most function
379 calls, and stores to file level variables.
380
381 If EL is NULL, control statements are conservatively marked as
382 necessary. Otherwise it contains the list of edges used by control
383 dependence analysis. */
384
385 static void
386 find_obviously_necessary_stmts (struct edge_list *el)
387 {
388 basic_block bb;
389 block_stmt_iterator i;
390 edge e;
391
392 FOR_EACH_BB (bb)
393 {
394 tree phi;
395
396 /* Check any PHI nodes in the block. */
397 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
398 {
399 NECESSARY (phi) = 0;
400
401 /* PHIs for virtual variables do not directly affect code
402 generation and need not be considered inherently necessary
403 regardless of the bits set in their decl.
404
405 Thus, we only need to mark PHIs for real variables which
406 need their result preserved as being inherently necessary. */
407 if (is_gimple_reg (PHI_RESULT (phi))
408 && is_global_var (SSA_NAME_VAR (PHI_RESULT (phi))))
409 mark_stmt_necessary (phi, true);
410 }
411
412 /* Check all statements in the block. */
413 for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i))
414 {
415 tree stmt = bsi_stmt (i);
416 NECESSARY (stmt) = 0;
417 mark_stmt_if_obviously_necessary (stmt, el != NULL);
418 }
419 }
420
421 if (el)
422 {
423 /* Prevent the loops from being removed. We must keep the infinite loops,
424 and we currently do not have a means to recognize the finite ones. */
425 FOR_EACH_BB (bb)
426 {
427 edge_iterator ei;
428 FOR_EACH_EDGE (e, ei, bb->succs)
429 if (e->flags & EDGE_DFS_BACK)
430 mark_control_dependent_edges_necessary (e->dest, el);
431 }
432 }
433 }
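/* A sketch of why the back-edge handling above matters: in a loop such as

	while (n_1 != 0)
	  n_1 = n_1 - 1;

   nothing in the body is obviously necessary, yet the loop might not
   terminate, so deleting it could change the program's behaviour.  Marking
   the control dependences of each back edge's destination keeps the loop's
   controlling branch alive in aggressive mode, as the comment above says.  */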
434
435 /* Make corresponding control dependent edges necessary. We only
436 have to do this once for each basic block, so we clear the bitmap
437 after we're done. */
438 static void
439 mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
440 {
441 bitmap_iterator bi;
442 unsigned edge_number;
443
444 gcc_assert (bb != EXIT_BLOCK_PTR);
445
446 if (bb == ENTRY_BLOCK_PTR)
447 return;
448
449 EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
450 {
451 tree t;
452 basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
453
454 if (TEST_BIT (last_stmt_necessary, cd_bb->index))
455 continue;
456 SET_BIT (last_stmt_necessary, cd_bb->index);
457
458 t = last_stmt (cd_bb);
459 if (t && is_ctrl_stmt (t))
460 mark_stmt_necessary (t, true);
461 }
462 }
463
464 /* Propagate necessity using the operands of necessary statements. Process
465 the uses on each statement in the worklist, and add all feeding statements
466 which contribute to the calculation of this value to the worklist.
467
468 In conservative mode, EL is NULL. */
469
470 static void
471 propagate_necessity (struct edge_list *el)
472 {
473 tree i;
474 bool aggressive = (el ? true : false);
475
476 if (dump_file && (dump_flags & TDF_DETAILS))
477 fprintf (dump_file, "\nProcessing worklist:\n");
478
479 while (VEC_length (tree, worklist) > 0)
480 {
481 /* Take `i' from worklist. */
482 i = VEC_pop (tree, worklist);
483
484 if (dump_file && (dump_flags & TDF_DETAILS))
485 {
486 fprintf (dump_file, "processing: ");
487 print_generic_stmt (dump_file, i, TDF_SLIM);
488 fprintf (dump_file, "\n");
489 }
490
491 if (aggressive)
492 {
493 /* Mark the last statements of the basic blocks that the block
494 containing `i' is control dependent on, but only if we haven't
495 already done so. */
496 basic_block bb = bb_for_stmt (i);
497 if (bb != ENTRY_BLOCK_PTR
498 && ! TEST_BIT (visited_control_parents, bb->index))
499 {
500 SET_BIT (visited_control_parents, bb->index);
501 mark_control_dependent_edges_necessary (bb, el);
502 }
503 }
504
505 if (TREE_CODE (i) == PHI_NODE)
506 {
507 /* PHI nodes are somewhat special in that each PHI alternative has
508 data and control dependencies. All the statements feeding the
509 PHI node's arguments are always necessary. In aggressive mode,
510 we also consider the control dependent edges leading to the
511 predecessor block associated with each PHI alternative as
512 necessary. */
513 int k;
514 for (k = 0; k < PHI_NUM_ARGS (i); k++)
515 {
516 tree arg = PHI_ARG_DEF (i, k);
517 if (TREE_CODE (arg) == SSA_NAME)
518 mark_operand_necessary (arg, false);
519 }
520
521 if (aggressive)
522 {
523 for (k = 0; k < PHI_NUM_ARGS (i); k++)
524 {
525 basic_block arg_bb = PHI_ARG_EDGE (i, k)->src;
526 if (arg_bb != ENTRY_BLOCK_PTR
527 && ! TEST_BIT (visited_control_parents, arg_bb->index))
528 {
529 SET_BIT (visited_control_parents, arg_bb->index);
530 mark_control_dependent_edges_necessary (arg_bb, el);
531 }
532 }
533 }
534 }
535 else
536 {
537 /* Propagate through the operands. Examine all the USE, VUSE and
538 V_MAY_DEF operands in this statement. Mark all the statements
539 which feed this statement's uses as necessary. */
540 ssa_op_iter iter;
541 tree use;
542
543 /* The operands of V_MAY_DEF expressions are also needed as they
544 represent potential definitions that may reach this
545 statement (V_MAY_DEF operands allow us to follow def-def
546 links). */
547
548 FOR_EACH_SSA_TREE_OPERAND (use, i, iter, SSA_OP_ALL_USES)
549 mark_operand_necessary (use, false);
550 }
551 }
552 }
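/* A small worked example (hypothetical names): suppose "return c_3" was
   found obviously necessary.  Popping it from the worklist marks its
   feeding definition "c_3 = a_1 + b_2", which in turn marks the
   definitions of a_1 and b_2, and so on transitively.  In aggressive mode
   each statement taken from the worklist additionally marks the last
   (controlling) statements of the blocks its block is control dependent
   on.  */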
553
554
555 /* Propagate necessity around virtual phi nodes used in kill operands.
556 The reason this isn't done during propagate_necessity is that we don't
557 want to keep phis around that are just there for must-defs, unless we
558 absolutely have to. After we've rewritten the reaching definitions to be
559 correct in the previous part of the fixup routine, we can simply propagate
560 around the information about which of these virtual phi nodes are really
561 used, and set the NECESSARY flag accordingly.
562 Note that we do the minimum here to ensure that we keep alive the phis that
563 are actually used in the corrected SSA form. In particular, some of these
564 phis may now have all their operands the same, and will be deleted by some
565 other pass. */
566
567 static void
568 mark_really_necessary_kill_operand_phis (void)
569 {
570 basic_block bb;
571 int i;
572
573 /* Seed the worklist with the new virtual phi arguments and virtual
574 uses. */
575 FOR_EACH_BB (bb)
576 {
577 block_stmt_iterator bsi;
578 tree phi;
579
580 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
581 {
582 if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
583 {
584 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
585 mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
586 }
587 }
588
589 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
590 {
591 tree stmt = bsi_stmt (bsi);
592
593 if (NECESSARY (stmt))
594 {
595 use_operand_p use_p;
596 ssa_op_iter iter;
597 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
598 SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
599 {
600 tree use = USE_FROM_PTR (use_p);
601 mark_operand_necessary (use, true);
602 }
603 }
604 }
605 }
606
607 /* Mark all virtual phis still in use as necessary, and all of their
608 arguments that are phis as necessary. */
609 while (VEC_length (tree, worklist) > 0)
610 {
611 tree use = VEC_pop (tree, worklist);
612
613 for (i = 0; i < PHI_NUM_ARGS (use); i++)
614 mark_operand_necessary (PHI_ARG_DEF (use, i), true);
615 }
616 }
617
618
619
620
621 /* Eliminate unnecessary statements. Any instruction not marked as necessary
622 contributes nothing to the program, and can be deleted. */
623
624 static void
625 eliminate_unnecessary_stmts (void)
626 {
627 basic_block bb;
628 block_stmt_iterator i;
629
630 if (dump_file && (dump_flags & TDF_DETAILS))
631 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
632
633 clear_special_calls ();
634 FOR_EACH_BB (bb)
635 {
636 /* Remove dead PHI nodes. */
637 remove_dead_phis (bb);
638 }
639
640 FOR_EACH_BB (bb)
641 {
642 /* Remove dead statements. */
643 for (i = bsi_start (bb); ! bsi_end_p (i) ; )
644 {
645 tree t = bsi_stmt (i);
646
647 stats.total++;
648
649 /* If `i' is not necessary then remove it. */
650 if (! NECESSARY (t))
651 remove_dead_stmt (&i, bb);
652 else
653 {
654 tree call = get_call_expr_in (t);
655 if (call)
656 notice_special_calls (call);
657 bsi_next (&i);
658 }
659 }
660 }
661 }
662
663 /* Remove dead PHI nodes from block BB. */
664
665 static void
666 remove_dead_phis (basic_block bb)
667 {
668 tree prev, phi;
669
670 prev = NULL_TREE;
671 phi = phi_nodes (bb);
672 while (phi)
673 {
674 stats.total_phis++;
675
676 if (! NECESSARY (phi))
677 {
678 tree next = PHI_CHAIN (phi);
679
680 if (dump_file && (dump_flags & TDF_DETAILS))
681 {
682 fprintf (dump_file, "Deleting : ");
683 print_generic_stmt (dump_file, phi, TDF_SLIM);
684 fprintf (dump_file, "\n");
685 }
686
687 remove_phi_node (phi, prev);
688 stats.removed_phis++;
689 phi = next;
690 }
691 else
692 {
693 prev = phi;
694 phi = PHI_CHAIN (phi);
695 }
696 }
697 }
698
699 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
700 containing I so that we don't have to look it up. */
701
702 static void
703 remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
704 {
705 tree t = bsi_stmt (*i);
706 def_operand_p def_p;
707
708 ssa_op_iter iter;
709
710 if (dump_file && (dump_flags & TDF_DETAILS))
711 {
712 fprintf (dump_file, "Deleting : ");
713 print_generic_stmt (dump_file, t, TDF_SLIM);
714 fprintf (dump_file, "\n");
715 }
716
717 stats.removed++;
718
719 /* If we have determined that a conditional branch statement contributes
720 nothing to the program, then we not only remove it, but we also change
721 the flow graph so that the current block will simply fall-thru to its
722 immediate post-dominator. The blocks we are circumventing will be
723 removed by cleanup_tree_cfg if this change in the flow graph makes them
724 unreachable. */
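/* A sketch of the rewiring (hypothetical block numbers): if bb2 ends in a
   dead COND_EXPR,

	     bb2
	    /   \
	  bb3   bb4
	    \   /
	     bb5   <-- immediate post-dominator of bb2

   then bb2 is redirected to fall through straight to bb5; bb3 and bb4 may
   become unreachable and are cleaned up later by cleanup_tree_cfg.  */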
725 if (is_ctrl_stmt (t))
726 {
727 basic_block post_dom_bb;
728
729 /* The post dominance info has to be up-to-date. */
730 gcc_assert (dom_computed[CDI_POST_DOMINATORS] == DOM_OK);
731 /* Get the immediate post dominator of bb. */
732 post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
733
734 /* There are three particularly problematical cases.
735
736 1. Blocks that do not have an immediate post dominator. This
737 can happen with infinite loops.
738
739 2. Blocks that are only post dominated by the exit block. These
740 can also happen for infinite loops as we create fake edges
741 in the dominator tree.
742
743 3. If the post dominator has PHI nodes we may not be able to compute
744 the right PHI args for them.
745
746
747 In each of these cases we must remove the control statement
748 as it may reference SSA_NAMEs which are going to be removed and
749 we remove all but one outgoing edge from the block. */
750 if (! post_dom_bb
751 || post_dom_bb == EXIT_BLOCK_PTR
752 || phi_nodes (post_dom_bb))
753 ;
754 else
755 {
756 /* Redirect the first edge out of BB to reach POST_DOM_BB. */
757 redirect_edge_and_branch (EDGE_SUCC (bb, 0), post_dom_bb);
758 PENDING_STMT (EDGE_SUCC (bb, 0)) = NULL;
759 }
760 EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
761 EDGE_SUCC (bb, 0)->count = bb->count;
762
763 /* The edge is no longer associated with a conditional, so it does
764 not have TRUE/FALSE flags. */
765 EDGE_SUCC (bb, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
766
767 /* The lone outgoing edge from BB will be a fallthru edge. */
768 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
769
770 /* Remove the remaining outgoing edges. */
771 while (!single_succ_p (bb))
772 {
773 /* FIXME. When we remove the edge, we modify the CFG, which
774 in turn modifies the dominator and post-dominator tree.
775 Is it safe to postpone recomputing the dominator and
776 post-dominator tree until the end of this pass given that
777 the post-dominators are used above? */
778 cfg_altered = true;
779 remove_edge (EDGE_SUCC (bb, 1));
780 }
781 }
782
783 FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter, SSA_OP_VIRTUAL_DEFS)
784 {
785 tree def = DEF_FROM_PTR (def_p);
786 mark_sym_for_renaming (SSA_NAME_VAR (def));
787 }
788 bsi_remove (i, true);
789 release_defs (t);
790 }
791
792 /* Print out removed statement statistics. */
793
794 static void
795 print_stats (void)
796 {
797 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
798 {
799 float percg;
800
801 percg = ((float) stats.removed / (float) stats.total) * 100;
802 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
803 stats.removed, stats.total, (int) percg);
804
805 if (stats.total_phis == 0)
806 percg = 0;
807 else
808 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
809
810 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
811 stats.removed_phis, stats.total_phis, (int) percg);
812 }
813 }
814
815 /* Initialization for this pass. Set up the used data structures. */
816
817 static void
818 tree_dce_init (bool aggressive)
819 {
820 memset ((void *) &stats, 0, sizeof (stats));
821
822 if (aggressive)
823 {
824 int i;
825
826 control_dependence_map = XNEWVEC (bitmap, last_basic_block);
827 for (i = 0; i < last_basic_block; ++i)
828 control_dependence_map[i] = BITMAP_ALLOC (NULL);
829
830 last_stmt_necessary = sbitmap_alloc (last_basic_block);
831 sbitmap_zero (last_stmt_necessary);
832 }
833
834 processed = sbitmap_alloc (num_ssa_names + 1);
835 sbitmap_zero (processed);
836
837 worklist = VEC_alloc (tree, heap, 64);
838 cfg_altered = false;
839 }
840
841 /* Cleanup after this pass. */
842
843 static void
844 tree_dce_done (bool aggressive)
845 {
846 if (aggressive)
847 {
848 int i;
849
850 for (i = 0; i < last_basic_block; ++i)
851 BITMAP_FREE (control_dependence_map[i]);
852 free (control_dependence_map);
853
854 sbitmap_free (visited_control_parents);
855 sbitmap_free (last_stmt_necessary);
856 }
857
858 sbitmap_free (processed);
859
860 VEC_free (tree, heap, worklist);
861 }
862
863 /* Main routine to eliminate dead code.
864
865 AGGRESSIVE controls the aggressiveness of the algorithm.
866 In conservative mode, we ignore control dependence and simply declare
867 all but the most trivially dead branches necessary. This mode is fast.
868 In aggressive mode, control dependences are taken into account, which
869 results in more dead code elimination, but at the cost of some time.
870
871 FIXME: Aggressive mode before PRE doesn't work currently because
872 the dominance info is not invalidated after DCE1. This is
873 not an issue right now because we only run aggressive DCE
874 as the last tree SSA pass, but keep this in mind when you
875 start experimenting with pass ordering. */
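/* A rough sketch of the difference (hypothetical source): for

	if (a_1 > 0)
	  b_2 = 1;
	else
	  b_2 = 2;
	return 0;

   with b_2 never used, both modes delete the dead assignments, but only
   aggressive mode can also delete the branch itself, because no necessary
   statement is control dependent on it; conservative mode marks every
   COND_EXPR and SWITCH_EXPR necessary up front.  */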
876
877 static void
878 perform_tree_ssa_dce (bool aggressive)
879 {
880 struct edge_list *el = NULL;
881
882 tree_dce_init (aggressive);
883
884 if (aggressive)
885 {
886 /* Compute control dependence. */
887 timevar_push (TV_CONTROL_DEPENDENCES);
888 calculate_dominance_info (CDI_POST_DOMINATORS);
889 el = create_edge_list ();
890 find_all_control_dependences (el);
891 timevar_pop (TV_CONTROL_DEPENDENCES);
892
893 visited_control_parents = sbitmap_alloc (last_basic_block);
894 sbitmap_zero (visited_control_parents);
895
896 mark_dfs_back_edges ();
897 }
898
899 find_obviously_necessary_stmts (el);
900
901 propagate_necessity (el);
902
903 mark_really_necessary_kill_operand_phis ();
904 eliminate_unnecessary_stmts ();
905
906 if (aggressive)
907 free_dominance_info (CDI_POST_DOMINATORS);
908
909 /* If we removed paths in the CFG, then we need to update
910 dominators as well. I haven't investigated the possibility
911 of incrementally updating dominators. */
912 if (cfg_altered)
913 free_dominance_info (CDI_DOMINATORS);
914
915 /* Debugging dumps. */
916 if (dump_file)
917 print_stats ();
918
919 tree_dce_done (aggressive);
920
921 free_edge_list (el);
922 }
923
924 /* Pass entry points. */
925 static unsigned int
926 tree_ssa_dce (void)
927 {
928 perform_tree_ssa_dce (/*aggressive=*/false);
929 return 0;
930 }
931
932 static unsigned int
933 tree_ssa_dce_loop (void)
934 {
935 perform_tree_ssa_dce (/*aggressive=*/false);
936 free_numbers_of_iterations_estimates (current_loops);
937 scev_reset ();
938 return 0;
939 }
940
941 static unsigned int
942 tree_ssa_cd_dce (void)
943 {
944 perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
945 return 0;
946 }
947
948 static bool
949 gate_dce (void)
950 {
951 return flag_tree_dce != 0;
952 }
953
954 struct tree_opt_pass pass_dce =
955 {
956 "dce", /* name */
957 gate_dce, /* gate */
958 tree_ssa_dce, /* execute */
959 NULL, /* sub */
960 NULL, /* next */
961 0, /* static_pass_number */
962 TV_TREE_DCE, /* tv_id */
963 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
964 0, /* properties_provided */
965 0, /* properties_destroyed */
966 0, /* todo_flags_start */
967 TODO_dump_func
968 | TODO_update_ssa
969 | TODO_cleanup_cfg
970 | TODO_ggc_collect
971 | TODO_verify_ssa
972 | TODO_remove_unused_locals, /* todo_flags_finish */
973 0 /* letter */
974 };
975
976 struct tree_opt_pass pass_dce_loop =
977 {
978 "dceloop", /* name */
979 gate_dce, /* gate */
980 tree_ssa_dce_loop, /* execute */
981 NULL, /* sub */
982 NULL, /* next */
983 0, /* static_pass_number */
984 TV_TREE_DCE, /* tv_id */
985 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
986 0, /* properties_provided */
987 0, /* properties_destroyed */
988 0, /* todo_flags_start */
989 TODO_dump_func
990 | TODO_update_ssa
991 | TODO_cleanup_cfg
992 | TODO_verify_ssa, /* todo_flags_finish */
993 0 /* letter */
994 };
995
996 struct tree_opt_pass pass_cd_dce =
997 {
998 "cddce", /* name */
999 gate_dce, /* gate */
1000 tree_ssa_cd_dce, /* execute */
1001 NULL, /* sub */
1002 NULL, /* next */
1003 0, /* static_pass_number */
1004 TV_TREE_CD_DCE, /* tv_id */
1005 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1006 0, /* properties_provided */
1007 0, /* properties_destroyed */
1008 0, /* todo_flags_start */
1009 TODO_dump_func
1010 | TODO_update_ssa
1011 | TODO_cleanup_cfg
1012 | TODO_ggc_collect
1013 | TODO_verify_ssa
1014 | TODO_verify_flow, /* todo_flags_finish */
1015 0 /* letter */
1016 };
1017