/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
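
/* As an informal illustration of the three phases (the GIMPLE-like
   fragment and its SSA names are invented for this example, not taken
   from the references above):

     a_1 = b_2 + 1;     <- marked in phase 2, it feeds the return value
     c_3 = d_4 * 2;     <- never marked, no necessary statement uses c_3
     return a_1;        <- obviously necessary, marked in phase 1

   Phase 3 then deletes "c_3 = d_4 * 2;" because nothing marked it.  */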

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
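
/* A small illustrative example (not part of the original comments):
   in "if (p) { A; } B;", block A is control dependent on the edge taken
   when P is true, while B post-dominates the condition and therefore is
   not control dependent on either outgoing edge of the condition.  */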

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL holds map from basic block index into the postorder.  */
static int *bb_postorder;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls that
           produce no result and have no side effects (i.e. const pure
           functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise they are left for the later control dependence
   analysis to handle.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
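
/* For illustration only (not from the original comments): an access to a
   local "int a" whose address is never taken is based on a non-aliased
   decl, so ref_may_be_aliased returns false for it; an access through a
   pointer dereference like "*p" or to a global variable returns true.  */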

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
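
/* A hand-written example of the "kill" case above (not taken from the
   original comments): when REF is a read of local variable "a" and
   DEF_STMT is a full overwrite "a = b;" that dominates the read, no
   older definition of "a" can still reach the read, so the walk may
   stop at DEF_STMT.  */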

static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI has a single argument, or if all of its arguments
   are identical, and thus the PHI node can be removed.  */

static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
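
/* For example (an illustration, not from the original comments),
   x_3 = PHI <a_1, a_1> is degenerate and every use of x_3 can simply
   use a_1 instead, whereas x_3 = PHI <a_1, b_2> is not.  */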

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences are
   not used; control statements have already been marked necessary.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic blocks,
             since they contain computation of PHI operands.

             Doing so is too restrictive in the case the predecessor block is
             in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (which is within the empty loop) also contains the block
             determining the number of loop iterations, and that would
             prevent the empty loop from being removed.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases for what
             the children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG except for block B
                itself (since C' postdominates B in the modified CFG).

             Now how to decide which case happens?  There are two basic options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of B
                that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we need to iterate all successors
                of B.  We are safe to assume that this does not happen: we will
                mark B as needed when processing the other path from B to C
                that is control dependent on B, and marking control
                dependencies of B itself is harmless because they will be
                processed anyway after processing the control statement in B.

             b) C does not postdominate B.  Case 1 always happens since there
                is a path from C to the exit that does not go through B and
                thus not through C' either.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple *bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check used
                     bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}


/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also need to
     update the CFG.  We can choose any of the edges out of BB as long as we
     are sure to not close infinite loops.  This is done by always choosing
     the edge closer to exit in inverted_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement in
         it.  To be able to choose one, we compute inverted post order
         starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP any use of DATA
   (an SSA_NAME) other than through a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
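
/* A rough, hand-written illustration of the transform (the SSA names are
   invented for this example): if only the sum of

     _5 = ADD_OVERFLOW (a_1, b_2);
     x_6 = REALPART_EXPR <_5>;

   is ever used, the internal call is replaced by an unsigned addition whose
   result is packaged with a zero overflow flag, roughly

     _7 = (unsigned int) a_1 + (unsigned int) b_2;
     _5 = COMPLEX_EXPR <(int) _7, 0>;

   so the overflow computation itself is no longer needed.  */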

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
1206*38fd1498Szrj
1207*38fd1498Szrj /* Eliminate unnecessary statements. Any instruction not marked as necessary
1208*38fd1498Szrj contributes nothing to the program, and can be deleted. */
1209*38fd1498Szrj
1210*38fd1498Szrj static bool
eliminate_unnecessary_stmts(void)1211*38fd1498Szrj eliminate_unnecessary_stmts (void)
1212*38fd1498Szrj {
1213*38fd1498Szrj bool something_changed = false;
1214*38fd1498Szrj basic_block bb;
1215*38fd1498Szrj gimple_stmt_iterator gsi, psi;
1216*38fd1498Szrj gimple *stmt;
1217*38fd1498Szrj tree call;
1218*38fd1498Szrj vec<basic_block> h;
1219*38fd1498Szrj
1220*38fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
1221*38fd1498Szrj fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1222*38fd1498Szrj
1223*38fd1498Szrj clear_special_calls ();
1224*38fd1498Szrj
1225*38fd1498Szrj /* Walking basic blocks and statements in reverse order avoids
1226*38fd1498Szrj releasing SSA names before any other DEFs that refer to them are
1227*38fd1498Szrj released. This helps avoid loss of debug information, as we get
1228*38fd1498Szrj a chance to propagate all RHSs of removed SSAs into debug uses,
1229*38fd1498Szrj rather than only the latest ones. E.g., consider:
1230*38fd1498Szrj
1231*38fd1498Szrj x_3 = y_1 + z_2;
1232*38fd1498Szrj a_5 = x_3 - b_4;
1233*38fd1498Szrj # DEBUG a => a_5
1234*38fd1498Szrj
1235*38fd1498Szrj If we were to release x_3 before a_5, when we reached a_5 and
1236*38fd1498Szrj tried to substitute it into the debug stmt, we'd see x_3 there,
1237*38fd1498Szrj but x_3's DEF, type, etc would have already been disconnected.
1238*38fd1498Szrj By going backwards, the debug stmt first changes to:
1239*38fd1498Szrj
1240*38fd1498Szrj # DEBUG a => x_3 - b_4
1241*38fd1498Szrj
1242*38fd1498Szrj and then to:
1243*38fd1498Szrj
1244*38fd1498Szrj # DEBUG a => y_1 + z_2 - b_4
1245*38fd1498Szrj
1246*38fd1498Szrj as desired. */
1247*38fd1498Szrj gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1248*38fd1498Szrj h = get_all_dominated_blocks (CDI_DOMINATORS,
1249*38fd1498Szrj single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1250*38fd1498Szrj
1251*38fd1498Szrj while (h.length ())
1252*38fd1498Szrj {
1253*38fd1498Szrj bb = h.pop ();
1254*38fd1498Szrj
1255*38fd1498Szrj /* Remove dead statements. */
1256*38fd1498Szrj for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1257*38fd1498Szrj {
1258*38fd1498Szrj stmt = gsi_stmt (gsi);
1259*38fd1498Szrj
1260*38fd1498Szrj psi = gsi;
1261*38fd1498Szrj gsi_prev (&psi);
1262*38fd1498Szrj
1263*38fd1498Szrj stats.total++;
1264*38fd1498Szrj
1265*38fd1498Szrj /* We can mark a call to free as not necessary if the
1266*38fd1498Szrj defining statement of its argument is not necessary
1267*38fd1498Szrj (and thus is getting removed). */
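	  /* An illustrative sketch (hypothetical SSA names): given

	       p_1 = __builtin_malloc (sz_2);   <-- def not marked necessary
	       __builtin_free (p_1);            <-- unmarked here as well

	     the whole allocation/deallocation pair goes away together.  */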
1268*38fd1498Szrj if (gimple_plf (stmt, STMT_NECESSARY)
1269*38fd1498Szrj && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
1270*38fd1498Szrj {
1271*38fd1498Szrj tree ptr = gimple_call_arg (stmt, 0);
1272*38fd1498Szrj if (TREE_CODE (ptr) == SSA_NAME)
1273*38fd1498Szrj {
1274*38fd1498Szrj gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1275*38fd1498Szrj if (!gimple_nop_p (def_stmt)
1276*38fd1498Szrj && !gimple_plf (def_stmt, STMT_NECESSARY))
1277*38fd1498Szrj gimple_set_plf (stmt, STMT_NECESSARY, false);
1278*38fd1498Szrj }
1279*38fd1498Szrj /* We did not propagate necessity for free calls fed
1280*38fd1498Szrj by allocation function to allow unnecessary
1281*38fd1498Szrj alloc-free sequence elimination. For instrumented
1282*38fd1498Szrj calls it also means we did not mark bounds producer
1283*38fd1498Szrj as necessary and it is time to do it in case free
1284*38fd1498Szrj call is not removed. */
1285*38fd1498Szrj if (gimple_call_with_bounds_p (stmt))
1286*38fd1498Szrj {
1287*38fd1498Szrj gimple *bounds_def_stmt;
1288*38fd1498Szrj tree bounds = gimple_call_arg (stmt, 1);
1289*38fd1498Szrj gcc_assert (TREE_CODE (bounds) == SSA_NAME);
1290*38fd1498Szrj bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
1291*38fd1498Szrj if (bounds_def_stmt
1292*38fd1498Szrj && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
1293*38fd1498Szrj gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
1294*38fd1498Szrj gimple_plf (stmt, STMT_NECESSARY));
1295*38fd1498Szrj }
1296*38fd1498Szrj }
1297*38fd1498Szrj
1298*38fd1498Szrj /* If GSI is not necessary then remove it. */
1299*38fd1498Szrj if (!gimple_plf (stmt, STMT_NECESSARY))
1300*38fd1498Szrj {
1301*38fd1498Szrj /* Keep clobbers that we can keep live: drop a clobber only when one of its SSA uses is itself being removed. */
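		  /* Illustrative sketch (hypothetical names): a clobber like
		       MEM[(struct S *)p_1] ={v} {CLOBBER};
		     is kept as long as p_1 has not itself been scheduled
		     for removal.  */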
1302*38fd1498Szrj if (gimple_clobber_p (stmt))
1303*38fd1498Szrj {
1304*38fd1498Szrj ssa_op_iter iter;
1305*38fd1498Szrj use_operand_p use_p;
1306*38fd1498Szrj bool dead = false;
1307*38fd1498Szrj FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1308*38fd1498Szrj {
1309*38fd1498Szrj tree name = USE_FROM_PTR (use_p);
1310*38fd1498Szrj if (!SSA_NAME_IS_DEFAULT_DEF (name)
1311*38fd1498Szrj && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1312*38fd1498Szrj {
1313*38fd1498Szrj dead = true;
1314*38fd1498Szrj break;
1315*38fd1498Szrj }
1316*38fd1498Szrj }
1317*38fd1498Szrj if (!dead)
1318*38fd1498Szrj continue;
1319*38fd1498Szrj }
1320*38fd1498Szrj if (!is_gimple_debug (stmt))
1321*38fd1498Szrj something_changed = true;
1322*38fd1498Szrj remove_dead_stmt (&gsi, bb);
1323*38fd1498Szrj }
1324*38fd1498Szrj else if (is_gimple_call (stmt))
1325*38fd1498Szrj {
1326*38fd1498Szrj tree name = gimple_call_lhs (stmt);
1327*38fd1498Szrj
1328*38fd1498Szrj notice_special_calls (as_a <gcall *> (stmt));
1329*38fd1498Szrj
1330*38fd1498Szrj /* When LHS of var = call (); is dead, simplify it into
1331*38fd1498Szrj call (); saving one operand. */
1332*38fd1498Szrj if (name
1333*38fd1498Szrj && TREE_CODE (name) == SSA_NAME
1334*38fd1498Szrj && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1335*38fd1498Szrj /* Avoid doing so for allocation calls which we
1336*38fd1498Szrj did not mark as necessary, it will confuse the
1337*38fd1498Szrj special logic we apply to malloc/free pair removal. */
1338*38fd1498Szrj && (!(call = gimple_call_fndecl (stmt))
1339*38fd1498Szrj || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1340*38fd1498Szrj || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1341*38fd1498Szrj && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1342*38fd1498Szrj && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1343*38fd1498Szrj && !ALLOCA_FUNCTION_CODE_P
1344*38fd1498Szrj (DECL_FUNCTION_CODE (call))))
1345*38fd1498Szrj /* Avoid doing so for bndret calls for the same reason. */
1346*38fd1498Szrj && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
1347*38fd1498Szrj {
1348*38fd1498Szrj something_changed = true;
1349*38fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
1350*38fd1498Szrj {
1351*38fd1498Szrj fprintf (dump_file, "Deleting LHS of call: ");
1352*38fd1498Szrj print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1353*38fd1498Szrj fprintf (dump_file, "\n");
1354*38fd1498Szrj }
1355*38fd1498Szrj
1356*38fd1498Szrj gimple_call_set_lhs (stmt, NULL_TREE);
1357*38fd1498Szrj maybe_clean_or_replace_eh_stmt (stmt, stmt);
1358*38fd1498Szrj update_stmt (stmt);
1359*38fd1498Szrj release_ssa_name (name);
1360*38fd1498Szrj
1361*38fd1498Szrj /* GOMP_SIMD_LANE or ASAN_POISON without lhs is not
1362*38fd1498Szrj needed. */
1363*38fd1498Szrj if (gimple_call_internal_p (stmt))
1364*38fd1498Szrj switch (gimple_call_internal_fn (stmt))
1365*38fd1498Szrj {
1366*38fd1498Szrj case IFN_GOMP_SIMD_LANE:
1367*38fd1498Szrj case IFN_ASAN_POISON:
1368*38fd1498Szrj remove_dead_stmt (&gsi, bb);
1369*38fd1498Szrj break;
1370*38fd1498Szrj default:
1371*38fd1498Szrj break;
1372*38fd1498Szrj }
1373*38fd1498Szrj }
1374*38fd1498Szrj else if (gimple_call_internal_p (stmt))
1375*38fd1498Szrj switch (gimple_call_internal_fn (stmt))
1376*38fd1498Szrj {
1377*38fd1498Szrj case IFN_ADD_OVERFLOW:
1378*38fd1498Szrj maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1379*38fd1498Szrj break;
1380*38fd1498Szrj case IFN_SUB_OVERFLOW:
1381*38fd1498Szrj maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1382*38fd1498Szrj break;
1383*38fd1498Szrj case IFN_MUL_OVERFLOW:
1384*38fd1498Szrj maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1385*38fd1498Szrj break;
1386*38fd1498Szrj default:
1387*38fd1498Szrj break;
1388*38fd1498Szrj }
1389*38fd1498Szrj }
1390*38fd1498Szrj }
1391*38fd1498Szrj }
1392*38fd1498Szrj
1393*38fd1498Szrj h.release ();
1394*38fd1498Szrj
1395*38fd1498Szrj /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1396*38fd1498Szrj rendered some PHI nodes unreachable while they are still in use.
1397*38fd1498Szrj Mark them for renaming. */
1398*38fd1498Szrj if (cfg_altered)
1399*38fd1498Szrj {
1400*38fd1498Szrj basic_block prev_bb;
1401*38fd1498Szrj
1402*38fd1498Szrj find_unreachable_blocks ();
1403*38fd1498Szrj
1404*38fd1498Szrj /* Delete all unreachable basic blocks in reverse dominator order. */
1405*38fd1498Szrj for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1406*38fd1498Szrj bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1407*38fd1498Szrj {
1408*38fd1498Szrj prev_bb = bb->prev_bb;
1409*38fd1498Szrj
1410*38fd1498Szrj if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1411*38fd1498Szrj || !(bb->flags & BB_REACHABLE))
1412*38fd1498Szrj {
1413*38fd1498Szrj for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1414*38fd1498Szrj gsi_next (&gsi))
1415*38fd1498Szrj if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1416*38fd1498Szrj {
1417*38fd1498Szrj bool found = false;
1418*38fd1498Szrj imm_use_iterator iter;
1419*38fd1498Szrj
1420*38fd1498Szrj FOR_EACH_IMM_USE_STMT (stmt, iter,
1421*38fd1498Szrj gimple_phi_result (gsi.phi ()))
1422*38fd1498Szrj {
1423*38fd1498Szrj if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1424*38fd1498Szrj continue;
1425*38fd1498Szrj if (gimple_code (stmt) == GIMPLE_PHI
1426*38fd1498Szrj || gimple_plf (stmt, STMT_NECESSARY))
1427*38fd1498Szrj {
1428*38fd1498Szrj found = true;
1429*38fd1498Szrj BREAK_FROM_IMM_USE_STMT (iter);
1430*38fd1498Szrj }
1431*38fd1498Szrj }
1432*38fd1498Szrj if (found)
1433*38fd1498Szrj mark_virtual_phi_result_for_renaming (gsi.phi ());
1434*38fd1498Szrj }
1435*38fd1498Szrj
1436*38fd1498Szrj if (!(bb->flags & BB_REACHABLE))
1437*38fd1498Szrj {
1438*38fd1498Szrj /* Speed up the removal of blocks that don't
1439*38fd1498Szrj dominate others. Walking backwards, this should
1440*38fd1498Szrj be the common case. ??? Do we need to recompute
1441*38fd1498Szrj dominators because of cfg_altered? */
1442*38fd1498Szrj if (!first_dom_son (CDI_DOMINATORS, bb))
1443*38fd1498Szrj delete_basic_block (bb);
1444*38fd1498Szrj else
1445*38fd1498Szrj {
1446*38fd1498Szrj h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1447*38fd1498Szrj
1448*38fd1498Szrj while (h.length ())
1449*38fd1498Szrj {
1450*38fd1498Szrj bb = h.pop ();
1451*38fd1498Szrj prev_bb = bb->prev_bb;
1452*38fd1498Szrj /* Rearrangements to the CFG may have failed
1453*38fd1498Szrj to update the dominators tree, so that
1454*38fd1498Szrj formerly-dominated blocks are now
1455*38fd1498Szrj otherwise reachable. */
1456*38fd1498Szrj if (!!(bb->flags & BB_REACHABLE))
1457*38fd1498Szrj continue;
1458*38fd1498Szrj delete_basic_block (bb);
1459*38fd1498Szrj }
1460*38fd1498Szrj
1461*38fd1498Szrj h.release ();
1462*38fd1498Szrj }
1463*38fd1498Szrj }
1464*38fd1498Szrj }
1465*38fd1498Szrj }
1466*38fd1498Szrj }
1467*38fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
1468*38fd1498Szrj {
1469*38fd1498Szrj /* Remove dead PHI nodes. */
1470*38fd1498Szrj something_changed |= remove_dead_phis (bb);
1471*38fd1498Szrj }
1472*38fd1498Szrj
1473*38fd1498Szrj if (bb_postorder)
1474*38fd1498Szrj free (bb_postorder);
1475*38fd1498Szrj bb_postorder = NULL;
1476*38fd1498Szrj
1477*38fd1498Szrj return something_changed;
1478*38fd1498Szrj }
1479*38fd1498Szrj
1480*38fd1498Szrj
1481*38fd1498Szrj /* Print out removed statement statistics. */
1482*38fd1498Szrj
1483*38fd1498Szrj static void
1484*38fd1498Szrj print_stats (void)
1485*38fd1498Szrj {
1486*38fd1498Szrj float percg;
1487*38fd1498Szrj
1488*38fd1498Szrj percg = ((float) stats.removed / (float) stats.total) * 100;
1489*38fd1498Szrj fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1490*38fd1498Szrj stats.removed, stats.total, (int) percg);
1491*38fd1498Szrj
1492*38fd1498Szrj if (stats.total_phis == 0)
1493*38fd1498Szrj percg = 0;
1494*38fd1498Szrj else
1495*38fd1498Szrj percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1496*38fd1498Szrj
1497*38fd1498Szrj fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1498*38fd1498Szrj stats.removed_phis, stats.total_phis, (int) percg);
1499*38fd1498Szrj }
1500*38fd1498Szrj
1501*38fd1498Szrj /* Initialization for this pass. Set up the used data structures. */
1502*38fd1498Szrj
1503*38fd1498Szrj static void
1504*38fd1498Szrj tree_dce_init (bool aggressive)
1505*38fd1498Szrj {
1506*38fd1498Szrj memset ((void *) &stats, 0, sizeof (stats));
1507*38fd1498Szrj
1508*38fd1498Szrj if (aggressive)
1509*38fd1498Szrj {
1510*38fd1498Szrj last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1511*38fd1498Szrj bitmap_clear (last_stmt_necessary);
1512*38fd1498Szrj bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1513*38fd1498Szrj bitmap_clear (bb_contains_live_stmts);
1514*38fd1498Szrj }
1515*38fd1498Szrj
1516*38fd1498Szrj processed = sbitmap_alloc (num_ssa_names + 1);
1517*38fd1498Szrj bitmap_clear (processed);
1518*38fd1498Szrj
1519*38fd1498Szrj worklist.create (64);
1520*38fd1498Szrj cfg_altered = false;
1521*38fd1498Szrj }
1522*38fd1498Szrj
1523*38fd1498Szrj /* Cleanup after this pass. */
1524*38fd1498Szrj
1525*38fd1498Szrj static void
1526*38fd1498Szrj tree_dce_done (bool aggressive)
1527*38fd1498Szrj {
1528*38fd1498Szrj if (aggressive)
1529*38fd1498Szrj {
1530*38fd1498Szrj delete cd;
1531*38fd1498Szrj sbitmap_free (visited_control_parents);
1532*38fd1498Szrj sbitmap_free (last_stmt_necessary);
1533*38fd1498Szrj sbitmap_free (bb_contains_live_stmts);
1534*38fd1498Szrj bb_contains_live_stmts = NULL;
1535*38fd1498Szrj }
1536*38fd1498Szrj
1537*38fd1498Szrj sbitmap_free (processed);
1538*38fd1498Szrj
1539*38fd1498Szrj worklist.release ();
1540*38fd1498Szrj }
1541*38fd1498Szrj
1542*38fd1498Szrj /* Main routine to eliminate dead code.
1543*38fd1498Szrj
1544*38fd1498Szrj AGGRESSIVE controls the aggressiveness of the algorithm.
1545*38fd1498Szrj In conservative mode, we ignore control dependence and simply declare
1546*38fd1498Szrj all but the most trivially dead branches necessary. This mode is fast.
1547*38fd1498Szrj In aggressive mode, control dependences are taken into account, which
1548*38fd1498Szrj results in more dead code elimination, but at the cost of some time.
1549*38fd1498Szrj
1550*38fd1498Szrj FIXME: Aggressive mode before PRE doesn't work currently because
1551*38fd1498Szrj the dominance info is not invalidated after DCE1. This is
1552*38fd1498Szrj not an issue right now because we only run aggressive DCE
1553*38fd1498Szrj as the last tree SSA pass, but keep this in mind when you
1554*38fd1498Szrj start experimenting with pass ordering. */
1555*38fd1498Szrj
1556*38fd1498Szrj static unsigned int
1557*38fd1498Szrj perform_tree_ssa_dce (bool aggressive)
1558*38fd1498Szrj {
1559*38fd1498Szrj bool something_changed = 0;
1560*38fd1498Szrj
1561*38fd1498Szrj calculate_dominance_info (CDI_DOMINATORS);
1562*38fd1498Szrj
1563*38fd1498Szrj /* Preheaders are needed for SCEV to work.
1564*38fd1498Szrj Simple latches and recorded exits improve the chances that the loop will
1565*38fd1498Szrj be proved finite in testcases such as loop-15.c and loop-24.c. */
1566*38fd1498Szrj bool in_loop_pipeline = scev_initialized_p ();
1567*38fd1498Szrj if (aggressive && ! in_loop_pipeline)
1568*38fd1498Szrj {
1569*38fd1498Szrj scev_initialize ();
1570*38fd1498Szrj loop_optimizer_init (LOOPS_NORMAL
1571*38fd1498Szrj | LOOPS_HAVE_RECORDED_EXITS);
1572*38fd1498Szrj }
1573*38fd1498Szrj
1574*38fd1498Szrj tree_dce_init (aggressive);
1575*38fd1498Szrj
1576*38fd1498Szrj if (aggressive)
1577*38fd1498Szrj {
1578*38fd1498Szrj /* Compute control dependence. */
1579*38fd1498Szrj calculate_dominance_info (CDI_POST_DOMINATORS);
1580*38fd1498Szrj cd = new control_dependences ();
1581*38fd1498Szrj
1582*38fd1498Szrj visited_control_parents =
1583*38fd1498Szrj sbitmap_alloc (last_basic_block_for_fn (cfun));
1584*38fd1498Szrj bitmap_clear (visited_control_parents);
1585*38fd1498Szrj
1586*38fd1498Szrj mark_dfs_back_edges ();
1587*38fd1498Szrj }
1588*38fd1498Szrj
1589*38fd1498Szrj find_obviously_necessary_stmts (aggressive);
1590*38fd1498Szrj
1591*38fd1498Szrj if (aggressive && ! in_loop_pipeline)
1592*38fd1498Szrj {
1593*38fd1498Szrj loop_optimizer_finalize ();
1594*38fd1498Szrj scev_finalize ();
1595*38fd1498Szrj }
1596*38fd1498Szrj
1597*38fd1498Szrj longest_chain = 0;
1598*38fd1498Szrj total_chain = 0;
1599*38fd1498Szrj nr_walks = 0;
1600*38fd1498Szrj chain_ovfl = false;
1601*38fd1498Szrj visited = BITMAP_ALLOC (NULL);
1602*38fd1498Szrj propagate_necessity (aggressive);
1603*38fd1498Szrj BITMAP_FREE (visited);
1604*38fd1498Szrj
1605*38fd1498Szrj something_changed |= eliminate_unnecessary_stmts ();
1606*38fd1498Szrj something_changed |= cfg_altered;
1607*38fd1498Szrj
1608*38fd1498Szrj /* We do not update postdominators, so free them unconditionally. */
1609*38fd1498Szrj free_dominance_info (CDI_POST_DOMINATORS);
1610*38fd1498Szrj
1611*38fd1498Szrj /* If we removed paths in the CFG, then we need to update
1612*38fd1498Szrj dominators as well. I haven't investigated the possibility
1613*38fd1498Szrj of incrementally updating dominators. */
1614*38fd1498Szrj if (cfg_altered)
1615*38fd1498Szrj free_dominance_info (CDI_DOMINATORS);
1616*38fd1498Szrj
1617*38fd1498Szrj statistics_counter_event (cfun, "Statements deleted", stats.removed);
1618*38fd1498Szrj statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1619*38fd1498Szrj
1620*38fd1498Szrj /* Debugging dumps. */
1621*38fd1498Szrj if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1622*38fd1498Szrj print_stats ();
1623*38fd1498Szrj
1624*38fd1498Szrj tree_dce_done (aggressive);
1625*38fd1498Szrj
1626*38fd1498Szrj if (something_changed)
1627*38fd1498Szrj {
1628*38fd1498Szrj free_numbers_of_iterations_estimates (cfun);
1629*38fd1498Szrj if (in_loop_pipeline)
1630*38fd1498Szrj scev_reset ();
1631*38fd1498Szrj return TODO_update_ssa | TODO_cleanup_cfg;
1632*38fd1498Szrj }
1633*38fd1498Szrj return 0;
1634*38fd1498Szrj }
1635*38fd1498Szrj
1636*38fd1498Szrj /* Pass entry points. */
1637*38fd1498Szrj static unsigned int
1638*38fd1498Szrj tree_ssa_dce (void)
1639*38fd1498Szrj {
1640*38fd1498Szrj return perform_tree_ssa_dce (/*aggressive=*/false);
1641*38fd1498Szrj }
1642*38fd1498Szrj
1643*38fd1498Szrj static unsigned int
1644*38fd1498Szrj tree_ssa_cd_dce (void)
1645*38fd1498Szrj {
1646*38fd1498Szrj return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1647*38fd1498Szrj }
1648*38fd1498Szrj
1649*38fd1498Szrj namespace {
1650*38fd1498Szrj
1651*38fd1498Szrj const pass_data pass_data_dce =
1652*38fd1498Szrj {
1653*38fd1498Szrj GIMPLE_PASS, /* type */
1654*38fd1498Szrj "dce", /* name */
1655*38fd1498Szrj OPTGROUP_NONE, /* optinfo_flags */
1656*38fd1498Szrj TV_TREE_DCE, /* tv_id */
1657*38fd1498Szrj ( PROP_cfg | PROP_ssa ), /* properties_required */
1658*38fd1498Szrj 0, /* properties_provided */
1659*38fd1498Szrj 0, /* properties_destroyed */
1660*38fd1498Szrj 0, /* todo_flags_start */
1661*38fd1498Szrj 0, /* todo_flags_finish */
1662*38fd1498Szrj };
1663*38fd1498Szrj
1664*38fd1498Szrj class pass_dce : public gimple_opt_pass
1665*38fd1498Szrj {
1666*38fd1498Szrj public:
1667*38fd1498Szrj pass_dce (gcc::context *ctxt)
1668*38fd1498Szrj : gimple_opt_pass (pass_data_dce, ctxt)
1669*38fd1498Szrj {}
1670*38fd1498Szrj
1671*38fd1498Szrj /* opt_pass methods: */
1672*38fd1498Szrj opt_pass * clone () { return new pass_dce (m_ctxt); }
1673*38fd1498Szrj virtual bool gate (function *) { return flag_tree_dce != 0; }
1674*38fd1498Szrj virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1675*38fd1498Szrj
1676*38fd1498Szrj }; // class pass_dce
1677*38fd1498Szrj
1678*38fd1498Szrj } // anon namespace
1679*38fd1498Szrj
1680*38fd1498Szrj gimple_opt_pass *
1681*38fd1498Szrj make_pass_dce (gcc::context *ctxt)
1682*38fd1498Szrj {
1683*38fd1498Szrj return new pass_dce (ctxt);
1684*38fd1498Szrj }
1685*38fd1498Szrj
1686*38fd1498Szrj namespace {
1687*38fd1498Szrj
1688*38fd1498Szrj const pass_data pass_data_cd_dce =
1689*38fd1498Szrj {
1690*38fd1498Szrj GIMPLE_PASS, /* type */
1691*38fd1498Szrj "cddce", /* name */
1692*38fd1498Szrj OPTGROUP_NONE, /* optinfo_flags */
1693*38fd1498Szrj TV_TREE_CD_DCE, /* tv_id */
1694*38fd1498Szrj ( PROP_cfg | PROP_ssa ), /* properties_required */
1695*38fd1498Szrj 0, /* properties_provided */
1696*38fd1498Szrj 0, /* properties_destroyed */
1697*38fd1498Szrj 0, /* todo_flags_start */
1698*38fd1498Szrj 0, /* todo_flags_finish */
1699*38fd1498Szrj };
1700*38fd1498Szrj
1701*38fd1498Szrj class pass_cd_dce : public gimple_opt_pass
1702*38fd1498Szrj {
1703*38fd1498Szrj public:
1704*38fd1498Szrj pass_cd_dce (gcc::context *ctxt)
1705*38fd1498Szrj : gimple_opt_pass (pass_data_cd_dce, ctxt)
1706*38fd1498Szrj {}
1707*38fd1498Szrj
1708*38fd1498Szrj /* opt_pass methods: */
1709*38fd1498Szrj opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1710*38fd1498Szrj virtual bool gate (function *) { return flag_tree_dce != 0; }
1711*38fd1498Szrj virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }
1712*38fd1498Szrj
1713*38fd1498Szrj }; // class pass_cd_dce
1714*38fd1498Szrj
1715*38fd1498Szrj } // anon namespace
1716*38fd1498Szrj
1717*38fd1498Szrj gimple_opt_pass *
1718*38fd1498Szrj make_pass_cd_dce (gcc::context *ctxt)
1719*38fd1498Szrj {
1720*38fd1498Szrj return new pass_cd_dce (ctxt);
1721*38fd1498Szrj }
1722*38fd1498Szrj
1723*38fd1498Szrj
1724*38fd1498Szrj /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
1725*38fd1498Szrj is consumed by this function. The function has linear complexity in
1726*38fd1498Szrj the number of dead stmts, with a constant factor proportional to the
1727*38fd1498Szrj average number of SSA use operands per statement. */
1728*38fd1498Szrj
1729*38fd1498Szrj void
1730*38fd1498Szrj simple_dce_from_worklist (bitmap worklist)
1731*38fd1498Szrj {
1732*38fd1498Szrj while (! bitmap_empty_p (worklist))
1733*38fd1498Szrj {
1734*38fd1498Szrj /* Pop item. */
1735*38fd1498Szrj unsigned i = bitmap_first_set_bit (worklist);
1736*38fd1498Szrj bitmap_clear_bit (worklist, i);
1737*38fd1498Szrj
1738*38fd1498Szrj tree def = ssa_name (i);
1739*38fd1498Szrj /* Removed by somebody else or still in use. */
1740*38fd1498Szrj if (! def || ! has_zero_uses (def))
1741*38fd1498Szrj continue;
1742*38fd1498Szrj
1743*38fd1498Szrj gimple *t = SSA_NAME_DEF_STMT (def);
1744*38fd1498Szrj if (gimple_has_side_effects (t))
1745*38fd1498Szrj continue;
1746*38fd1498Szrj
1747*38fd1498Szrj /* Add uses to the worklist. */
1748*38fd1498Szrj ssa_op_iter iter;
1749*38fd1498Szrj use_operand_p use_p;
1750*38fd1498Szrj FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
1751*38fd1498Szrj {
1752*38fd1498Szrj tree use = USE_FROM_PTR (use_p);
1753*38fd1498Szrj if (TREE_CODE (use) == SSA_NAME
1754*38fd1498Szrj && ! SSA_NAME_IS_DEFAULT_DEF (use))
1755*38fd1498Szrj bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
1756*38fd1498Szrj }
1757*38fd1498Szrj
1758*38fd1498Szrj /* Remove stmt. */
1759*38fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
1760*38fd1498Szrj {
1761*38fd1498Szrj fprintf (dump_file, "Removing dead stmt:");
1762*38fd1498Szrj print_gimple_stmt (dump_file, t, 0);
1763*38fd1498Szrj }
1764*38fd1498Szrj gimple_stmt_iterator gsi = gsi_for_stmt (t);
1765*38fd1498Szrj if (gimple_code (t) == GIMPLE_PHI)
1766*38fd1498Szrj remove_phi_node (&gsi, true);
1767*38fd1498Szrj else
1768*38fd1498Szrj {
1769*38fd1498Szrj gsi_remove (&gsi, true);
1770*38fd1498Szrj release_defs (t);
1771*38fd1498Szrj }
1772*38fd1498Szrj }
1773*38fd1498Szrj }
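/* A hedged usage sketch (hypothetical caller; some_ssa_name is a stand-in,
   not part of this file): a pass that suspects a set of SSA definitions
   just became dead can seed a worklist and let the helper above clean up:

     bitmap wl = BITMAP_ALLOC (NULL);
     bitmap_set_bit (wl, SSA_NAME_VERSION (some_ssa_name));
     simple_dce_from_worklist (wl);
     BITMAP_FREE (wl);

   Only side-effect-free definitions whose results have no remaining uses
   are removed, transitively following their SSA use operands.  */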