1*e4b17023SJohn Marino /* Control flow graph analysis code for GNU compiler.
2*e4b17023SJohn Marino Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3*e4b17023SJohn Marino 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2010
4*e4b17023SJohn Marino Free Software Foundation, Inc.
5*e4b17023SJohn Marino
6*e4b17023SJohn Marino This file is part of GCC.
7*e4b17023SJohn Marino
8*e4b17023SJohn Marino GCC is free software; you can redistribute it and/or modify it under
9*e4b17023SJohn Marino the terms of the GNU General Public License as published by the Free
10*e4b17023SJohn Marino Software Foundation; either version 3, or (at your option) any later
11*e4b17023SJohn Marino version.
12*e4b17023SJohn Marino
13*e4b17023SJohn Marino GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14*e4b17023SJohn Marino WARRANTY; without even the implied warranty of MERCHANTABILITY or
15*e4b17023SJohn Marino FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16*e4b17023SJohn Marino for more details.
17*e4b17023SJohn Marino
18*e4b17023SJohn Marino You should have received a copy of the GNU General Public License
19*e4b17023SJohn Marino along with GCC; see the file COPYING3. If not see
20*e4b17023SJohn Marino <http://www.gnu.org/licenses/>. */
21*e4b17023SJohn Marino
22*e4b17023SJohn Marino /* This file contains various simple utilities to analyze the CFG. */
23*e4b17023SJohn Marino #include "config.h"
24*e4b17023SJohn Marino #include "system.h"
25*e4b17023SJohn Marino #include "coretypes.h"
26*e4b17023SJohn Marino #include "tm.h"
27*e4b17023SJohn Marino #include "rtl.h"
28*e4b17023SJohn Marino #include "obstack.h"
29*e4b17023SJohn Marino #include "hard-reg-set.h"
30*e4b17023SJohn Marino #include "basic-block.h"
31*e4b17023SJohn Marino #include "insn-config.h"
32*e4b17023SJohn Marino #include "recog.h"
33*e4b17023SJohn Marino #include "diagnostic-core.h"
34*e4b17023SJohn Marino #include "tm_p.h"
35*e4b17023SJohn Marino #include "vec.h"
36*e4b17023SJohn Marino #include "vecprim.h"
37*e4b17023SJohn Marino #include "bitmap.h"
38*e4b17023SJohn Marino #include "sbitmap.h"
39*e4b17023SJohn Marino #include "timevar.h"
40*e4b17023SJohn Marino
41*e4b17023SJohn Marino /* Store the data structures necessary for depth-first search. */
/* Store the data structures necessary for depth-first search.
   Used by the reverse-graph DFS driven from connect_infinite_loops_to_exit
   via the flow_dfs_compute_reverse_* helpers below.  */
struct depth_first_search_dsS {
  /* Stack for backtracking during the algorithm; holds basic blocks
     whose predecessors are still to be examined.  */
  basic_block *stack;

  /* Number of entries in the stack.  That is, positions 0, ..., sp-1
     are occupied.  */
  unsigned int sp;

  /* Record of basic blocks already seen by the depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

/* Forward declarations for the reverse-graph DFS helpers and the
   liveness-aware active-insn predicate defined later in this file.  */
static void flow_dfs_compute_reverse_init (depth_first_search_ds);
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
					     basic_block);
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds,
						     basic_block);
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
static bool flow_active_insn_p (const_rtx);
62*e4b17023SJohn Marino
63*e4b17023SJohn Marino /* Like active_insn_p, except keep the return value clobber around
64*e4b17023SJohn Marino even after reload. */
65*e4b17023SJohn Marino
66*e4b17023SJohn Marino static bool
flow_active_insn_p(const_rtx insn)67*e4b17023SJohn Marino flow_active_insn_p (const_rtx insn)
68*e4b17023SJohn Marino {
69*e4b17023SJohn Marino if (active_insn_p (insn))
70*e4b17023SJohn Marino return true;
71*e4b17023SJohn Marino
72*e4b17023SJohn Marino /* A clobber of the function return value exists for buggy
73*e4b17023SJohn Marino programs that fail to return a value. Its effect is to
74*e4b17023SJohn Marino keep the return value from being live across the entire
75*e4b17023SJohn Marino function. If we allow it to be skipped, we introduce the
76*e4b17023SJohn Marino possibility for register lifetime confusion. */
77*e4b17023SJohn Marino if (GET_CODE (PATTERN (insn)) == CLOBBER
78*e4b17023SJohn Marino && REG_P (XEXP (PATTERN (insn), 0))
79*e4b17023SJohn Marino && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
80*e4b17023SJohn Marino return true;
81*e4b17023SJohn Marino
82*e4b17023SJohn Marino return false;
83*e4b17023SJohn Marino }
84*e4b17023SJohn Marino
85*e4b17023SJohn Marino /* Return true if the block has no effect and only forwards control flow to
86*e4b17023SJohn Marino its single destination. */
87*e4b17023SJohn Marino
88*e4b17023SJohn Marino bool
forwarder_block_p(const_basic_block bb)89*e4b17023SJohn Marino forwarder_block_p (const_basic_block bb)
90*e4b17023SJohn Marino {
91*e4b17023SJohn Marino rtx insn;
92*e4b17023SJohn Marino
93*e4b17023SJohn Marino if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
94*e4b17023SJohn Marino || !single_succ_p (bb))
95*e4b17023SJohn Marino return false;
96*e4b17023SJohn Marino
97*e4b17023SJohn Marino for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
98*e4b17023SJohn Marino if (INSN_P (insn) && flow_active_insn_p (insn))
99*e4b17023SJohn Marino return false;
100*e4b17023SJohn Marino
101*e4b17023SJohn Marino return (!INSN_P (insn)
102*e4b17023SJohn Marino || (JUMP_P (insn) && simplejump_p (insn))
103*e4b17023SJohn Marino || !flow_active_insn_p (insn));
104*e4b17023SJohn Marino }
105*e4b17023SJohn Marino
106*e4b17023SJohn Marino /* Return nonzero if we can reach target from src by falling through. */
107*e4b17023SJohn Marino
108*e4b17023SJohn Marino bool
can_fallthru(basic_block src,basic_block target)109*e4b17023SJohn Marino can_fallthru (basic_block src, basic_block target)
110*e4b17023SJohn Marino {
111*e4b17023SJohn Marino rtx insn = BB_END (src);
112*e4b17023SJohn Marino rtx insn2;
113*e4b17023SJohn Marino edge e;
114*e4b17023SJohn Marino edge_iterator ei;
115*e4b17023SJohn Marino
116*e4b17023SJohn Marino if (target == EXIT_BLOCK_PTR)
117*e4b17023SJohn Marino return true;
118*e4b17023SJohn Marino if (src->next_bb != target)
119*e4b17023SJohn Marino return 0;
120*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, src->succs)
121*e4b17023SJohn Marino if (e->dest == EXIT_BLOCK_PTR
122*e4b17023SJohn Marino && e->flags & EDGE_FALLTHRU)
123*e4b17023SJohn Marino return 0;
124*e4b17023SJohn Marino
125*e4b17023SJohn Marino insn2 = BB_HEAD (target);
126*e4b17023SJohn Marino if (insn2 && !active_insn_p (insn2))
127*e4b17023SJohn Marino insn2 = next_active_insn (insn2);
128*e4b17023SJohn Marino
129*e4b17023SJohn Marino /* ??? Later we may add code to move jump tables offline. */
130*e4b17023SJohn Marino return next_active_insn (insn) == insn2;
131*e4b17023SJohn Marino }
132*e4b17023SJohn Marino
133*e4b17023SJohn Marino /* Return nonzero if we could reach target from src by falling through,
134*e4b17023SJohn Marino if the target was made adjacent. If we already have a fall-through
135*e4b17023SJohn Marino edge to the exit block, we can't do that. */
136*e4b17023SJohn Marino bool
could_fall_through(basic_block src,basic_block target)137*e4b17023SJohn Marino could_fall_through (basic_block src, basic_block target)
138*e4b17023SJohn Marino {
139*e4b17023SJohn Marino edge e;
140*e4b17023SJohn Marino edge_iterator ei;
141*e4b17023SJohn Marino
142*e4b17023SJohn Marino if (target == EXIT_BLOCK_PTR)
143*e4b17023SJohn Marino return true;
144*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, src->succs)
145*e4b17023SJohn Marino if (e->dest == EXIT_BLOCK_PTR
146*e4b17023SJohn Marino && e->flags & EDGE_FALLTHRU)
147*e4b17023SJohn Marino return 0;
148*e4b17023SJohn Marino return true;
149*e4b17023SJohn Marino }
150*e4b17023SJohn Marino
/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge_iterator *stack;		/* DFS worklist: one iterator per open block.  */
  int *pre;			/* Preorder (discovery) number, per block index.  */
  int *post;			/* Postorder (finish) number, per block index.  */
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  XCNEWVEC
     zero-fills, so post[i] == 0 below means "not finished yet".  */
  pre = XCNEWVEC (int, last_basic_block);
  post = XCNEWVEC (int, last_basic_block);

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      /* Clear any stale back-edge marking; it is re-established below
	 only if this traversal still finds the edge to be a back edge.  */
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = ei_start (dest->succs);
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* SRC -> DEST is a back edge iff DEST was discovered no later
	     than SRC (pre[src] >= pre[dest]) and DEST is still on the
	     DFS stack, i.e. has no postorder number yet.  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  /* When the last outgoing edge of SRC has been examined,
	     SRC is finished; assign its postorder number.  */
	  if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
242*e4b17023SJohn Marino
/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Start from a clean slate for this edge.  */
	  e->flags &= ~EDGE_CAN_FALLTHRU;

	  /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags |= EDGE_CAN_FALLTHRU;
	}

      /* If the BB ends with an invertible condjump all (2) edges are
	 CAN_FALLTHRU edges.  */
      if (EDGE_COUNT (bb->succs) != 2)
	continue;
      if (!any_condjump_p (BB_END (bb)))
	continue;
      /* Probe invertibility by actually inverting the jump; if that
	 succeeds, invert it a second time to restore the original
	 insn.  Only a jump that survives the round trip marks both
	 outgoing edges as CAN_FALLTHRU.  */
      if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
	continue;
      invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
      EDGE_SUCC (bb, 0)->flags |= EDGE_CAN_FALLTHRU;
      EDGE_SUCC (bb, 1)->flags |= EDGE_CAN_FALLTHRU;
    }
}
277*e4b17023SJohn Marino
278*e4b17023SJohn Marino /* Find unreachable blocks. An unreachable block will have 0 in
279*e4b17023SJohn Marino the reachable bit in block->flags. A nonzero value indicates the
280*e4b17023SJohn Marino block is reachable. */
281*e4b17023SJohn Marino
282*e4b17023SJohn Marino void
find_unreachable_blocks(void)283*e4b17023SJohn Marino find_unreachable_blocks (void)
284*e4b17023SJohn Marino {
285*e4b17023SJohn Marino edge e;
286*e4b17023SJohn Marino edge_iterator ei;
287*e4b17023SJohn Marino basic_block *tos, *worklist, bb;
288*e4b17023SJohn Marino
289*e4b17023SJohn Marino tos = worklist = XNEWVEC (basic_block, n_basic_blocks);
290*e4b17023SJohn Marino
291*e4b17023SJohn Marino /* Clear all the reachability flags. */
292*e4b17023SJohn Marino
293*e4b17023SJohn Marino FOR_EACH_BB (bb)
294*e4b17023SJohn Marino bb->flags &= ~BB_REACHABLE;
295*e4b17023SJohn Marino
296*e4b17023SJohn Marino /* Add our starting points to the worklist. Almost always there will
297*e4b17023SJohn Marino be only one. It isn't inconceivable that we might one day directly
298*e4b17023SJohn Marino support Fortran alternate entry points. */
299*e4b17023SJohn Marino
300*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
301*e4b17023SJohn Marino {
302*e4b17023SJohn Marino *tos++ = e->dest;
303*e4b17023SJohn Marino
304*e4b17023SJohn Marino /* Mark the block reachable. */
305*e4b17023SJohn Marino e->dest->flags |= BB_REACHABLE;
306*e4b17023SJohn Marino }
307*e4b17023SJohn Marino
308*e4b17023SJohn Marino /* Iterate: find everything reachable from what we've already seen. */
309*e4b17023SJohn Marino
310*e4b17023SJohn Marino while (tos != worklist)
311*e4b17023SJohn Marino {
312*e4b17023SJohn Marino basic_block b = *--tos;
313*e4b17023SJohn Marino
314*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, b->succs)
315*e4b17023SJohn Marino {
316*e4b17023SJohn Marino basic_block dest = e->dest;
317*e4b17023SJohn Marino
318*e4b17023SJohn Marino if (!(dest->flags & BB_REACHABLE))
319*e4b17023SJohn Marino {
320*e4b17023SJohn Marino *tos++ = dest;
321*e4b17023SJohn Marino dest->flags |= BB_REACHABLE;
322*e4b17023SJohn Marino }
323*e4b17023SJohn Marino }
324*e4b17023SJohn Marino }
325*e4b17023SJohn Marino
326*e4b17023SJohn Marino free (worklist);
327*e4b17023SJohn Marino }
328*e4b17023SJohn Marino
329*e4b17023SJohn Marino /* Functions to access an edge list with a vector representation.
330*e4b17023SJohn Marino Enough data is kept such that given an index number, the
331*e4b17023SJohn Marino pred and succ that edge represents can be determined, or
332*e4b17023SJohn Marino given a pred and a succ, its index number can be returned.
333*e4b17023SJohn Marino This allows algorithms which consume a lot of memory to
334*e4b17023SJohn Marino represent the normally full matrix of edge (pred,succ) with a
335*e4b17023SJohn Marino single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
336*e4b17023SJohn Marino wasted space in the client code due to sparse flow graphs. */
337*e4b17023SJohn Marino
338*e4b17023SJohn Marino /* This functions initializes the edge list. Basically the entire
339*e4b17023SJohn Marino flowgraph is processed, and all edges are assigned a number,
340*e4b17023SJohn Marino and the data structure is filled in. */
341*e4b17023SJohn Marino
342*e4b17023SJohn Marino struct edge_list *
create_edge_list(void)343*e4b17023SJohn Marino create_edge_list (void)
344*e4b17023SJohn Marino {
345*e4b17023SJohn Marino struct edge_list *elist;
346*e4b17023SJohn Marino edge e;
347*e4b17023SJohn Marino int num_edges;
348*e4b17023SJohn Marino int block_count;
349*e4b17023SJohn Marino basic_block bb;
350*e4b17023SJohn Marino edge_iterator ei;
351*e4b17023SJohn Marino
352*e4b17023SJohn Marino block_count = n_basic_blocks; /* Include the entry and exit blocks. */
353*e4b17023SJohn Marino
354*e4b17023SJohn Marino num_edges = 0;
355*e4b17023SJohn Marino
356*e4b17023SJohn Marino /* Determine the number of edges in the flow graph by counting successor
357*e4b17023SJohn Marino edges on each basic block. */
358*e4b17023SJohn Marino FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
359*e4b17023SJohn Marino {
360*e4b17023SJohn Marino num_edges += EDGE_COUNT (bb->succs);
361*e4b17023SJohn Marino }
362*e4b17023SJohn Marino
363*e4b17023SJohn Marino elist = XNEW (struct edge_list);
364*e4b17023SJohn Marino elist->num_blocks = block_count;
365*e4b17023SJohn Marino elist->num_edges = num_edges;
366*e4b17023SJohn Marino elist->index_to_edge = XNEWVEC (edge, num_edges);
367*e4b17023SJohn Marino
368*e4b17023SJohn Marino num_edges = 0;
369*e4b17023SJohn Marino
370*e4b17023SJohn Marino /* Follow successors of blocks, and register these edges. */
371*e4b17023SJohn Marino FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
372*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, bb->succs)
373*e4b17023SJohn Marino elist->index_to_edge[num_edges++] = e;
374*e4b17023SJohn Marino
375*e4b17023SJohn Marino return elist;
376*e4b17023SJohn Marino }
377*e4b17023SJohn Marino
378*e4b17023SJohn Marino /* This function free's memory associated with an edge list. */
379*e4b17023SJohn Marino
380*e4b17023SJohn Marino void
free_edge_list(struct edge_list * elist)381*e4b17023SJohn Marino free_edge_list (struct edge_list *elist)
382*e4b17023SJohn Marino {
383*e4b17023SJohn Marino if (elist)
384*e4b17023SJohn Marino {
385*e4b17023SJohn Marino free (elist->index_to_edge);
386*e4b17023SJohn Marino free (elist);
387*e4b17023SJohn Marino }
388*e4b17023SJohn Marino }
389*e4b17023SJohn Marino
390*e4b17023SJohn Marino /* This function provides debug output showing an edge list. */
391*e4b17023SJohn Marino
392*e4b17023SJohn Marino DEBUG_FUNCTION void
print_edge_list(FILE * f,struct edge_list * elist)393*e4b17023SJohn Marino print_edge_list (FILE *f, struct edge_list *elist)
394*e4b17023SJohn Marino {
395*e4b17023SJohn Marino int x;
396*e4b17023SJohn Marino
397*e4b17023SJohn Marino fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
398*e4b17023SJohn Marino elist->num_blocks, elist->num_edges);
399*e4b17023SJohn Marino
400*e4b17023SJohn Marino for (x = 0; x < elist->num_edges; x++)
401*e4b17023SJohn Marino {
402*e4b17023SJohn Marino fprintf (f, " %-4d - edge(", x);
403*e4b17023SJohn Marino if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
404*e4b17023SJohn Marino fprintf (f, "entry,");
405*e4b17023SJohn Marino else
406*e4b17023SJohn Marino fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
407*e4b17023SJohn Marino
408*e4b17023SJohn Marino if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
409*e4b17023SJohn Marino fprintf (f, "exit)\n");
410*e4b17023SJohn Marino else
411*e4b17023SJohn Marino fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
412*e4b17023SJohn Marino }
413*e4b17023SJohn Marino }
414*e4b17023SJohn Marino
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  Problems are reported to F; nothing is fixed.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  /* Phase 1: every CFG edge must have an index, and that index must
     map back to the same predecessor and successor blocks.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  This is an O(blocks^2)
     exhaustive check over every (pred, succ) block pair.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	/* Look for the edge in both the successor list of P and the
	   predecessor list of S; either sighting counts.  */
	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	/* The edge list and the CFG must agree in both directions.  */
	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
481*e4b17023SJohn Marino
482*e4b17023SJohn Marino /* Given PRED and SUCC blocks, return the edge which connects the blocks.
483*e4b17023SJohn Marino If no such edge exists, return NULL. */
484*e4b17023SJohn Marino
485*e4b17023SJohn Marino edge
find_edge(basic_block pred,basic_block succ)486*e4b17023SJohn Marino find_edge (basic_block pred, basic_block succ)
487*e4b17023SJohn Marino {
488*e4b17023SJohn Marino edge e;
489*e4b17023SJohn Marino edge_iterator ei;
490*e4b17023SJohn Marino
491*e4b17023SJohn Marino if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
492*e4b17023SJohn Marino {
493*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, pred->succs)
494*e4b17023SJohn Marino if (e->dest == succ)
495*e4b17023SJohn Marino return e;
496*e4b17023SJohn Marino }
497*e4b17023SJohn Marino else
498*e4b17023SJohn Marino {
499*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, succ->preds)
500*e4b17023SJohn Marino if (e->src == pred)
501*e4b17023SJohn Marino return e;
502*e4b17023SJohn Marino }
503*e4b17023SJohn Marino
504*e4b17023SJohn Marino return NULL;
505*e4b17023SJohn Marino }
506*e4b17023SJohn Marino
507*e4b17023SJohn Marino /* This routine will determine what, if any, edge there is between
508*e4b17023SJohn Marino a specified predecessor and successor. */
509*e4b17023SJohn Marino
510*e4b17023SJohn Marino int
find_edge_index(struct edge_list * edge_list,basic_block pred,basic_block succ)511*e4b17023SJohn Marino find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
512*e4b17023SJohn Marino {
513*e4b17023SJohn Marino int x;
514*e4b17023SJohn Marino
515*e4b17023SJohn Marino for (x = 0; x < NUM_EDGES (edge_list); x++)
516*e4b17023SJohn Marino if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
517*e4b17023SJohn Marino && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
518*e4b17023SJohn Marino return x;
519*e4b17023SJohn Marino
520*e4b17023SJohn Marino return (EDGE_INDEX_NO_EDGE);
521*e4b17023SJohn Marino }
522*e4b17023SJohn Marino
523*e4b17023SJohn Marino /* Dump the list of basic blocks in the bitmap NODES. */
524*e4b17023SJohn Marino
525*e4b17023SJohn Marino void
flow_nodes_print(const char * str,const_sbitmap nodes,FILE * file)526*e4b17023SJohn Marino flow_nodes_print (const char *str, const_sbitmap nodes, FILE *file)
527*e4b17023SJohn Marino {
528*e4b17023SJohn Marino unsigned int node = 0;
529*e4b17023SJohn Marino sbitmap_iterator sbi;
530*e4b17023SJohn Marino
531*e4b17023SJohn Marino if (! nodes)
532*e4b17023SJohn Marino return;
533*e4b17023SJohn Marino
534*e4b17023SJohn Marino fprintf (file, "%s { ", str);
535*e4b17023SJohn Marino EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, sbi)
536*e4b17023SJohn Marino fprintf (file, "%d ", node);
537*e4b17023SJohn Marino fputs ("}\n", file);
538*e4b17023SJohn Marino }
539*e4b17023SJohn Marino
540*e4b17023SJohn Marino /* Dump the list of edges in the array EDGE_LIST. */
541*e4b17023SJohn Marino
542*e4b17023SJohn Marino void
flow_edge_list_print(const char * str,const edge * edge_list,int num_edges,FILE * file)543*e4b17023SJohn Marino flow_edge_list_print (const char *str, const edge *edge_list, int num_edges, FILE *file)
544*e4b17023SJohn Marino {
545*e4b17023SJohn Marino int i;
546*e4b17023SJohn Marino
547*e4b17023SJohn Marino if (! edge_list)
548*e4b17023SJohn Marino return;
549*e4b17023SJohn Marino
550*e4b17023SJohn Marino fprintf (file, "%s { ", str);
551*e4b17023SJohn Marino for (i = 0; i < num_edges; i++)
552*e4b17023SJohn Marino fprintf (file, "%d->%d ", edge_list[i]->src->index,
553*e4b17023SJohn Marino edge_list[i]->dest->index);
554*e4b17023SJohn Marino
555*e4b17023SJohn Marino fputs ("}\n", file);
556*e4b17023SJohn Marino }
557*e4b17023SJohn Marino
558*e4b17023SJohn Marino
/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* We cannot use FOR_EACH_EDGE here: remove_edge deletes the current
     element from the predecessor vector, so the iterator is advanced
     only when the edge is kept.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}
577*e4b17023SJohn Marino
578*e4b17023SJohn Marino /* This routine will remove all fake edges from the flow graph. If
579*e4b17023SJohn Marino we remove all fake successors, it will automatically remove all
580*e4b17023SJohn Marino fake predecessors. */
581*e4b17023SJohn Marino
582*e4b17023SJohn Marino void
remove_fake_edges(void)583*e4b17023SJohn Marino remove_fake_edges (void)
584*e4b17023SJohn Marino {
585*e4b17023SJohn Marino basic_block bb;
586*e4b17023SJohn Marino
587*e4b17023SJohn Marino FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
588*e4b17023SJohn Marino remove_fake_predecessors (bb);
589*e4b17023SJohn Marino }
590*e4b17023SJohn Marino
/* This routine will remove all fake edges to the EXIT_BLOCK.
   Fake predecessor edges of the exit block are exactly the edges
   added by add_noreturn_fake_exit_edges and
   connect_infinite_loops_to_exit.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR);
}
598*e4b17023SJohn Marino
599*e4b17023SJohn Marino
600*e4b17023SJohn Marino /* This function will add a fake edge between any block which has no
601*e4b17023SJohn Marino successors, and the exit block. Some data flow equations require these
602*e4b17023SJohn Marino edges to exist. */
603*e4b17023SJohn Marino
604*e4b17023SJohn Marino void
add_noreturn_fake_exit_edges(void)605*e4b17023SJohn Marino add_noreturn_fake_exit_edges (void)
606*e4b17023SJohn Marino {
607*e4b17023SJohn Marino basic_block bb;
608*e4b17023SJohn Marino
609*e4b17023SJohn Marino FOR_EACH_BB (bb)
610*e4b17023SJohn Marino if (EDGE_COUNT (bb->succs) == 0)
611*e4b17023SJohn Marino make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
612*e4b17023SJohn Marino }
613*e4b17023SJohn Marino
614*e4b17023SJohn Marino /* This function adds a fake edge between any infinite loops to the
615*e4b17023SJohn Marino exit block. Some optimizations require a path from each node to
616*e4b17023SJohn Marino the exit node.
617*e4b17023SJohn Marino
618*e4b17023SJohn Marino See also Morgan, Figure 3.10, pp. 82-83.
619*e4b17023SJohn Marino
620*e4b17023SJohn Marino The current implementation is ugly, not attempting to minimize the
621*e4b17023SJohn Marino number of inserted fake edges. To reduce the number of fake edges
622*e4b17023SJohn Marino to insert, add fake edges from _innermost_ loops containing only
623*e4b17023SJohn Marino nodes not reachable from the exit block. */
624*e4b17023SJohn Marino
625*e4b17023SJohn Marino void
connect_infinite_loops_to_exit(void)626*e4b17023SJohn Marino connect_infinite_loops_to_exit (void)
627*e4b17023SJohn Marino {
628*e4b17023SJohn Marino basic_block unvisited_block = EXIT_BLOCK_PTR;
629*e4b17023SJohn Marino struct depth_first_search_dsS dfs_ds;
630*e4b17023SJohn Marino
631*e4b17023SJohn Marino /* Perform depth-first search in the reverse graph to find nodes
632*e4b17023SJohn Marino reachable from the exit block. */
633*e4b17023SJohn Marino flow_dfs_compute_reverse_init (&dfs_ds);
634*e4b17023SJohn Marino flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
635*e4b17023SJohn Marino
636*e4b17023SJohn Marino /* Repeatedly add fake edges, updating the unreachable nodes. */
637*e4b17023SJohn Marino while (1)
638*e4b17023SJohn Marino {
639*e4b17023SJohn Marino unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds,
640*e4b17023SJohn Marino unvisited_block);
641*e4b17023SJohn Marino if (!unvisited_block)
642*e4b17023SJohn Marino break;
643*e4b17023SJohn Marino
644*e4b17023SJohn Marino make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
645*e4b17023SJohn Marino flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
646*e4b17023SJohn Marino }
647*e4b17023SJohn Marino
648*e4b17023SJohn Marino flow_dfs_compute_reverse_finish (&dfs_ds);
649*e4b17023SJohn Marino return;
650*e4b17023SJohn Marino }
651*e4b17023SJohn Marino
652*e4b17023SJohn Marino /* Compute reverse top sort order. This is computing a post order
653*e4b17023SJohn Marino numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then
654*e4b17023SJohn Marino ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is
655*e4b17023SJohn Marino true, unreachable blocks are deleted. */
656*e4b17023SJohn Marino
int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  edge_iterator *stack;		/* DFS work stack of edge iterators.  */
  int sp;			/* Number of live entries on STACK.  */
  int post_order_num = 0;	/* Next free slot in POST_ORDER.  */
  sbitmap visited;
  int count;

  /* EXIT is finished first in a postorder walk, so emit it up front
     when the caller wants the fixed blocks included.  */
  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	  else
	    /* DEST has no successors: its postorder number can be
	       assigned immediately.  */
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  /* SRC is finished once its last outgoing edge has been
	     examined, so number it now.  */
	  if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    /* ENTRY and EXIT were deliberately not numbered; account for them
       so COUNT is comparable against n_basic_blocks below.  */
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
	{
	  /* Remember the chain successor before B is possibly freed.  */
	  next_bb = b->next_bb;

	  if (!(TEST_BIT (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
748*e4b17023SJohn Marino
749*e4b17023SJohn Marino
750*e4b17023SJohn Marino /* Helper routine for inverted_post_order_compute.
751*e4b17023SJohn Marino BB has to belong to a region of CFG
752*e4b17023SJohn Marino unreachable by inverted traversal from the exit.
753*e4b17023SJohn Marino i.e. there's no control flow path from ENTRY to EXIT
754*e4b17023SJohn Marino that contains this BB.
755*e4b17023SJohn Marino This can happen in two cases - if there's an infinite loop
756*e4b17023SJohn Marino or if there's a block that has no successor
757*e4b17023SJohn Marino (call to a function with no return).
758*e4b17023SJohn Marino Some RTL passes deal with this condition by
759*e4b17023SJohn Marino calling connect_infinite_loops_to_exit () and/or
760*e4b17023SJohn Marino add_noreturn_fake_exit_edges ().
761*e4b17023SJohn Marino However, those methods involve modifying the CFG itself
762*e4b17023SJohn Marino which may not be desirable.
763*e4b17023SJohn Marino Hence, we deal with the infinite loop/no return cases
764*e4b17023SJohn Marino by identifying a unique basic block that can reach all blocks
765*e4b17023SJohn Marino in such a region by inverted traversal.
766*e4b17023SJohn Marino This function returns a basic block that guarantees
767*e4b17023SJohn Marino that all blocks in the region are reachable
768*e4b17023SJohn Marino by starting an inverted traversal from the returned block. */
769*e4b17023SJohn Marino
770*e4b17023SJohn Marino static basic_block
dfs_find_deadend(basic_block bb)771*e4b17023SJohn Marino dfs_find_deadend (basic_block bb)
772*e4b17023SJohn Marino {
773*e4b17023SJohn Marino sbitmap visited = sbitmap_alloc (last_basic_block);
774*e4b17023SJohn Marino sbitmap_zero (visited);
775*e4b17023SJohn Marino
776*e4b17023SJohn Marino for (;;)
777*e4b17023SJohn Marino {
778*e4b17023SJohn Marino SET_BIT (visited, bb->index);
779*e4b17023SJohn Marino if (EDGE_COUNT (bb->succs) == 0
780*e4b17023SJohn Marino || TEST_BIT (visited, EDGE_SUCC (bb, 0)->dest->index))
781*e4b17023SJohn Marino {
782*e4b17023SJohn Marino sbitmap_free (visited);
783*e4b17023SJohn Marino return bb;
784*e4b17023SJohn Marino }
785*e4b17023SJohn Marino
786*e4b17023SJohn Marino bb = EDGE_SUCC (bb, 0)->dest;
787*e4b17023SJohn Marino }
788*e4b17023SJohn Marino
789*e4b17023SJohn Marino gcc_unreachable ();
790*e4b17023SJohn Marino }
791*e4b17023SJohn Marino
792*e4b17023SJohn Marino
793*e4b17023SJohn Marino /* Compute the reverse top sort order of the inverted CFG
794*e4b17023SJohn Marino i.e. starting from the exit block and following the edges backward
795*e4b17023SJohn Marino (from successors to predecessors).
796*e4b17023SJohn Marino This ordering can be used for forward dataflow problems among others.
797*e4b17023SJohn Marino
798*e4b17023SJohn Marino This function assumes that all blocks in the CFG are reachable
799*e4b17023SJohn Marino from the ENTRY (but not necessarily from EXIT).
800*e4b17023SJohn Marino
801*e4b17023SJohn Marino If there's an infinite loop,
802*e4b17023SJohn Marino a simple inverted traversal starting from the blocks
803*e4b17023SJohn Marino with no successors can't visit all blocks.
804*e4b17023SJohn Marino To solve this problem, we first do inverted traversal
805*e4b17023SJohn Marino starting from the blocks with no successor.
806*e4b17023SJohn Marino And if there's any block left that's not visited by the regular
807*e4b17023SJohn Marino inverted traversal from EXIT,
808*e4b17023SJohn Marino those blocks are in such problematic region.
809*e4b17023SJohn Marino Among those, we find one block that has
810*e4b17023SJohn Marino any visited predecessor (which is an entry into such a region),
811*e4b17023SJohn Marino and start looking for a "dead end" from that block
812*e4b17023SJohn Marino and do another inverted traversal from that block. */
813*e4b17023SJohn Marino
int
inverted_post_order_compute (int *post_order)
{
  basic_block bb;
  edge_iterator *stack;		/* DFS work stack of edge iterators.  */
  int sp;			/* Number of live entries on STACK.  */
  int post_order_num = 0;	/* Next free slot in POST_ORDER.  */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Put all blocks that have no successor into the initial work list.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (EDGE_COUNT (bb->succs) == 0)
      {
	/* Push the initial edge on to the stack.  */
	if (EDGE_COUNT (bb->preds) > 0)
	  {
	    stack[sp++] = ei_start (bb->preds);
	    SET_BIT (visited, bb->index);
	  }
      }

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (sp)
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack[sp - 1];
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! TEST_BIT (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      SET_BIT (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack[sp++] = ei_start (pred->preds);
	      else
		post_order[post_order_num++] = pred->index;
	    }
	  else
	    {
	      /* BB is finished once its last incoming edge has been
		 examined.  EXIT_BLOCK is emitted separately below.  */
	      if (bb != EXIT_BLOCK_PTR && ei_one_before_end_p (ei))
		post_order[post_order_num++] = bb->index;

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack[sp - 1]);
	      else
		sp--;
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
	if (!TEST_BIT (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (TEST_BIT (visited, e->src->index))
		      visited_pred = e->src;
		  }

		/* BB has a visited predecessor, so it is an entry into
		   an unreachable region: find a dead end inside that
		   region and restart the traversal from there.  */
		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    SET_BIT (visited, be->index);
		    stack[sp++] = ei_start (be->preds);
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && sp == 0)
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
	  gcc_assert (be != NULL);
	  SET_BIT (visited, be->index);
	  stack[sp++] = ei_start (be->preds);
	}

      /* The only case the below while fires is
	 when there's an infinite loop.  */
    }
  while (sp);

  /* EXIT_BLOCK is always included.  */
  post_order[post_order_num++] = EXIT_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
939*e4b17023SJohn Marino
940*e4b17023SJohn Marino /* Compute the depth first search order and store in the array
941*e4b17023SJohn Marino PRE_ORDER if nonzero, marking the nodes visited in VISITED. If
942*e4b17023SJohn Marino REV_POST_ORDER is nonzero, return the reverse completion number for each
943*e4b17023SJohn Marino node. Returns the number of nodes visited. A depth first search
944*e4b17023SJohn Marino tries to get as far away from the starting point as quickly as
945*e4b17023SJohn Marino possible.
946*e4b17023SJohn Marino
947*e4b17023SJohn Marino pre_order is a really a preorder numbering of the graph.
948*e4b17023SJohn Marino rev_post_order is really a reverse postorder numbering of the graph.
949*e4b17023SJohn Marino */
950*e4b17023SJohn Marino
int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  edge_iterator *stack;		/* DFS work stack of edge iterators.  */
  int sp;			/* Number of live entries on STACK.  */
  int pre_order_num = 0;	/* Next preorder number, counts up.  */
  /* Reverse-postorder slots are filled from the back of the array.  */
  int rev_post_order_num = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  if (include_entry_exit)
    {
      /* ENTRY is the first block in preorder and the first in reverse
	 postorder.  */
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }
  else
    /* ENTRY/EXIT are excluded, so the arrays are NUM_FIXED_BLOCKS
       entries shorter.  */
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Preorder number is assigned on first discovery.  */
	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack[sp - 1]);
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  if (include_entry_exit)
    {
      /* EXIT is the last block in preorder and the last in reverse
	 postorder.  */
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
      /* The number of nodes visited should be the number of blocks.  */
      gcc_assert (pre_order_num == n_basic_blocks);
    }
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);

  return pre_order_num;
}
1051*e4b17023SJohn Marino
1052*e4b17023SJohn Marino /* Compute the depth first search order on the _reverse_ graph and
1053*e4b17023SJohn Marino store in the array DFS_ORDER, marking the nodes visited in VISITED.
1054*e4b17023SJohn Marino Returns the number of nodes visited.
1055*e4b17023SJohn Marino
1056*e4b17023SJohn Marino The computation is split into three pieces:
1057*e4b17023SJohn Marino
1058*e4b17023SJohn Marino flow_dfs_compute_reverse_init () creates the necessary data
1059*e4b17023SJohn Marino structures.
1060*e4b17023SJohn Marino
1061*e4b17023SJohn Marino flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1062*e4b17023SJohn Marino structures. The block will start the search.
1063*e4b17023SJohn Marino
1064*e4b17023SJohn Marino flow_dfs_compute_reverse_execute () continues (or starts) the
1065*e4b17023SJohn Marino search using the block on the top of the stack, stopping when the
1066*e4b17023SJohn Marino stack is empty.
1067*e4b17023SJohn Marino
1068*e4b17023SJohn Marino flow_dfs_compute_reverse_finish () destroys the necessary data
1069*e4b17023SJohn Marino structures.
1070*e4b17023SJohn Marino
1071*e4b17023SJohn Marino Thus, the user will probably call ..._init(), call ..._add_bb() to
1072*e4b17023SJohn Marino add a beginning basic block to the stack, call ..._execute(),
1073*e4b17023SJohn Marino possibly add another bb to the stack and again call ..._execute(),
1074*e4b17023SJohn Marino ..., and finally call _finish(). */
1075*e4b17023SJohn Marino
1076*e4b17023SJohn Marino /* Initialize the data structures used for depth-first search on the
1077*e4b17023SJohn Marino reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
1078*e4b17023SJohn Marino added to the basic block stack. DATA is the current depth-first
1079*e4b17023SJohn Marino search context. If INITIALIZE_STACK is nonzero, there is an
1080*e4b17023SJohn Marino element on the stack. */
1081*e4b17023SJohn Marino
1082*e4b17023SJohn Marino static void
flow_dfs_compute_reverse_init(depth_first_search_ds data)1083*e4b17023SJohn Marino flow_dfs_compute_reverse_init (depth_first_search_ds data)
1084*e4b17023SJohn Marino {
1085*e4b17023SJohn Marino /* Allocate stack for back-tracking up CFG. */
1086*e4b17023SJohn Marino data->stack = XNEWVEC (basic_block, n_basic_blocks);
1087*e4b17023SJohn Marino data->sp = 0;
1088*e4b17023SJohn Marino
1089*e4b17023SJohn Marino /* Allocate bitmap to track nodes that have been visited. */
1090*e4b17023SJohn Marino data->visited_blocks = sbitmap_alloc (last_basic_block);
1091*e4b17023SJohn Marino
1092*e4b17023SJohn Marino /* None of the nodes in the CFG have been visited yet. */
1093*e4b17023SJohn Marino sbitmap_zero (data->visited_blocks);
1094*e4b17023SJohn Marino
1095*e4b17023SJohn Marino return;
1096*e4b17023SJohn Marino }
1097*e4b17023SJohn Marino
1098*e4b17023SJohn Marino /* Add the specified basic block to the top of the dfs data
1099*e4b17023SJohn Marino structures. When the search continues, it will start at the
1100*e4b17023SJohn Marino block. */
1101*e4b17023SJohn Marino
1102*e4b17023SJohn Marino static void
flow_dfs_compute_reverse_add_bb(depth_first_search_ds data,basic_block bb)1103*e4b17023SJohn Marino flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
1104*e4b17023SJohn Marino {
1105*e4b17023SJohn Marino data->stack[data->sp++] = bb;
1106*e4b17023SJohn Marino SET_BIT (data->visited_blocks, bb->index);
1107*e4b17023SJohn Marino }
1108*e4b17023SJohn Marino
1109*e4b17023SJohn Marino /* Continue the depth-first search through the reverse graph starting with the
1110*e4b17023SJohn Marino block at the stack's top and ending when the stack is empty. Visited nodes
1111*e4b17023SJohn Marino are marked. Returns an unvisited basic block, or NULL if there is none
1112*e4b17023SJohn Marino available. */
1113*e4b17023SJohn Marino
1114*e4b17023SJohn Marino static basic_block
flow_dfs_compute_reverse_execute(depth_first_search_ds data,basic_block last_unvisited)1115*e4b17023SJohn Marino flow_dfs_compute_reverse_execute (depth_first_search_ds data,
1116*e4b17023SJohn Marino basic_block last_unvisited)
1117*e4b17023SJohn Marino {
1118*e4b17023SJohn Marino basic_block bb;
1119*e4b17023SJohn Marino edge e;
1120*e4b17023SJohn Marino edge_iterator ei;
1121*e4b17023SJohn Marino
1122*e4b17023SJohn Marino while (data->sp > 0)
1123*e4b17023SJohn Marino {
1124*e4b17023SJohn Marino bb = data->stack[--data->sp];
1125*e4b17023SJohn Marino
1126*e4b17023SJohn Marino /* Perform depth-first search on adjacent vertices. */
1127*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, bb->preds)
1128*e4b17023SJohn Marino if (!TEST_BIT (data->visited_blocks, e->src->index))
1129*e4b17023SJohn Marino flow_dfs_compute_reverse_add_bb (data, e->src);
1130*e4b17023SJohn Marino }
1131*e4b17023SJohn Marino
1132*e4b17023SJohn Marino /* Determine if there are unvisited basic blocks. */
1133*e4b17023SJohn Marino FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
1134*e4b17023SJohn Marino if (!TEST_BIT (data->visited_blocks, bb->index))
1135*e4b17023SJohn Marino return bb;
1136*e4b17023SJohn Marino
1137*e4b17023SJohn Marino return NULL;
1138*e4b17023SJohn Marino }
1139*e4b17023SJohn Marino
1140*e4b17023SJohn Marino /* Destroy the data structures needed for depth-first search on the
1141*e4b17023SJohn Marino reverse graph. */
1142*e4b17023SJohn Marino
1143*e4b17023SJohn Marino static void
flow_dfs_compute_reverse_finish(depth_first_search_ds data)1144*e4b17023SJohn Marino flow_dfs_compute_reverse_finish (depth_first_search_ds data)
1145*e4b17023SJohn Marino {
1146*e4b17023SJohn Marino free (data->stack);
1147*e4b17023SJohn Marino sbitmap_free (data->visited_blocks);
1148*e4b17023SJohn Marino }
1149*e4b17023SJohn Marino
1150*e4b17023SJohn Marino /* Performs dfs search from BB over vertices satisfying PREDICATE;
1151*e4b17023SJohn Marino if REVERSE, go against direction of edges. Returns number of blocks
1152*e4b17023SJohn Marino found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;		/* ST is the work stack, LBB the current block.  */
  int sp = 0, tv = 0;		/* SP: stack top; TV: blocks found so far.  */
  unsigned size;

  /* A bitmap to keep track of visited blocks.  Allocating it each time
     this function is called is not possible, since dfs_enumerate_from
     is often used on small (almost) disjoint parts of cfg (bodies of
     loops), and allocating a large sbitmap would lead to quadratic
     behavior.  */
  static sbitmap visited;
  static unsigned v_size;

  /* The static bitmap is only ever cleared on allocation/resize; the
     cleanup loop at the end resets exactly the bits set here, which
     keeps it all-zero between calls.  */
#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index))
#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))

  /* Resize the VISITED sbitmap if necessary.  */
  size = last_basic_block;
  if (size < 10)
    size = 10;

  if (!visited)
    {

      visited = sbitmap_alloc (size);
      sbitmap_zero (visited);
      v_size = size;
    }
  else if (v_size < size)
    {
      /* Ensure that we increase the size of the sbitmap exponentially.  */
      if (2 * v_size > size)
	size = 2 * v_size;

      visited = sbitmap_resize (visited, size, 0);
      v_size = size;
    }

  st = XCNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  /* Walk predecessor edges when traversing against the edge
	     direction.  */
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  /* Clear only the bits we set, restoring the all-zero invariant of
     the cached bitmap for the next call.  */
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
1233*e4b17023SJohn Marino
1234*e4b17023SJohn Marino
1235*e4b17023SJohn Marino /* Compute dominance frontiers, ala Harvey, Ferrante, et al.
1236*e4b17023SJohn Marino
1237*e4b17023SJohn Marino This algorithm can be found in Timothy Harvey's PhD thesis, at
1238*e4b17023SJohn Marino http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
1239*e4b17023SJohn Marino dominance algorithms.
1240*e4b17023SJohn Marino
1241*e4b17023SJohn Marino First, we identify each join point, j (any node with more than one
1242*e4b17023SJohn Marino incoming edge is a join point).
1243*e4b17023SJohn Marino
1244*e4b17023SJohn Marino We then examine each predecessor, p, of j and walk up the dominator tree
1245*e4b17023SJohn Marino starting at p.
1246*e4b17023SJohn Marino
1247*e4b17023SJohn Marino We stop the walk when we reach j's immediate dominator - j is in the
1248*e4b17023SJohn Marino dominance frontier of each of the nodes in the walk, except for j's
1249*e4b17023SJohn Marino immediate dominator. Intuitively, all of the rest of j's dominators are
1250*e4b17023SJohn Marino shared by j's predecessors as well.
1251*e4b17023SJohn Marino Since they dominate j, they will not have j in their dominance frontiers.
1252*e4b17023SJohn Marino
1253*e4b17023SJohn Marino The number of nodes touched by this algorithm is equal to the size
1254*e4b17023SJohn Marino of the dominance frontiers, no more, no less.
1255*e4b17023SJohn Marino */
1256*e4b17023SJohn Marino
1257*e4b17023SJohn Marino
1258*e4b17023SJohn Marino static void
compute_dominance_frontiers_1(bitmap_head * frontiers)1259*e4b17023SJohn Marino compute_dominance_frontiers_1 (bitmap_head *frontiers)
1260*e4b17023SJohn Marino {
1261*e4b17023SJohn Marino edge p;
1262*e4b17023SJohn Marino edge_iterator ei;
1263*e4b17023SJohn Marino basic_block b;
1264*e4b17023SJohn Marino FOR_EACH_BB (b)
1265*e4b17023SJohn Marino {
1266*e4b17023SJohn Marino if (EDGE_COUNT (b->preds) >= 2)
1267*e4b17023SJohn Marino {
1268*e4b17023SJohn Marino FOR_EACH_EDGE (p, ei, b->preds)
1269*e4b17023SJohn Marino {
1270*e4b17023SJohn Marino basic_block runner = p->src;
1271*e4b17023SJohn Marino basic_block domsb;
1272*e4b17023SJohn Marino if (runner == ENTRY_BLOCK_PTR)
1273*e4b17023SJohn Marino continue;
1274*e4b17023SJohn Marino
1275*e4b17023SJohn Marino domsb = get_immediate_dominator (CDI_DOMINATORS, b);
1276*e4b17023SJohn Marino while (runner != domsb)
1277*e4b17023SJohn Marino {
1278*e4b17023SJohn Marino if (!bitmap_set_bit (&frontiers[runner->index],
1279*e4b17023SJohn Marino b->index))
1280*e4b17023SJohn Marino break;
1281*e4b17023SJohn Marino runner = get_immediate_dominator (CDI_DOMINATORS,
1282*e4b17023SJohn Marino runner);
1283*e4b17023SJohn Marino }
1284*e4b17023SJohn Marino }
1285*e4b17023SJohn Marino }
1286*e4b17023SJohn Marino }
1287*e4b17023SJohn Marino }
1288*e4b17023SJohn Marino
1289*e4b17023SJohn Marino
1290*e4b17023SJohn Marino void
compute_dominance_frontiers(bitmap_head * frontiers)1291*e4b17023SJohn Marino compute_dominance_frontiers (bitmap_head *frontiers)
1292*e4b17023SJohn Marino {
1293*e4b17023SJohn Marino timevar_push (TV_DOM_FRONTIERS);
1294*e4b17023SJohn Marino
1295*e4b17023SJohn Marino compute_dominance_frontiers_1 (frontiers);
1296*e4b17023SJohn Marino
1297*e4b17023SJohn Marino timevar_pop (TV_DOM_FRONTIERS);
1298*e4b17023SJohn Marino }
1299*e4b17023SJohn Marino
/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  VEC(int,heap) *work_stack;
  bitmap phi_insertion_points;

  work_stack = VEC_alloc (int, heap, n_basic_blocks);
  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
     VEC_quick_push here for speed.  This is safe because we know that
     the number of definition blocks is no greater than the number of
     basic blocks, which is the initial capacity of WORK_STACK.  */
  EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
    VEC_quick_push (int, work_stack, bb_index);

  /* Pop a block off the worklist, add every block that appears in
     the original block's DF that we have not already processed to
     the worklist.  Iterate until the worklist is empty.   Blocks
     which are added to the worklist are potential sites for
     PHI nodes.  */
  while (VEC_length (int, work_stack) > 0)
    {
      bb_index = VEC_pop (int, work_stack);

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work stack.  */
      gcc_assert (bb_index < (unsigned) last_basic_block);

      /* PHI_INSERTION_POINTS doubles as the "already visited" set:
	 iterating over DF minus its already-set bits guarantees each
	 frontier block is pushed onto WORK_STACK at most once.  */
      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
				      0, i, bi)
	{
	  /* Use a safe push because if there is a definition of VAR
	     in every basic block, then WORK_STACK may eventually have
	     more than N_BASIC_BLOCK entries.  */
	  VEC_safe_push (int, heap, work_stack, i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  VEC_free (int, heap, work_stack);

  return phi_insertion_points;
}
1358*e4b17023SJohn Marino
1359*e4b17023SJohn Marino
1360