/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"

namespace {
/* Store the data structures necessary for depth-first search.  */
class depth_first_search
{
public:
  depth_first_search ();

  basic_block execute (basic_block);
  void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}
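
/* Usage sketch (illustrative only, not part of this file): a pass that
   needs loop back-edge information might call this function and then
   test the EDGE_DFS_BACK flag on each edge; edges with the flag set
   close a cycle found by the DFS walk.

     bool cyclic = mark_dfs_back_edges ();
     basic_block bb;
     edge e;
     edge_iterator ei;
     FOR_EACH_BB_FN (bb, cfun)
       FOR_EACH_EDGE (e, ei, bb->succs)
	 if (e->flags & EDGE_DFS_BACK)
	   handle_back_edge (e);

   where handle_back_edge is a hypothetical callback.  */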

/* Find unreachable blocks.  An unreachable block will have the
   BB_REACHABLE bit clear in block->flags; a set bit indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
	{
	  basic_block dest = e->dest;

	  if (!(dest->flags & BB_REACHABLE))
	    {
	      *tos++ = dest;
	      dest->flags |= BB_REACHABLE;
	    }
	}
    }

  free (worklist);
}

/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}


/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
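
/* Usage sketch (illustrative only): a dataflow client might build the
   list once, map edges to indices and back, and release it when done.

     struct edge_list *el = create_edge_list ();
     for (int i = 0; i < NUM_EDGES (el); i++)
       {
	 edge e = INDEX_EDGE (el, i);
	 int idx = EDGE_INDEX (el, e->src, e->dest);
	 gcc_assert (idx == i);
       }
     free_edge_list (el);
*/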

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return EXIT_BLOCK_PTR_FOR_FN (cfun);
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR_FOR_FN (cfun);
      return bb;
    }
}

/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* For abnormal edges, we don't make current_block control
     dependent because instructions that throw are always necessary
     anyway.  */
  edge e = find_edge (get_edge_src (edge_index), get_edge_dest (edge_index));
  if (e->flags & EDGE_ABNORMAL)
    return;

  if (get_edge_src (edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  else
    ending_block = find_pdom (get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = find_pdom (current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}

/* Record all blocks' control dependences on all edges in the edge
   list, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      m_el.quick_push (std::make_pair (e->src->index, e->dest->index));

  control_dependence_map.create (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  for (unsigned i = 0; i < control_dependence_map.length (); ++i)
    BITMAP_FREE (control_dependence_map[i]);
  control_dependence_map.release ();
  m_el.release ();
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}
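
/* Usage sketch (illustrative only): computing control dependences
   requires post-dominator information, as in

     calculate_dominance_info (CDI_POST_DOMINATORS);
     control_dependences *cd = new control_dependences ();
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     ...
     delete cd;
     free_dominance_info (CDI_POST_DOMINATORS);

   where BB stands for some basic block of interest.  */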


/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}

/* This routine will determine the index of the edge, if any, that goes
   from a specified predecessor to a specified successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}
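
/* Usage sketch (illustrative only): a typical pattern in passes that
   need every block to reach EXIT, e.g. before computing post-dominators,
   is to bracket the computation with fake exit edges:

     add_noreturn_fake_exit_edges ();
     calculate_dominance_info (CDI_POST_DOMINATORS);
     ...
     free_dominance_info (CDI_POST_DOMINATORS);
     remove_fake_exit_edges ();
*/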

/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}

/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
	   b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}
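
/* Usage sketch (illustrative only): callers typically allocate an
   array sized by the number of blocks and free it afterwards.

     int *post_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = post_order_compute (post_order, false, false);
     for (int i = 0; i < n; i++)
       process_bb (BASIC_BLOCK_FOR_FN (cfun, post_order[i]));
     free (post_order);

   where process_bb is a hypothetical per-block worker.  */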


/* Helper routine for inverted_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth first search in dominance.c.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }

  gcc_unreachable ();
}


/* Compute the reverse top sort order of the inverted CFG
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   and start looking for a "dead end" from that block
   and do another inverted traversal from that block.  */

void
inverted_post_order_compute (vec<int> *post_order,
			     sbitmap *start_points)
{
  basic_block bb;
  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		post_order->quick_push (pred->index);
	    }
	  else
	    {
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		post_order->quick_push (bb->index);

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case the while below fires is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  post_order->quick_push (EXIT_BLOCK);
}
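
/* Usage sketch (illustrative only):

     auto_vec<int> postorder;
     inverted_post_order_compute (&postorder, NULL);
     for (unsigned i = 0; i < postorder.length (); i++)
       process_bb (BASIC_BLOCK_FOR_FN (cfun, postorder[i]));

   where process_bb is a hypothetical per-block worker; CD-DCE passes
   a START_POINTS bitmap as the second argument instead.  */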

/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! (dest->flags & visited))
	{
	  /* Mark that we have visited the destination.  */
	  dest->flags |= visited;

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  /* Clear the temporarily allocated flag.  */
  if (!rev_post_order)
    rev_post_order = pre_order;
  for (int i = 0; i < pre_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags &= ~visited;

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
					 include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}
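
/* Usage sketch (illustrative only): the common idiom for walking the
   CFG in reverse postorder is

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; i++)
       process_bb (BASIC_BLOCK_FOR_FN (cfun, rpo[i]));
     free (rpo);

   where process_bb is a hypothetical per-block worker.  */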


/* Per basic-block data for rev_post_order_and_mark_dfs_back_seme,
   element of a sparsely populated array indexed by basic-block number.  */
typedef auto_vec<int, 2> scc_exit_vec_t;
struct rpoamdbs_bb_data {
    int depth;
    int bb_to_pre;
    /* The basic-block index of the SCC entry of the block visited first
       (the SCC leader).  */
    int scc;
    /* The index into the RPO array where the block's SCC entries end
       (only valid for the SCC leader).  */
    int scc_end;
    /* The indexes of the exit destinations of this SCC (only valid
       for the SCC leader).  Initialized upon discovery of SCC leaders.  */
    scc_exit_vec_t scc_exits;
};

/* Tag H as a header of B, weaving H and its loop header list into the
   current loop header list of B.  */

static void
tag_header (int b, int h, rpoamdbs_bb_data *bb_data)
{
  if (h == -1 || b == h)
    return;
  int cur1 = b;
  int cur2 = h;
  while (bb_data[cur1].scc != -1)
    {
      int ih = bb_data[cur1].scc;
      if (ih == cur2)
	return;
      if (bb_data[ih].depth < bb_data[cur2].depth)
	{
	  bb_data[cur1].scc = cur2;
	  cur1 = cur2;
	  cur2 = ih;
	}
      else
	cur1 = ih;
    }
  bb_data[cur1].scc = cur2;
}

/* Comparator sorting two edge destinations E1 and E2 by their index
   in the PRE array as specified by BB_TO_PRE.  */

static int
cmp_edge_dest_pre (const void *e1_, const void *e2_, void *data_)
{
  const int *e1 = (const int *)e1_;
  const int *e2 = (const int *)e2_;
  rpoamdbs_bb_data *bb_data = (rpoamdbs_bb_data *)data_;
  return (bb_data[*e1].bb_to_pre - bb_data[*e2].bb_to_pre);
}

/* Compute the reverse completion number of a depth first search
   on the SEME region denoted by the ENTRY edge and the EXIT_BBS set of
   exit block indexes and store it in the array REV_POST_ORDER.
   Also sets the EDGE_DFS_BACK edge flags according to this visitation
   order.
   Returns the number of nodes visited.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   If FOR_ITERATION is true then compute an RPO where SCCs form a
   contiguous region in the RPO array.
   *TOPLEVEL_SCC_EXTENTS if not NULL is filled with pairs of
   *REV_POST_ORDER indexes denoting extents of the toplevel SCCs in
   this region.  */

int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
				       bitmap exit_bbs, bool for_iteration,
				       int *rev_post_order,
				       vec<std::pair<int, int> >
					 *toplevel_scc_extents)
{
  int rev_post_order_num = 0;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Lazily initialized per-BB data for the two DFS walks below.  */
  rpoamdbs_bb_data *bb_data
    = XNEWVEC (rpoamdbs_bb_data, last_basic_block_for_fn (fn));

  /* First DFS walk, loop discovery according to
      A New Algorithm for Identifying Loops in Decompilation
     by Tao Wei, Jian Mao, Wei Zou and You Chen of the Institute of
     Computer Science and Technology of the Peking University.  */
  auto_vec<edge_iterator, 20> ei_stack (n_basic_blocks_for_fn (fn) + 1);
  auto_bb_flag is_header (fn);
  int depth = 1;
  unsigned n_sccs = 0;

  basic_block dest = entry->dest;
  edge_iterator ei;
  int pre_num = 0;

  /* DFS process DEST.  */
find_loops:
  bb_data[dest->index].bb_to_pre = pre_num++;
  bb_data[dest->index].depth = depth;
  bb_data[dest->index].scc = -1;
  depth++;
  gcc_assert ((dest->flags & (is_header|visited)) == 0);
  dest->flags |= visited;
  ei = ei_start (dest->succs);
  while (!ei_end_p (ei))
    {
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
      if (bitmap_bit_p (exit_bbs, ei_edge (ei)->dest->index))
	;
      else if (!(ei_edge (ei)->dest->flags & visited))
	{
	  ei_stack.quick_push (ei);
	  dest = ei_edge (ei)->dest;
	  /* DFS recurse on DEST.  */
	  goto find_loops;

ret_from_find_loops:
	  /* Return point of DFS recursion.  */
	  ei = ei_stack.pop ();
	  dest = ei_edge (ei)->src;
	  int header = bb_data[ei_edge (ei)->dest->index].scc;
	  tag_header (dest->index, header, bb_data);
	  depth = bb_data[dest->index].depth + 1;
	}
      else
	{
	  if (bb_data[ei_edge (ei)->dest->index].depth > 0) /* on the stack */
	    {
	      ei_edge (ei)->flags |= EDGE_DFS_BACK;
	      n_sccs++;
	      ei_edge (ei)->dest->flags |= is_header;
	      ::new (&bb_data[ei_edge (ei)->dest->index].scc_exits)
		auto_vec<int, 2> ();
	      tag_header (dest->index, ei_edge (ei)->dest->index, bb_data);
	    }
	  else if (bb_data[ei_edge (ei)->dest->index].scc == -1)
	    ;
	  else
	    {
	      int header = bb_data[ei_edge (ei)->dest->index].scc;
	      if (bb_data[header].depth > 0)
		tag_header (dest->index, header, bb_data);
	      else
		{
		  /* A re-entry into an existing loop.  */
		  /* ???  Need to mark is_header?  */
		  while (bb_data[header].scc != -1)
		    {
		      header = bb_data[header].scc;
		      if (bb_data[header].depth > 0)
			{
			  tag_header (dest->index, header, bb_data);
			  break;
			}
		    }
		}
	    }
	}
      ei_next (&ei);
    }
  rev_post_order[rev_post_order_num++] = dest->index;
  /* not on the stack anymore */
  bb_data[dest->index].depth = -bb_data[dest->index].depth;
  if (!ei_stack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_find_loops;

  /* Optimize for no SCCs found or !for_iteration.  */
  if (n_sccs == 0 || !for_iteration)
    {
      /* Clear the temporarily allocated flags.  */
      for (int i = 0; i < rev_post_order_num; ++i)
	BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
	  &= ~(is_header|visited);
      /* And swap elements.  */
      for (int i = 0; i < rev_post_order_num/2; ++i)
	std::swap (rev_post_order[i], rev_post_order[rev_post_order_num-i-1]);
      XDELETEVEC (bb_data);

      return rev_post_order_num;
    }

  /* Next find SCC exits, clear the visited flag and compute an upper bound
     for the edge stack below.  */
  unsigned edge_count = 0;
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      int bb = rev_post_order[i];
      BASIC_BLOCK_FOR_FN (fn, bb)->flags &= ~visited;
      edge e;
      FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fn, bb)->succs)
	{
	  if (bitmap_bit_p (exit_bbs, e->dest->index))
	    continue;
	  edge_count++;
	  /* If e is an exit from e->src, record it for
	     bb_data[e->src].scc.  */
	  int src_scc = e->src->index;
	  if (!(e->src->flags & is_header))
	    src_scc = bb_data[src_scc].scc;
	  if (src_scc == -1)
	    continue;
	  int dest_scc = e->dest->index;
	  if (!(e->dest->flags & is_header))
	    dest_scc = bb_data[dest_scc].scc;
	  if (src_scc == dest_scc)
	    continue;
	  /* When dest_scc is nested inside src_scc it's not an
	     exit.  */
	  int tem_dest_scc = dest_scc;
	  unsigned dest_scc_depth = 0;
	  while (tem_dest_scc != -1)
	    {
	      dest_scc_depth++;
	      if ((tem_dest_scc = bb_data[tem_dest_scc].scc) == src_scc)
		break;
	    }
	  if (tem_dest_scc != -1)
	    continue;
	  /* When src_scc is nested inside dest_scc record an
	     exit from the outermost SCC this edge exits.  */
	  int tem_src_scc = src_scc;
	  unsigned src_scc_depth = 0;
	  while (tem_src_scc != -1)
	    {
	      if (bb_data[tem_src_scc].scc == dest_scc)
		{
		  edge_count++;
		  bb_data[tem_src_scc].scc_exits.safe_push (e->dest->index);
		  break;
		}
	      tem_src_scc = bb_data[tem_src_scc].scc;
	      src_scc_depth++;
	    }
	  /* Else find the outermost SCC this edge exits (exits
	     from the inner SCCs are not important for the DFS
	     walk adjustment).  Do so by computing the common
	     ancestor SCC where the immediate child on the path to the
	     source SCC is the exited SCC.  */
	  if (tem_src_scc == -1)
	    {
	      edge_count++;
	      while (src_scc_depth > dest_scc_depth)
		{
		  src_scc = bb_data[src_scc].scc;
		  src_scc_depth--;
		}
	      while (dest_scc_depth > src_scc_depth)
		{
		  dest_scc = bb_data[dest_scc].scc;
		  dest_scc_depth--;
		}
	      while (bb_data[src_scc].scc != bb_data[dest_scc].scc)
		{
		  src_scc = bb_data[src_scc].scc;
		  dest_scc = bb_data[dest_scc].scc;
		}
	      bb_data[src_scc].scc_exits.safe_push (e->dest->index);
	    }
	}
    }

  /* Now the second DFS walk to compute a RPO where the extent of SCCs
     is minimized thus SCC members are adjacent in the RPO array.
     This is done by performing a DFS walk computing RPO with first visiting
     extra direct edges from SCC entry to its exits.
     That simulates a DFS walk over the graph with SCCs collapsed and
     walking the SCCs themselves only when all outgoing edges from the
     SCCs have been visited.
     SCC_END[scc-header-index] is the position in the RPO array of the
     last member of the SCC.  */
  auto_vec<std::pair<basic_block, basic_block>, 20> estack (edge_count + 1);
  int idx = rev_post_order_num;
  basic_block edest;
  dest = entry->dest;

  /* DFS process DEST.  */
dfs_rpo:
  gcc_checking_assert ((dest->flags & visited) == 0);
  /* Verify we enter SCCs through the same header and SCC nesting appears
     the same.  */
  gcc_assert (bb_data[dest->index].scc == -1
	      || (BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc)->flags
		  & visited));
  dest->flags |= visited;
  bb_data[dest->index].scc_end = -1;
  if ((dest->flags & is_header)
      && !bb_data[dest->index].scc_exits.is_empty ())
    {
      /* Push all the SCC exits as outgoing edges from its header to
	 be visited first.
	 To process exits in the same relative order as in the first
	 DFS walk sort them after their destination PRE order index.  */
      gcc_sort_r (&bb_data[dest->index].scc_exits[0],
		  bb_data[dest->index].scc_exits.length (),
		  sizeof (int), cmp_edge_dest_pre, bb_data);
      /* Process edges in reverse to match previous DFS walk order.  */
      for (int i = bb_data[dest->index].scc_exits.length () - 1; i >= 0; --i)
	estack.quick_push (std::make_pair
	  (dest, BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc_exits[i])));
    }
  else
    {
      if (dest->flags & is_header)
	bb_data[dest->index].scc_end = idx - 1;
      /* Push the edge vector in reverse to match the iteration order
	 from the DFS walk above.  */
      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	  estack.quick_push (std::make_pair (dest,
					     EDGE_SUCC (dest, i)->dest));
    }
  while (!estack.is_empty ()
	 && estack.last ().first == dest)
    {
      edest = estack.last ().second;
      if (!(edest->flags & visited))
	{
	  dest = edest;
	  /* DFS recurse on DEST.  */
	  goto dfs_rpo;

ret_from_dfs_rpo:
	  /* Return point of DFS recursion.  */
	  dest = estack.last ().first;
	}
      estack.pop ();
      /* If we processed all SCC exits from DEST mark the SCC end
	 since all RPO entries up to DEST itself will now belong
	 to its SCC.  The special-case of no SCC exits is already
	 dealt with above.  */
      if (dest->flags & is_header
	  /* When the last exit edge was processed mark the SCC end
	     and push the regular edges.  */
	  && bb_data[dest->index].scc_end == -1
	  && (estack.is_empty ()
	      || estack.last ().first != dest))
	{
	  bb_data[dest->index].scc_end = idx - 1;
	  /* Push the edge vector in reverse to match the iteration order
	     from the DFS walk above.  */
	  for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	    if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	      estack.quick_push (std::make_pair (dest,
						 EDGE_SUCC (dest, i)->dest));
	}
    }
  rev_post_order[--idx] = dest->index;
  if (!estack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_dfs_rpo;

  /* Each SCC extends from the position of its header inside
     the RPO array up to the RPO array index scc_end[header-index].  */
  if (toplevel_scc_extents)
    for (int i = 0; i < rev_post_order_num; i++)
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
	if (bb->flags & is_header)
	  {
	    toplevel_scc_extents->safe_push
	      (std::make_pair (i, bb_data[bb->index].scc_end));
	    i = bb_data[bb->index].scc_end;
	  }
      }

  /* Clear the temporarily allocated flags and free memory.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
      if (bb->flags & is_header)
	bb_data[bb->index].scc_exits.~scc_exit_vec_t ();
      bb->flags &= ~(visited|is_header);
    }

  XDELETEVEC (bb_data);

  return rev_post_order_num;
}
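
/* Usage sketch (illustrative only): computing an iteration-friendly RPO
   over the whole function body, in the style of the RPO value-numbering
   code, might look like

     auto_bitmap exit_bbs;
     bitmap_set_bit (exit_bbs, EXIT_BLOCK);
     edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun)
			      - NUM_FIXED_BLOCKS);
     int n = rev_post_order_and_mark_dfs_back_seme (cfun, entry, exit_bbs,
						    true, rpo, NULL);
     ...
     free (rpo);
*/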


/* Depth-first search on the _reverse_ graph, visiting blocks from
   successors to predecessors and returning the number of nodes visited.

   The computation is split into three pieces:

   the depth_first_search constructor creates the necessary data
   structures;

   depth_first_search::add_bb () adds a basic block to the data
   structures, and the block will start the search;

   depth_first_search::execute () continues (or starts) the search
   using the block on the top of the stack, stopping when the stack
   is empty;

   the destructor releases the data structures.

   Thus, the user will probably construct the object, call add_bb () to
   add a beginning basic block to the stack, call execute (),
   possibly add another bb to the stack and again call execute (),
   ..., and let the destructor run when the object goes out of scope.  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  bitmap_clear (m_visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

void
depth_first_search::add_bb (basic_block bb)
{
  m_stack.quick_push (bb);
  bitmap_set_bit (m_visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

basic_block
depth_first_search::execute (basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (!m_stack.is_empty ())
    {
      bb = m_stack.pop ();

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (m_visited_blocks, e->src->index))
	  add_bb (e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (m_visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Performs dfs search from BB over vertices satisfying PREDICATE;
   if REVERSE, go against direction of edges.  Returns number of blocks
   found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  auto_bb_flag visited (cfun);

#define MARK_VISITED(BB) ((BB)->flags |= visited)
#define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
#define VISITED_P(BB) (((BB)->flags & visited) != 0)

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
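
/* Usage sketch (illustrative only): enumerate all blocks reachable
   from a block START that satisfy some property.

     static bool
     interesting_p (const_basic_block bb, const void *data ATTRIBUTE_UNUSED)
     {
       return bb->loop_father != NULL;
     }

     basic_block *body = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
     int n = dfs_enumerate_from (start, 0, interesting_p, body,
				 n_basic_blocks_for_fn (cfun), NULL);
     free (body);

   where interesting_p and START are hypothetical.  */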


/* Compute dominance frontiers, ala Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */

void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
	{
	  basic_block domsb = get_immediate_dominator (CDI_DOMINATORS, b);
	  FOR_EACH_EDGE (p, ei, b->preds)
	    {
	      basic_block runner = p->src;
	      if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      while (runner != domsb)
		{
		  if (!bitmap_set_bit (&frontiers[runner->index], b->index))
		    break;
		  runner = get_immediate_dominator (CDI_DOMINATORS, runner);
		}
	    }
	}
    }

  timevar_pop (TV_DOM_FRONTIERS);
}
1639 
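/* A minimal usage sketch (hypothetical caller): dominators must be
   available, and the caller owns one bitmap_head per basic block
   index.

     calculate_dominance_info (CDI_DOMINATORS);
     bitmap_head *frontiers
       = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
     compute_dominance_frontiers (frontiers);
     ... use &frontiers[bb->index] ...
     FOR_EACH_BB_FN (bb, cfun)
       bitmap_clear (&frontiers[bb->index]);
     free (frontiers);  */
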
/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks is the set of potential sites where
   PHI nodes are needed.  The caller is responsible for freeing the
   memory allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work set with all the blocks in DEF_BLOCKS.  */
  auto_bitmap work_set;
  bitmap_copy (work_set, def_blocks);
  bitmap_tree_view (work_set);

  /* Pop a block off the work set and add every block in its dominance
     frontier that we have not already processed to the work set.
     Iterate until the work set is empty.  Blocks added to the work
     set are potential sites for PHI nodes.  */
  while (!bitmap_empty_p (work_set))
    {
      /* The dominance frontier of a block consists of blocks that come
	 after it, so processing earlier blocks first is better.
	 ???  Basic blocks are by no means guaranteed to be ordered in
	 an optimal order for this iteration.  */
      bb_index = bitmap_first_set_bit (work_set);
      bitmap_clear_bit (work_set, bb_index);

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work set.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
	                              0, i, bi)
	{
	  bitmap_set_bit (work_set, i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  return phi_insertion_points;
}

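/* A minimal usage sketch (hypothetical): given frontiers computed as
   in the sketch above, find the blocks needing PHI nodes for a
   variable defined in blocks 2 and 5 (arbitrary example indices).

     auto_bitmap def_blocks;
     bitmap_set_bit (def_blocks, 2);
     bitmap_set_bit (def_blocks, 5);
     bitmap idf = compute_idf (def_blocks, frontiers);
     ... insert PHI nodes at each block set in idf ...
     BITMAP_FREE (idf);  */
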
/* Intersection and union of preds/succs for sbitmap based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  A sketch of
   typical solver usage follows bitmap_union_of_preds below.  */

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == NULL)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == NULL)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

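/* A minimal sketch of how a solver might use the four functions above
   (hypothetical, assuming a forward "and" problem such as expression
   availability, with N_EXPRS an assumed problem size): IN[B] is the
   intersection of OUT over B's predecessors.

     sbitmap *in = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
					 n_exprs);
     sbitmap *out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
					  n_exprs);
     bitmap_vector_ones (out, last_basic_block_for_fn (cfun));
     basic_block bb;
     bool changed = true;
     while (changed)
       {
	 changed = false;
	 FOR_EACH_BB_FN (bb, cfun)
	   {
	     bitmap_intersection_of_preds (in[bb->index], out, bb);
	     ... recompute out[bb->index] from in[bb->index] using the
		 block's local GEN/KILL sets, setting CHANGED if it
		 differs from the previous value ...
	   }
       }
     sbitmap_vector_free (in);
     sbitmap_vector_free (out);  */
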
/* Returns the list of basic blocks in the function in an order that
   guarantees that if a block X has just a single predecessor Y, then
   Y is after X in the ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}

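/* A minimal usage sketch (hypothetical): visit blocks so that every
   single-predecessor chain is processed from its tail back to its
   head.  PROCESS_BLOCK stands for arbitrary per-block work and is not
   a real function.

     basic_block *order = single_pred_before_succ_order ();
     unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
     for (unsigned i = 0; i < n; i++)
       process_block (order[i]);
     free (order);  */
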
/* Ignoring loop back edges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.

   When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not
   marked as executable.  */

edge
single_pred_edge_ignoring_loop_edges (basic_block bb,
				      bool ignore_not_executable)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
	 the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* We can safely ignore edges that are not executable.  */
      if (ignore_not_executable
	  && (e->flags & EDGE_EXECUTABLE) == 0)
	continue;

      /* If we have already seen a non-loop edge, then we must have
	 multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
	return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
	 it.  */
      retval = e;
    }

  return retval;
}
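
/* A minimal usage sketch (hypothetical): treat a loop header with a
   single entry edge like a single-predecessor block, e.g. when
   propagating state into a loop.  Dominators must be available for
   the back-edge test above to work.

     calculate_dominance_info (CDI_DOMINATORS);
     edge entry = single_pred_edge_ignoring_loop_edges (bb, false);
     if (entry)
       ... propagate state from entry->src into bb ...  */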