/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"

namespace {
/* Store the data structures necessary for depth-first search.  */
class depth_first_search
{
public:
  depth_first_search ();

  basic_block execute (basic_block);
  void add_bb (basic_block);

private:
  /* Stack for backtracking during the algorithm.  */
  auto_vec<basic_block, 20> m_stack;

  /* Record of basic blocks already seen by depth-first search.  */
  auto_sbitmap m_visited_blocks;
};
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
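	  /* SRC->DEST is a back edge exactly when DEST is an ancestor of
	     SRC in the depth-first spanning tree: DEST was numbered before
	     SRC (pre[src] >= pre[dest], with ">=" so that self loops count
	     too) and DEST's subtree is not finished yet (post[dest] == 0).  */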
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}

/* Find unreachable blocks.  An unreachable block will have the
   BB_REACHABLE bit clear in block->flags; a set bit indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

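  /* Each block is pushed at most once, guarded by BB_REACHABLE below,
     so N_BASIC_BLOCKS entries suffice.  */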
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
	{
	  basic_block dest = e->dest;

	  if (!(dest->flags & BB_REACHABLE))
	    {
	      *tos++ = dest;
	      dest->flags |= BB_REACHABLE;
	    }
	}
    }

  free (worklist);
}

/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}


/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */
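
/* A typical use, sketched (BB1 and BB2 here stand for any two blocks;
   the EDGE_INDEX, INDEX_EDGE and NUM_EDGES accessors are defined
   alongside struct edge_list):

     struct edge_list *el = create_edge_list ();
     int ix = EDGE_INDEX (el, bb1, bb2);
     edge e = ix != EDGE_INDEX_NO_EDGE ? INDEX_EDGE (el, ix) : NULL;
     free_edge_list (el);  */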

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return EXIT_BLOCK_PTR_FOR_FN (cfun);
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR_FOR_FN (cfun);
      return bb;
    }
}

/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */
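/* Recall the classical definition (Ferrante, Ottenstein and Warren):
   a block B is control dependent on edge (U, V) iff B postdominates V
   but does not strictly postdominate U.  The walk below marks exactly
   those blocks, climbing the postdominator tree from V and stopping
   just short of U's immediate postdominator.  */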

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* For abnormal edges, we don't make current_block control
     dependent because instructions that throw are always necessary
     anyway.  */
  edge e = find_edge (get_edge_src (edge_index), get_edge_dest (edge_index));
  if (e->flags & EDGE_ABNORMAL)
    return;

  if (get_edge_src (edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  else
    ending_block = find_pdom (get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = find_pdom (current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}

/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      m_el.quick_push (std::make_pair (e->src->index, e->dest->index));

  control_dependence_map.create (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  for (unsigned i = 0; i < control_dependence_map.length (); ++i)
    BITMAP_FREE (control_dependence_map[i]);
  control_dependence_map.release ();
  m_el.release ();
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}


/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

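  /* Walk whichever of the two edge lists is shorter.  */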
  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}

/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}

/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
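    /* ENTRY and EXIT were not numbered, so account for them when
       comparing against the total number of blocks.  */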
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}


/* Helper routine for inverted_post_order_compute,
   depth_first_search::execute, and the reverse-CFG
   depth-first search in dominance.c.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      if (EDGE_COUNT (next->succs) == 0)
	return next;

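      /* bitmap_set_bit returns false if the bit was already set:
	 revisiting NEXT means the walk is going around a cycle, so
	 settle for the last block reached before the repeat.  */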
      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }

  gcc_unreachable ();
}


/* Compute the reverse top sort order of the inverted CFG
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from the exit block and
   all basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such a problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   and start looking for a "dead end" from that block
   and do another inverted traversal from that block.  */

void
inverted_post_order_compute (vec<int> *post_order,
			     sbitmap *start_points)
{
  basic_block bb;
  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the predecessor.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		post_order->quick_push (pred->index);
	    }
	  else
	    {
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		post_order->quick_push (bb->index);

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case the below while fires is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  post_order->quick_push (EXIT_BLOCK);
}

/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fn));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
					 include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}

/* Unlike pre_and_rev_post_order_compute we fill rev_post_order backwards
   so iterating in RPO order needs to start with rev_post_order[n - 1]
   going to rev_post_order[0].  If FOR_ITERATION is true then try to
   make CFG cycles fit into small contiguous regions of the RPO order.
   When FOR_ITERATION is true this requires up-to-date loop structures.  */

int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
				       bitmap exit_bbs, bool for_iteration,
				       int *rev_post_order)
{
  int pre_order_num = 0;
  int rev_post_order_num = 0;

  /* Allocate stack for back-tracking up CFG.  Worst case we need
     O(n^2) edges but the following should suffice in practice without
     a need to re-allocate.  */
  auto_vec<edge, 20> stack (2 * n_basic_blocks_for_fn (fn));

  int *pre = XNEWVEC (int, 2 * last_basic_block_for_fn (fn));
  int *post = pre + last_basic_block_for_fn (fn);

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);
  /* BB flag to track which nodes have post[] assigned to avoid
     zeroing post.  */
  auto_bb_flag post_assigned (fn);

  /* Push the first edge on to the stack.  */
  stack.quick_push (entry);

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      int idx = stack.length () - 1;
      edge e = stack[idx];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (! bitmap_bit_p (exit_bbs, dest->index)
	  && ! (dest->flags & visited))
	{
	  /* Mark that we have visited the destination.  */
	  dest->flags |= visited;

	  pre[dest->index] = pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      /* Push the edge vector in reverse to match previous behavior.  */
	      stack.reserve (EDGE_COUNT (dest->succs));
	      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
		stack.quick_push (EDGE_SUCC (dest, i));
	      /* Generalize to handle more successors?  */
	      if (for_iteration
		  && EDGE_COUNT (dest->succs) == 2)
		{
		  edge &e1 = stack[stack.length () - 2];
		  if (loop_exit_edge_p (e1->src->loop_father, e1))
		    std::swap (e1, stack.last ());
		}
	    }
	  else
	    {
	      /* There are no successors for the DEST node so assign
		 its reverse completion number.  */
	      post[dest->index] = rev_post_order_num;
	      dest->flags |= post_assigned;
	      rev_post_order[rev_post_order_num] = dest->index;
	      rev_post_order_num++;
	    }
	}
      else
	{
	  if (dest->flags & visited
	      && src != entry->src
	      && pre[src->index] >= pre[dest->index]
	      && !(dest->flags & post_assigned))
	    e->flags |= EDGE_DFS_BACK;

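	  /* Edges out of a block are pushed consecutively, so once the
	     entry below this one on the stack leaves a different source
	     block, all of SRC's successors have been processed.  */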
	  if (idx != 0 && stack[idx - 1]->src != src)
	    {
	      /* There are no more successors for the SRC node
		 so assign its reverse completion number.  */
	      post[src->index] = rev_post_order_num;
	      src->flags |= post_assigned;
	      rev_post_order[rev_post_order_num] = src->index;
	      rev_post_order_num++;
	    }

	  stack.pop ();
	}
    }

  XDELETEVEC (pre);

  /* Clear the temporarily allocated flags.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
      &= ~(post_assigned|visited);

  return rev_post_order_num;
}


/* Compute the depth first search order on the _reverse_ graph,
   marking the nodes visited as we go.

   The computation is split into three pieces:

   depth_first_search::depth_first_search () creates the necessary
   data structures.

   depth_first_search::add_bb () adds a basic block to the data
   structures.  The block will start the search.

   depth_first_search::execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   Thus, the user will probably construct the object, call add_bb ()
   to add a beginning basic block to the stack, call execute (),
   possibly add another bb to the stack and again call execute (),
   ..., and let the destructor clean up.  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  bitmap_clear (m_visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

void
depth_first_search::add_bb (basic_block bb)
{
  m_stack.quick_push (bb);
  bitmap_set_bit (m_visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

basic_block
depth_first_search::execute (basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (!m_stack.is_empty ())
    {
      bb = m_stack.pop ();

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (m_visited_blocks, e->src->index))
	  add_bb (e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (m_visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Performs dfs search from BB over vertices satisfying PREDICATE;
   if REVERSE, go against direction of edges.  Returns number of blocks
   found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  auto_bb_flag visited (cfun);

#define MARK_VISITED(BB) ((BB)->flags |= visited)
#define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
#define VISITED_P(BB) (((BB)->flags & visited) != 0)

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}


/* Compute dominance frontiers, ala Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */


static void
compute_dominance_frontiers_1 (bitmap_head *frontiers)
{
  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
	{
	  FOR_EACH_EDGE (p, ei, b->preds)
	    {
	      basic_block runner = p->src;
	      basic_block domsb;
	      if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      domsb = get_immediate_dominator (CDI_DOMINATORS, b);
	      while (runner != domsb)
		{
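		  /* bitmap_set_bit returns false if B was already in
		     RUNNER's frontier; the walk further up was then
		     already done on B's behalf via another predecessor.  */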
		  if (!bitmap_set_bit (&frontiers[runner->index],
				       b->index))
		    break;
		  runner = get_immediate_dominator (CDI_DOMINATORS,
						    runner);
		}
	    }
	}
    }
}


void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  compute_dominance_frontiers_1 (frontiers);

  timevar_pop (TV_DOM_FRONTIERS);
}

/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */
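
/* Concretely, this computes the limit of the sequence

     DF_1 = DF (S), DF_{i+1} = DF (S union DF_i)

   for S = DEF_BLOCKS, using a worklist instead of recomputing the
   closure from scratch on each iteration.  */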

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  /* Each block can appear at most twice on the work-stack.  */
  auto_vec<int> work_stack (2 * n_basic_blocks_for_fn (cfun));
  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
     vec::quick_push here for speed.  This is safe because we know that
     the number of definition blocks is no greater than the number of
     basic blocks, which is the initial capacity of WORK_STACK.  */
  EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
    work_stack.quick_push (bb_index);

  /* Pop a block off the worklist, add every block that appears in
     the original block's DF that we have not already processed to
     the worklist.  Iterate until the worklist is empty.  Blocks
     which are added to the worklist are potential sites for
     PHI nodes.  */
  while (work_stack.length () > 0)
    {
      bb_index = work_stack.pop ();

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work stack.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
				      0, i, bi)
	{
	  work_stack.quick_push (i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  return phi_insertion_points;
}

/* Intersection and union of preds/succs for sbitmap based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  */

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

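  /* Seed DST from the first non-EXIT successor, then AND in the rest.
     If there is no such successor, the intersection over an empty set
     is conventionally all ones.  */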
  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Returns the list of basic blocks in the function in an order that guarantees
   that if a block X has just a single predecessor Y, then Y is after X in the
   ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
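      /* Two passes: the first just counts the chain length NP, the
	 second stores the chain at ORDER[N - NP .. N - 1], so the
	 deepest single-pred ancestor lands at the highest index.  */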
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.

   When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
   as executable.  */

edge
single_pred_edge_ignoring_loop_edges (basic_block bb,
				      bool ignore_not_executable)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
	 the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* We can safely ignore edges that are not executable.  */
      if (ignore_not_executable
	  && (e->flags & EDGE_EXECUTABLE) == 0)
	continue;

      /* If we have already seen a non-loop edge, then we must have
	 multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
	return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
	 it.  */
      retval = e;
    }

  return retval;
}