/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"

namespace {
/* Store the data structures necessary for depth-first search.  */
class depth_first_search
{
public:
  depth_first_search ();

  basic_block execute (basic_block);
  void add_bb (basic_block);

private:
  /* Stack for backtracking during the algorithm.  */
  auto_vec<basic_block, 20> m_stack;

  /* Record of basic blocks already seen by depth-first search.  */
  auto_sbitmap m_visited_blocks;
};
}

/* Mark the back edges in DFS traversal.
   Return true if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && ! bitmap_bit_p (visited,
								  dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}
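
/* Illustration: in a CFG ENTRY -> A -> B -> A (a simple loop), the walk
   above assigns pre[A] = 1 and pre[B] = 2.  When the edge B -> A is
   examined, A has already been visited, pre[B] >= pre[A], and post[A]
   is still zero because A's DFS subtree is not finished, so B -> A is
   flagged EDGE_DFS_BACK.  A forward edge fails the test because
   pre[src] < pre[dest]; a cross edge fails it because the destination's
   postorder number has already been assigned.  */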

/* Find unreachable blocks.  An unreachable block will have the
   BB_REACHABLE bit clear in block->flags; a set bit indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
	{
	  basic_block dest = e->dest;

	  if (!(dest->flags & BB_REACHABLE))
	    {
	      *tos++ = dest;
	      dest->flags |= BB_REACHABLE;
	    }
	}
    }

  free (worklist);
}
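
/* A typical use of the above (a sketch, not modeled on one particular
   caller; handle_dead_block is hypothetical):

     find_unreachable_blocks ();
     FOR_EACH_BB_FN (bb, cfun)
       if (!(bb->flags & BB_REACHABLE))
	 handle_dead_block (bb);

   The BB_REACHABLE flags are only valid until the CFG is modified
   again.  */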

/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}


/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the pred and
   succ that edge represents can be determined, or given a pred and a
   succ, the index number of that edge can be returned.  This lets
   algorithms that would otherwise consume a lot of memory representing
   the normally full matrix of edge (pred,succ) use a single indexed
   vector, edge (EDGE_INDEX (pred, succ)), with no wasted space in the
   client code due to sparse flow graphs.  */
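
/* A sketch of the intended use (bb_a, bb_b and process_edge are
   hypothetical):

     struct edge_list *el = create_edge_list ();
     int ix = EDGE_INDEX (el, bb_a, bb_b);
     if (ix != EDGE_INDEX_NO_EDGE)
       process_edge (INDEX_EDGE (el, ix));
     free_edge_list (el);

   Clients can then key per-edge data off the dense index IX instead of
   a full (pred, succ) matrix.  */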

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return EXIT_BLOCK_PTR_FOR_FN (cfun);
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR_FOR_FN (cfun);
      return bb;
    }
}

/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* For abnormal edges, we don't make current_block control
     dependent because instructions that throw are always necessary
     anyway.  */
  edge e = find_edge (get_edge_src (edge_index), get_edge_dest (edge_index));
  if (e->flags & EDGE_ABNORMAL)
    return;

  if (get_edge_src (edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  else
    ending_block = find_pdom (get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = find_pdom (current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}
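
/* Worked example: in the diamond A -> {B, C}, B -> D, C -> D, the walk
   for the edge A -> B starts at B and stops at find_pdom (A) == D, so
   only B becomes control dependent on A -> B (and likewise C on
   A -> C).  D postdominates A and is therefore control dependent on
   neither edge.  */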

/* Record all blocks' control dependences on all edges in the edge
   list, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      m_el.quick_push (std::make_pair (e->src->index, e->dest->index));

  control_dependence_map.create (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  for (unsigned i = 0; i < control_dependence_map.length (); ++i)
    BITMAP_FREE (control_dependence_map[i]);
  control_dependence_map.release ();
  m_el.release ();
}

/* Returns the bitmap of edges the basic block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}
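
/* Sketch of the typical lifetime of this class; it requires
   post-dominator information to be available (compare how
   tree-ssa-dce.c uses it):

     calculate_dominance_info (CDI_POST_DOMINATORS);
     control_dependences *cd = new control_dependences ();
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     ...
     delete cd;
     free_dominance_info (CDI_POST_DOMINATORS);  */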


/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  /* Walk whichever of the two edge vectors is shorter.  */
  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor, and return its index in
   EDGE_LIST, or EDGE_INDEX_NO_EDGE if there is no such edge.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge from any block which has no
   successors to the exit block.  Some data flow equations require
   these edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}

/* This function adds a fake edge from each infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}

/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    /* Account for the ENTRY and EXIT blocks, which were not visited,
       when comparing against the total block count below.  */
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}


/* Helper routine for inverted_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG depth-first
   search in dominance.c.

   BB has to belong to a region of the CFG that is unreachable by
   inverted traversal from the exit, i.e. there is no control flow path
   from ENTRY to EXIT that contains this BB.  This can happen in two
   cases: if there is an infinite loop, or if there is a block with no
   successor (a call to a function that does not return).  Some RTL
   passes deal with this condition by calling
   connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().  However, those methods modify the
   CFG itself, which may not be desirable.

   Hence, we deal with the infinite loop/no return cases by identifying
   a unique basic block that can reach all blocks in such a region by
   inverted traversal.  This function returns a basic block that
   guarantees that all blocks in the region are reachable by starting
   an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }

  gcc_unreachable ();
}
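
/* For instance, for an endless loop L1 -> L2 -> L1 with no exit edges,
   dfs_find_deadend (L1) visits L1 and L2 and then sees L1 again;
   bitmap_set_bit returning false on the revisit stops the walk, and the
   most recently visited block (L2) is returned, which is a valid
   starting point for an inverted traversal of the whole cycle.  */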


/* Compute the reverse top sort order of the inverted CFG, i.e. starting
   from the exit block and following the edges backward (from successors
   to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop, a simple inverted traversal starting from
   the blocks with no successors can't visit all blocks.  To solve this
   problem, we first do an inverted traversal starting from those blocks.
   If any block is left that is not visited by the regular inverted
   traversal from EXIT, those blocks are in such a problematic region.
   Among those, we find one block that has a visited predecessor (which
   is an entry into such a region), look for a "dead end" from that
   block, and do another inverted traversal from that dead end.  */

void
inverted_post_order_compute (vec<int> *post_order,
			     sbitmap *start_points)
{
  basic_block bb;
  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the predecessor.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		post_order->quick_push (pred->index);
	    }
	  else
	    {
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		post_order->quick_push (bb->index);

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case in which the do-while condition requests another
	 iteration is when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  post_order->quick_push (EXIT_BLOCK);
}

/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   PRE_ORDER is really a preorder numbering of the graph.
   REV_POST_ORDER is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fn));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
					 include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}
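
/* A common usage pattern for a forward walk over the CFG (a sketch;
   process_block is hypothetical):

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; ++i)
       process_block (BASIC_BLOCK_FOR_FN (cfun, rpo[i]));
     free (rpo);

   Visiting blocks in reverse postorder sees each block after all of its
   predecessors, except across back edges.  */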

/* Compute a depth-first search on the _reverse_ graph, marking the
   nodes visited in a bitmap.

   The computation is split into three pieces:

   The depth_first_search constructor creates the necessary data
   structures.

   depth_first_search::add_bb () adds a basic block to the data
   structures.  The block will start the search.

   depth_first_search::execute () continues (or starts) the search
   using the block on the top of the stack, stopping when the stack
   is empty.

   The destructor releases the data structures.

   Thus, the user will probably construct the object, call add_bb ()
   to add a beginning basic block to the stack, call execute (),
   possibly add another bb to the stack and again call execute (),
   ..., and let the destructor clean up.  */
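
/* For a concrete user see connect_infinite_loops_to_exit above, which
   seeds the search with the EXIT block and repeatedly calls execute ()
   to obtain blocks that cannot reach EXIT:

     depth_first_search dfs;
     dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
     basic_block unvisited = dfs.execute (EXIT_BLOCK_PTR_FOR_FN (cfun));  */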

/* Initialize the data structures used for depth-first search on the
   reverse graph.  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  bitmap_clear (m_visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

void
depth_first_search::add_bb (basic_block bb)
{
  m_stack.quick_push (bb);
  bitmap_set_bit (m_visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

basic_block
depth_first_search::execute (basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (!m_stack.is_empty ())
    {
      bb = m_stack.pop ();

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (m_visited_blocks, e->src->index))
	  add_bb (e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (m_visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Perform a depth-first search from BB over vertices satisfying
   PREDICATE; if REVERSE, go against direction of edges.  Returns the
   number of blocks found and their list in RSLT.  RSLT can contain at
   most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;
  unsigned size;

  /* A bitmap to keep track of visited blocks.  Allocating it each time
     this function is called is not possible, since dfs_enumerate_from
     is often used on small (almost) disjoint parts of cfg (bodies of
     loops), and allocating a large sbitmap would lead to quadratic
     behavior.  */
  static sbitmap visited;
  static unsigned v_size;

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define UNMARK_VISITED(BB) (bitmap_clear_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  /* Resize the VISITED sbitmap if necessary.  */
  size = last_basic_block_for_fn (cfun);
  if (size < 10)
    size = 10;

  if (!visited)
    {
      visited = sbitmap_alloc (size);
      bitmap_clear (visited);
      v_size = size;
    }
  else if (v_size < size)
    {
      /* Ensure that we increase the size of the sbitmap exponentially.  */
      if (2 * v_size > size)
	size = 2 * v_size;

      visited = sbitmap_resize (visited, size, 0);
      v_size = size;
    }

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
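
/* Sketch of a forward enumeration over the blocks of a loop body,
   mirroring how loop bodies are gathered in cfgloop.c (in_loop_p is
   hypothetical):

     static bool
     in_loop_p (const_basic_block bb, const void *data)
     {
       return bb->loop_father == (const struct loop *) data;
     }

     basic_block *body = XNEWVEC (basic_block, loop->num_nodes);
     int n = dfs_enumerate_from (loop->header, 0, in_loop_p,
				 body, loop->num_nodes, loop);  */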


/* Compute dominance frontiers, ala Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */
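
/* Example: in the diamond A -> {B, C}, B -> D, C -> D, block D is a
   join point.  Walking up from predecessor B stops at D's immediate
   dominator A, placing D into the frontier of B; likewise D lands in
   the frontier of C.  A itself dominates D, so its frontier stays
   empty.  */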


static void
compute_dominance_frontiers_1 (bitmap_head *frontiers)
{
  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
	{
	  FOR_EACH_EDGE (p, ei, b->preds)
	    {
	      basic_block runner = p->src;
	      basic_block domsb;
	      if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      domsb = get_immediate_dominator (CDI_DOMINATORS, b);
	      while (runner != domsb)
		{
		  if (!bitmap_set_bit (&frontiers[runner->index],
				       b->index))
		    break;
		  runner = get_immediate_dominator (CDI_DOMINATORS,
						    runner);
		}
	    }
	}
    }
}


void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  compute_dominance_frontiers_1 (frontiers);

  timevar_pop (TV_DOM_FRONTIERS);
}

/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  /* Each block can appear at most twice on the work-stack.  */
  auto_vec<int> work_stack (2 * n_basic_blocks_for_fn (cfun));
  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
     vec::quick_push here for speed.  This is safe because we know that
     the number of definition blocks is no greater than the number of
     basic blocks, which is the initial capacity of WORK_STACK.  */
  EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
    work_stack.quick_push (bb_index);

  /* Pop a block off the worklist, add every block that appears in
     the original block's DF that we have not already processed to
     the worklist.  Iterate until the worklist is empty.  Blocks
     which are added to the worklist are potential sites for
     PHI nodes.  */
  while (work_stack.length () > 0)
    {
      bb_index = work_stack.pop ();

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work stack.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
				      0, i, bi)
	{
	  work_stack.quick_push (i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  return phi_insertion_points;
}
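
/* Sketch of how PHI placement uses this (compare rewrite_into_ssa and
   insert_phi_nodes in tree-into-ssa.c; DEF_BLOCKS is assumed to be
   set up already):

     bitmap_head *dfs = XNEWVEC (bitmap_head,
				 last_basic_block_for_fn (cfun));
     FOR_EACH_BB_FN (bb, cfun)
       bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
     compute_dominance_frontiers (dfs);
     bitmap idf = compute_idf (def_blocks, dfs);
     ...
     BITMAP_FREE (idf);  */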

/* Intersection and union of preds/succs for sbitmap based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  */
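
/* For example, an "all paths" backward problem would recompute a
   block's out-set as the intersection over its successors:

     bitmap_intersection_of_succs (out[bb->index], in, bb);

   iterating until no set changes; the union variants suit "any path"
   problems such as liveness.  (IN and OUT here stand for hypothetical
   sbitmap vectors indexed by block number.)  */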

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Returns the list of basic blocks in the function in an order that guarantees
   that if a block X has just a single predecessor Y, then Y is after X in the
   ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.

   When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
   as executable.  */

edge
single_pred_edge_ignoring_loop_edges (basic_block bb,
				      bool ignore_not_executable)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
	 the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* We can safely ignore edges that are not executable.  */
      if (ignore_not_executable
	  && (e->flags & EDGE_EXECUTABLE) == 0)
	continue;

      /* If we have already seen a non-loop edge, then we must have
	 multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
	return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
	 it.  */
      retval = e;
    }

  return retval;
}