xref: /openbsd/gnu/gcc/gcc/tree-cfgcleanup.c (revision 404b540a)
1 /* CFG cleanup for trees.
2    Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "toplev.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
49 #include "tree-scalar-evolution.h"
50 
51 /* Remove any fallthru edge from EV.  Return true if an edge was removed.  */
52 
53 static bool
remove_fallthru_edge(VEC (edge,gc)* ev)54 remove_fallthru_edge (VEC(edge,gc) *ev)
55 {
56   edge_iterator ei;
57   edge e;
58 
59   FOR_EACH_EDGE (e, ei, ev)
60     if ((e->flags & EDGE_FALLTHRU) != 0)
61       {
62 	remove_edge (e);
63 	return true;
64       }
65   return false;
66 }
67 
/* Disconnect an unreachable block in the control expression starting
   at block BB.  BSI points at the control statement (a COND_EXPR or
   SWITCH_EXPR) ending BB.  If folding the condition determines the
   taken edge, every other outgoing edge is removed; in all cases the
   control statement itself is deleted and the surviving edge becomes
   a plain fallthru.  Return true if any edge was removed.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;

      /* Folding may raise strict-overflow warnings; defer them so we
	 only emit them if we actually end up removing an edge.  */
      fold_defer_overflow_warnings ();

      switch (TREE_CODE (expr))
	{
	case COND_EXPR:
	  val = fold (COND_EXPR_COND (expr));
	  break;

	case SWITCH_EXPR:
	  val = fold (SWITCH_COND (expr));
	  /* A switch is only resolvable when its condition folds to a
	     constant; otherwise keep all edges and bail out.  */
	  if (TREE_CODE (val) != INTEGER_CST)
	    {
	      fold_undefer_and_ignore_overflow_warnings ();
	      return false;
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	{
	  fold_undefer_and_ignore_overflow_warnings ();
	  return false;
	}

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      /* Emit the deferred overflow warnings exactly once, now
		 that the folding is known to have an effect.  */
	      if (!warned)
		{
		  fold_undefer_overflow_warnings
		    (true, expr, WARN_STRICT_OVERFLOW_CONDITIONAL);
		  warned = true;
		}

	      /* Fold the removed edge's profile into the taken edge.  */
	      taken_edge->probability += e->probability;
	      taken_edge->count += e->count;
	      remove_edge (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (!warned)
	fold_undefer_and_ignore_overflow_warnings ();
      /* Accumulated probabilities may overshoot; clamp to the base.  */
      if (taken_edge->probability > REG_BR_PROB_BASE)
	taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  /* The control statement is no longer needed; its sole remaining
     successor edge is an unconditional fallthru.  */
  bsi_remove (&bsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}
149 
/* A list of all the noreturn calls passed to modify_stmt.
   cleanup_control_flow uses it to detect cases where a mid-block
   indirect call has been turned into a noreturn call.  When this
   happens, all the instructions after the call are no longer
   reachable and must be deleted as dead.  The vector is drained
   (VEC_pop) by cleanup_control_flow below.  */

VEC(tree,gc) *modified_noreturn_calls;
157 
/* Try to remove superfluous control structures.  Walk every basic
   block and (1) split blocks after newly-discovered mid-block
   noreturn calls, (2) fold away COND_EXPR/SWITCH_EXPR with known
   outcomes, (3) eliminate computed gotos with compile-time
   destinations, and (4) prune fallthru edges after noreturn calls.
   Return true if the CFG changed.  */

static bool
cleanup_control_flow (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool retval = false;
  tree stmt;

  /* Detect cases where a mid-block call is now known not to return.
     Splitting the block leaves the dead instructions in a separate,
     unreachable block that later cleanup deletes.  */
  while (VEC_length (tree, modified_noreturn_calls))
    {
      stmt = VEC_pop (tree, modified_noreturn_calls);
      bb = bb_for_stmt (stmt);
      if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
	split_block (bb, stmt);
    }

  FOR_EACH_BB (bb)
    {
      bsi = bsi_last (bb);

      /* If the last statement of the block could throw and now cannot,
	 we need to prune cfg.  */
      retval |= tree_purge_dead_eh_edges (bb);

      if (bsi_end_p (bsi))
	continue;

      stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) == COND_EXPR
	  || TREE_CODE (stmt) == SWITCH_EXPR)
	retval |= cleanup_control_expr_graph (bb, bsi);
      /* If we had a computed goto which has a compile-time determinable
	 destination, then we can eliminate the goto.  */
      else if (TREE_CODE (stmt) == GOTO_EXPR
	       && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0))
		   == LABEL_DECL))
	{
	  edge e;
	  tree label;
	  edge_iterator ei;
	  basic_block target_block;
	  bool removed_edge = false;

	  /* First look at all the outgoing edges.  Delete any outgoing
	     edges which do not go to the right block.  For the one
	     edge which goes to the right block, fix up its flags.  */
	  label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
	  target_block = label_to_block (label);
	  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	    {
	      if (e->dest != target_block)
		{
		  removed_edge = true;
		  remove_edge (e);
		}
	      else
	        {
		  /* Turn off the EDGE_ABNORMAL flag.  */
		  e->flags &= ~EDGE_ABNORMAL;

		  /* And set EDGE_FALLTHRU.  */
		  e->flags |= EDGE_FALLTHRU;
		  ei_next (&ei);
		}
	    }

	  /* If we removed one or more edges, then we will need to fix the
	     dominators.  It may be possible to incrementally update them.  */
	  if (removed_edge)
	    free_dominance_info (CDI_DOMINATORS);

	  /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
	     relevant information we need.  */
	  bsi_remove (&bsi, true);
	  retval = true;
	}

      /* Check for indirect calls that have been turned into
	 noreturn calls.  */
      else if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
	{
	  free_dominance_info (CDI_DOMINATORS);
	  retval = true;
	}
    }
  return retval;
}
250 
251 /* Return true if basic block BB does nothing except pass control
252    flow to another block and that we can safely insert a label at
253    the start of the successor block.
254 
255    As a precondition, we require that BB be not equal to
256    ENTRY_BLOCK_PTR.  */
257 
258 static bool
tree_forwarder_block_p(basic_block bb,bool phi_wanted)259 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
260 {
261   block_stmt_iterator bsi;
262   edge_iterator ei;
263   edge e, succ;
264   basic_block dest;
265 
266   /* BB must have a single outgoing edge.  */
267   if (single_succ_p (bb) != 1
268       /* If PHI_WANTED is false, BB must not have any PHI nodes.
269 	 Otherwise, BB must have PHI nodes.  */
270       || (phi_nodes (bb) != NULL_TREE) != phi_wanted
271       /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
272       || single_succ (bb) == EXIT_BLOCK_PTR
273       /* Nor should this be an infinite loop.  */
274       || single_succ (bb) == bb
275       /* BB may not have an abnormal outgoing edge.  */
276       || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
277     return false;
278 
279 #if ENABLE_CHECKING
280   gcc_assert (bb != ENTRY_BLOCK_PTR);
281 #endif
282 
283   /* Now walk through the statements backward.  We can ignore labels,
284      anything else means this is not a forwarder block.  */
285   for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
286     {
287       tree stmt = bsi_stmt (bsi);
288 
289       switch (TREE_CODE (stmt))
290 	{
291 	case LABEL_EXPR:
292 	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
293 	    return false;
294 	  break;
295 
296 	default:
297 	  return false;
298 	}
299     }
300 
301   if (find_edge (ENTRY_BLOCK_PTR, bb))
302     return false;
303 
304   if (current_loops)
305     {
306       basic_block dest;
307       /* Protect loop latches, headers and preheaders.  */
308       if (bb->loop_father->header == bb)
309 	return false;
310       dest = EDGE_SUCC (bb, 0)->dest;
311 
312       if (dest->loop_father->header == dest)
313 	return false;
314     }
315 
316   /* If we have an EH edge leaving this block, make sure that the
317      destination of this block has only one predecessor.  This ensures
318      that we don't get into the situation where we try to remove two
319      forwarders that go to the same basic block but are handlers for
320      different EH regions.  */
321   succ = single_succ_edge (bb);
322   dest = succ->dest;
323   FOR_EACH_EDGE (e, ei, bb->preds)
324     {
325       if (e->flags & EDGE_EH)
326         {
327 	  if (!single_pred_p (dest))
328 	    return false;
329 	}
330     }
331 
332   return true;
333 }
334 
335 /* Return true if BB has at least one abnormal incoming edge.  */
336 
337 static inline bool
has_abnormal_incoming_edge_p(basic_block bb)338 has_abnormal_incoming_edge_p (basic_block bb)
339 {
340   edge e;
341   edge_iterator ei;
342 
343   FOR_EACH_EDGE (e, ei, bb->preds)
344     if (e->flags & EDGE_ABNORMAL)
345       return true;
346 
347   return false;
348 }
349 
350 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
351    those alternatives are equal in each of the PHI nodes, then return
352    true, else return false.  */
353 
354 static bool
phi_alternatives_equal(basic_block dest,edge e1,edge e2)355 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
356 {
357   int n1 = e1->dest_idx;
358   int n2 = e2->dest_idx;
359   tree phi;
360 
361   for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
362     {
363       tree val1 = PHI_ARG_DEF (phi, n1);
364       tree val2 = PHI_ARG_DEF (phi, n2);
365 
366       gcc_assert (val1 != NULL_TREE);
367       gcc_assert (val2 != NULL_TREE);
368 
369       if (!operand_equal_for_phi_arg_p (val1, val2))
370 	return false;
371     }
372 
373   return true;
374 }
375 
/* Removes forwarder block BB.  Returns false if this failed.  If a new
   forwarder block is created due to redirection of edges, it is
   stored to worklist.  *WORKLIST points one past the last queued
   block; new entries are appended through it.  */

static bool
remove_forwarder_block (basic_block bb, basic_block **worklist)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  tree label;
  tree phi;
  edge_iterator ei;
  block_stmt_iterator bsi, bsi_to;
  bool seen_abnormal_edge = false;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not merge
     it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (has_abnormal_incoming_edge_p (bb))
    {
      seen_abnormal_edge = true;

      if (has_abnormal_incoming_edge_p (dest)
	  || phi_nodes (dest) != NULL_TREE)
	return false;
    }

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (phi_nodes (dest))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  s = find_edge (e->src, dest);
	  if (!s)
	    continue;

	  if (!phi_alternatives_equal (dest, succ, s))
	    return false;
	}
    }

  /* Redirect each incoming edge of BB to DEST.  The iterator form is
     used because redirection removes edges from BB->preds.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* If there is an abnormal edge, redirect it anyway, and
	     move the labels to the new block to make it legal.  */
	  s = redirect_edge_succ_nodup (e, dest);
	}
      else
	s = redirect_edge_and_branch (e, dest);

      if (s == e)
	{
	  /* Create arguments for the phi nodes, since the edge was not
	     here before.  */
	  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
	    add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
	}
      else
	{
	  /* The source basic block might become a forwarder.  We know
	     that it was not a forwarder before, since it used to have
	     at least two outgoing edges, so we may just add it to
	     worklist.  */
	  if (tree_forwarder_block_p (s->src, false))
	    *(*worklist)++ = s->src;
	}
    }

  if (seen_abnormal_edge)
    {
      /* Move the labels to the new block, so that the redirection of
	 the abnormal edges works.  */

      bsi_to = bsi_start (dest);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  label = bsi_stmt (bsi);
	  gcc_assert (TREE_CODE (label) == LABEL_EXPR);
	  bsi_remove (&bsi, false);
	  bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
	}
    }

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
	{
	  /* Shortcut to avoid calling (relatively expensive)
	     nearest_common_dominator unless necessary.  */
	  dom = dombb;
	}
      else
	dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
510 
511 /* Removes forwarder blocks.  */
512 
513 static bool
cleanup_forwarder_blocks(void)514 cleanup_forwarder_blocks (void)
515 {
516   basic_block bb;
517   bool changed = false;
518   basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
519   basic_block *current = worklist;
520 
521   FOR_EACH_BB (bb)
522     {
523       if (tree_forwarder_block_p (bb, false))
524 	*current++ = bb;
525     }
526 
527   while (current != worklist)
528     {
529       bb = *--current;
530       changed |= remove_forwarder_block (bb, &current);
531     }
532 
533   free (worklist);
534   return changed;
535 }
536 
537 /* Do one round of CFG cleanup.  */
538 
539 static bool
cleanup_tree_cfg_1(void)540 cleanup_tree_cfg_1 (void)
541 {
542   bool retval;
543 
544   retval = cleanup_control_flow ();
545   retval |= delete_unreachable_blocks ();
546 
547   /* Forwarder blocks can carry line number information which is
548      useful when debugging, so we only clean them up when
549      optimizing.  */
550 
551   if (optimize > 0)
552     {
553       /* cleanup_forwarder_blocks can redirect edges out of
554 	 SWITCH_EXPRs, which can get expensive.  So we want to enable
555 	 recording of edge to CASE_LABEL_EXPR mappings around the call
556 	 to cleanup_forwarder_blocks.  */
557       start_recording_case_labels ();
558       retval |= cleanup_forwarder_blocks ();
559       end_recording_case_labels ();
560     }
561 
562   /* Merging the blocks may create new opportunities for folding
563      conditional branches (due to the elimination of single-valued PHI
564      nodes).  */
565   retval |= merge_seq_blocks ();
566 
567   return retval;
568 }
569 
570 
571 /* Remove unreachable blocks and other miscellaneous clean up work.
572    Return true if the flowgraph was modified, false otherwise.  */
573 
574 bool
cleanup_tree_cfg(void)575 cleanup_tree_cfg (void)
576 {
577   bool retval, changed;
578 
579   timevar_push (TV_TREE_CLEANUP_CFG);
580 
581   /* Iterate until there are no more cleanups left to do.  If any
582      iteration changed the flowgraph, set CHANGED to true.  */
583   changed = false;
584   do
585     {
586       retval = cleanup_tree_cfg_1 ();
587       changed |= retval;
588     }
589   while (retval);
590 
591   compact_blocks ();
592 
593 #ifdef ENABLE_CHECKING
594   verify_flow_info ();
595 #endif
596 
597   timevar_pop (TV_TREE_CLEANUP_CFG);
598 
599   return changed;
600 }
601 
602 /* Cleanup cfg and repair loop structures.  */
603 
604 void
cleanup_tree_cfg_loop(void)605 cleanup_tree_cfg_loop (void)
606 {
607   bool changed = cleanup_tree_cfg ();
608 
609   if (changed)
610     {
611       bitmap changed_bbs = BITMAP_ALLOC (NULL);
612       fix_loop_structure (current_loops, changed_bbs);
613       calculate_dominance_info (CDI_DOMINATORS);
614 
615       /* This usually does nothing.  But sometimes parts of cfg that originally
616 	 were inside a loop get out of it due to edge removal (since they
617 	 become unreachable by back edges from latch).  */
618       rewrite_into_loop_closed_ssa (changed_bbs, TODO_update_ssa);
619 
620       BITMAP_FREE (changed_bbs);
621 
622 #ifdef ENABLE_CHECKING
623       verify_loop_structure (current_loops);
624 #endif
625       scev_reset ();
626     }
627 }
628 
/* Merge the PHI nodes at BB into those at BB's sole successor, then
   delete BB.  Every incoming edge of BB is redirected to the
   successor DEST, and the PHI arguments that used to flow through BB
   are re-attached to DEST's PHI nodes.  */

static void
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  tree label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      tree phi;

      s = find_edge (e->src, dest);
      if (s)
	{
	  /* We already have an edge S from E->src to DEST.  If S and
	     E->dest's sole successor edge have the same PHI arguments
	     at DEST, redirect S to DEST.  */
	  if (phi_alternatives_equal (dest, s, succ))
	    {
	      e = redirect_edge_and_branch (e, dest);
	      PENDING_STMT (e) = NULL_TREE;
	      continue;
	    }

	  /* PHI arguments are different.  Create a forwarder block by
	     splitting E so that we can merge PHI arguments on E to
	     DEST.  */
	  e = single_succ_edge (split_edge (e));
	}

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
	 destination of E.  */
      for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
	{
	  tree def = PHI_ARG_DEF (phi, succ->dest_idx);

	  if (TREE_CODE (def) == SSA_NAME)
	    {
	      tree var;

	      /* If DEF is one of the results of PHI nodes removed during
		 redirection, replace it with the PHI argument that used
		 to be on E.  PENDING_STMT (e) holds the (old, new)
		 argument pairs as a TREE_PURPOSE/TREE_VALUE chain.  */
	      for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
		{
		  tree old_arg = TREE_PURPOSE (var);
		  tree new_arg = TREE_VALUE (var);

		  if (def == old_arg)
		    {
		      def = new_arg;
		      break;
		    }
		}
	    }

	  add_phi_arg (phi, def, s);
	}

      PENDING_STMT (e) = NULL;
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
	 nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
}
733 
/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

/* Pass entry point.  Gather candidate forwarder blocks with PHI
   nodes onto a worklist, then merge each into its successor.
   Always returns 0 (no extra TODO flags).  */

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
	continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
	 nodes.  */
      if (!phi_nodes (dest)
	  /* We don't want to deal with a basic block with
	     abnormal edges.  */
	  || has_abnormal_incoming_edge_p (bb))
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
	{
	  /* If BB does not dominate DEST, then the PHI nodes at
	     DEST must be the only users of the results of the PHI
	     nodes at BB.  */
	  *current++ = bb;
	}
      else
	{
	  tree phi;
	  unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

	  /* BB dominates DEST.  There may be many users of the PHI
	     nodes in BB.  However, there is still a trivial case we
	     can handle.  If the result of every PHI in BB is used
	     only by a PHI in DEST, then we can trivially merge the
	     PHI nodes from BB into DEST.  */
	  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	    {
	      tree result = PHI_RESULT (phi);
	      use_operand_p imm_use;
	      tree use_stmt;

	      /* If the PHI's result is never used, then we can just
		 ignore it.  */
	      if (has_zero_uses (result))
		continue;

	      /* Get the single use of the result of this PHI node.  */
	      if (!single_imm_use (result, &imm_use, &use_stmt)
		  || TREE_CODE (use_stmt) != PHI_NODE
		  || bb_for_stmt (use_stmt) != dest
		  || PHI_ARG_DEF (use_stmt, dest_idx) != result)
		break;
	    }

	  /* If the loop above iterated through all the PHI nodes
	     in BB, then we can merge the PHIs from BB into DEST.
	     PHI is NULL exactly when no early break occurred.  */
	  if (!phi)
	    *current++ = bb;
	}
    }

  /* Now let's drain WORKLIST.  */
  while (current != worklist)
    {
      bb = *--current;
      remove_forwarder_block_with_phi (bb);
    }

  free (worklist);
  return 0;
}
840 
/* Gate function for the PHI-merging pass: it is always enabled.  */

static bool
gate_merge_phi (void)
{
  return true;
}
846 
/* Pass descriptor for the "mergephi" pass; executes merge_phi_nodes
   on functions with a CFG in SSA form.  */

struct tree_opt_pass pass_merge_phi = {
  "mergephi",			/* name */
  gate_merge_phi,		/* gate */
  merge_phi_nodes,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_MERGE_PHI,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect	/* todo_flags_finish */
  | TODO_verify_ssa,
  0				/* letter */
};
863