1*10d565efSmrg /* Combining of if-expressions on trees.
2*10d565efSmrg    Copyright (C) 2007-2017 Free Software Foundation, Inc.
3*10d565efSmrg    Contributed by Richard Guenther <rguenther@suse.de>
4*10d565efSmrg 
5*10d565efSmrg This file is part of GCC.
6*10d565efSmrg 
7*10d565efSmrg GCC is free software; you can redistribute it and/or modify
8*10d565efSmrg it under the terms of the GNU General Public License as published by
9*10d565efSmrg the Free Software Foundation; either version 3, or (at your option)
10*10d565efSmrg any later version.
11*10d565efSmrg 
12*10d565efSmrg GCC is distributed in the hope that it will be useful,
13*10d565efSmrg but WITHOUT ANY WARRANTY; without even the implied warranty of
14*10d565efSmrg MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15*10d565efSmrg GNU General Public License for more details.
16*10d565efSmrg 
17*10d565efSmrg You should have received a copy of the GNU General Public License
18*10d565efSmrg along with GCC; see the file COPYING3.  If not see
19*10d565efSmrg <http://www.gnu.org/licenses/>.  */
20*10d565efSmrg 
21*10d565efSmrg #include "config.h"
22*10d565efSmrg #include "system.h"
23*10d565efSmrg #include "coretypes.h"
24*10d565efSmrg #include "backend.h"
25*10d565efSmrg #include "rtl.h"
26*10d565efSmrg #include "tree.h"
27*10d565efSmrg #include "gimple.h"
28*10d565efSmrg #include "cfghooks.h"
29*10d565efSmrg #include "tree-pass.h"
30*10d565efSmrg #include "memmodel.h"
31*10d565efSmrg #include "tm_p.h"
32*10d565efSmrg #include "ssa.h"
33*10d565efSmrg #include "tree-pretty-print.h"
34*10d565efSmrg /* rtl is needed only because arm back-end requires it for
35*10d565efSmrg    BRANCH_COST.  */
36*10d565efSmrg #include "fold-const.h"
37*10d565efSmrg #include "cfganal.h"
38*10d565efSmrg #include "gimple-fold.h"
39*10d565efSmrg #include "gimple-iterator.h"
40*10d565efSmrg #include "gimplify-me.h"
41*10d565efSmrg #include "tree-cfg.h"
42*10d565efSmrg #include "tree-ssa.h"
43*10d565efSmrg 
44*10d565efSmrg #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
45*10d565efSmrg #define LOGICAL_OP_NON_SHORT_CIRCUIT \
46*10d565efSmrg   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
47*10d565efSmrg                 false) >= 2)
48*10d565efSmrg #endif
49*10d565efSmrg 
50*10d565efSmrg /* This pass combines COND_EXPRs to simplify control flow.  It
51*10d565efSmrg    currently recognizes bit tests and comparisons in chains that
52*10d565efSmrg    represent logical and or logical or of two COND_EXPRs.
53*10d565efSmrg 
   It does so by walking basic blocks in an approximate reverse
55*10d565efSmrg    post-dominator order and trying to match CFG patterns that
56*10d565efSmrg    represent logical and or logical or of two COND_EXPRs.
57*10d565efSmrg    Transformations are done if the COND_EXPR conditions match
58*10d565efSmrg    either
59*10d565efSmrg 
60*10d565efSmrg      1. two single bit tests X & (1 << Yn) (for logical and)
61*10d565efSmrg 
62*10d565efSmrg      2. two bit tests X & Yn (for logical or)
63*10d565efSmrg 
64*10d565efSmrg      3. two comparisons X OPn Y (for logical or)
65*10d565efSmrg 
66*10d565efSmrg    To simplify this pass, removing basic blocks and dead code
67*10d565efSmrg    is left to CFG cleanup and DCE.  */
68*10d565efSmrg 
69*10d565efSmrg 
70*10d565efSmrg /* Recognize a if-then-else CFG pattern starting to match with the
71*10d565efSmrg    COND_BB basic-block containing the COND_EXPR.  The recognized
72*10d565efSmrg    then end else blocks are stored to *THEN_BB and *ELSE_BB.  If
73*10d565efSmrg    *THEN_BB and/or *ELSE_BB are already set, they are required to
74*10d565efSmrg    match the then and else basic-blocks to make the pattern match.
75*10d565efSmrg    Returns true if the pattern matched, false otherwise.  */
76*10d565efSmrg 
77*10d565efSmrg static bool
78*10d565efSmrg recognize_if_then_else (basic_block cond_bb,
79*10d565efSmrg 			basic_block *then_bb, basic_block *else_bb)
80*10d565efSmrg {
81*10d565efSmrg   edge t, e;
82*10d565efSmrg 
83*10d565efSmrg   if (EDGE_COUNT (cond_bb->succs) != 2)
84*10d565efSmrg     return false;
85*10d565efSmrg 
86*10d565efSmrg   /* Find the then/else edges.  */
87*10d565efSmrg   t = EDGE_SUCC (cond_bb, 0);
88*10d565efSmrg   e = EDGE_SUCC (cond_bb, 1);
89*10d565efSmrg   if (!(t->flags & EDGE_TRUE_VALUE))
90*10d565efSmrg     std::swap (t, e);
91*10d565efSmrg   if (!(t->flags & EDGE_TRUE_VALUE)
92*10d565efSmrg       || !(e->flags & EDGE_FALSE_VALUE))
93*10d565efSmrg     return false;
94*10d565efSmrg 
95*10d565efSmrg   /* Check if the edge destinations point to the required block.  */
96*10d565efSmrg   if (*then_bb
97*10d565efSmrg       && t->dest != *then_bb)
98*10d565efSmrg     return false;
99*10d565efSmrg   if (*else_bb
100*10d565efSmrg       && e->dest != *else_bb)
101*10d565efSmrg     return false;
102*10d565efSmrg 
103*10d565efSmrg   if (!*then_bb)
104*10d565efSmrg     *then_bb = t->dest;
105*10d565efSmrg   if (!*else_bb)
106*10d565efSmrg     *else_bb = e->dest;
107*10d565efSmrg 
108*10d565efSmrg   return true;
109*10d565efSmrg }
110*10d565efSmrg 
111*10d565efSmrg /* Verify if the basic block BB does not have side-effects.  Return
112*10d565efSmrg    true in this case, else false.  */
113*10d565efSmrg 
114*10d565efSmrg static bool
115*10d565efSmrg bb_no_side_effects_p (basic_block bb)
116*10d565efSmrg {
117*10d565efSmrg   gimple_stmt_iterator gsi;
118*10d565efSmrg 
119*10d565efSmrg   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
120*10d565efSmrg     {
121*10d565efSmrg       gimple *stmt = gsi_stmt (gsi);
122*10d565efSmrg 
123*10d565efSmrg       if (is_gimple_debug (stmt))
124*10d565efSmrg 	continue;
125*10d565efSmrg 
126*10d565efSmrg       if (gimple_has_side_effects (stmt)
127*10d565efSmrg 	  || gimple_uses_undefined_value_p (stmt)
128*10d565efSmrg 	  || gimple_could_trap_p (stmt)
129*10d565efSmrg 	  || gimple_vuse (stmt)
130*10d565efSmrg 	  /* const calls don't match any of the above, yet they could
131*10d565efSmrg 	     still have some side-effects - they could contain
132*10d565efSmrg 	     gimple_could_trap_p statements, like floating point
133*10d565efSmrg 	     exceptions or integer division by zero.  See PR70586.
134*10d565efSmrg 	     FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
135*10d565efSmrg 	     should handle this.  */
136*10d565efSmrg 	  || is_gimple_call (stmt))
137*10d565efSmrg 	return false;
138*10d565efSmrg     }
139*10d565efSmrg 
140*10d565efSmrg   return true;
141*10d565efSmrg }
142*10d565efSmrg 
143*10d565efSmrg /* Return true if BB is an empty forwarder block to TO_BB.  */
144*10d565efSmrg 
145*10d565efSmrg static bool
146*10d565efSmrg forwarder_block_to (basic_block bb, basic_block to_bb)
147*10d565efSmrg {
148*10d565efSmrg   return empty_block_p (bb)
149*10d565efSmrg 	 && single_succ_p (bb)
150*10d565efSmrg 	 && single_succ (bb) == to_bb;
151*10d565efSmrg }
152*10d565efSmrg 
153*10d565efSmrg /* Verify if all PHI node arguments in DEST for edges from BB1 or
154*10d565efSmrg    BB2 to DEST are the same.  This makes the CFG merge point
155*10d565efSmrg    free from side-effects.  Return true in this case, else false.  */
156*10d565efSmrg 
157*10d565efSmrg static bool
158*10d565efSmrg same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
159*10d565efSmrg {
160*10d565efSmrg   edge e1 = find_edge (bb1, dest);
161*10d565efSmrg   edge e2 = find_edge (bb2, dest);
162*10d565efSmrg   gphi_iterator gsi;
163*10d565efSmrg   gphi *phi;
164*10d565efSmrg 
165*10d565efSmrg   for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
166*10d565efSmrg     {
167*10d565efSmrg       phi = gsi.phi ();
168*10d565efSmrg       if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
169*10d565efSmrg 			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
170*10d565efSmrg         return false;
171*10d565efSmrg     }
172*10d565efSmrg 
173*10d565efSmrg   return true;
174*10d565efSmrg }
175*10d565efSmrg 
176*10d565efSmrg /* Return the best representative SSA name for CANDIDATE which is used
177*10d565efSmrg    in a bit test.  */
178*10d565efSmrg 
179*10d565efSmrg static tree
180*10d565efSmrg get_name_for_bit_test (tree candidate)
181*10d565efSmrg {
182*10d565efSmrg   /* Skip single-use names in favor of using the name from a
183*10d565efSmrg      non-widening conversion definition.  */
184*10d565efSmrg   if (TREE_CODE (candidate) == SSA_NAME
185*10d565efSmrg       && has_single_use (candidate))
186*10d565efSmrg     {
187*10d565efSmrg       gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
188*10d565efSmrg       if (is_gimple_assign (def_stmt)
189*10d565efSmrg 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
190*10d565efSmrg 	{
191*10d565efSmrg 	  if (TYPE_PRECISION (TREE_TYPE (candidate))
192*10d565efSmrg 	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
193*10d565efSmrg 	    return gimple_assign_rhs1 (def_stmt);
194*10d565efSmrg 	}
195*10d565efSmrg     }
196*10d565efSmrg 
197*10d565efSmrg   return candidate;
198*10d565efSmrg }
199*10d565efSmrg 
/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   If INV is false the condition matched is LHS != 0, if INV is true
   it is LHS == 0 instead.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.
     The condition must compare an SSA name against zero with the
     expected (possibly inverted) equality code.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  Only non-widening
	 conversions and SSA name copies are looked through, as a
	 widening conversion could introduce bits not present in
	 the original value.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 — no shift found, so bit zero of the (possibly
	     conversion-stripped) original name is being tested.  */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)
     where the mask is a compile-time power of two; recover the bit
     number via tree_log2.  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  /* rhs1 is the 1 << bit expression; rhs2 is the name.  */
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  /* rhs2 is the 1 << bit expression; rhs1 is the name.  */
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
308*10d565efSmrg 
309*10d565efSmrg /* Recognize a bit test pattern in a GIMPLE_COND and its defining
310*10d565efSmrg    statements.  Store the name being tested in *NAME and the bits
311*10d565efSmrg    in *BITS.  The COND_EXPR computes *NAME & *BITS.
312*10d565efSmrg    Returns true if the pattern matched, false otherwise.  */
313*10d565efSmrg 
314*10d565efSmrg static bool
315*10d565efSmrg recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
316*10d565efSmrg {
317*10d565efSmrg   gimple *stmt;
318*10d565efSmrg 
319*10d565efSmrg   /* Get at the definition of the result of the bit test.  */
320*10d565efSmrg   if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
321*10d565efSmrg       || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
322*10d565efSmrg       || !integer_zerop (gimple_cond_rhs (cond)))
323*10d565efSmrg     return false;
324*10d565efSmrg   stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
325*10d565efSmrg   if (!is_gimple_assign (stmt)
326*10d565efSmrg       || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
327*10d565efSmrg     return false;
328*10d565efSmrg 
329*10d565efSmrg   *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
330*10d565efSmrg   *bits = gimple_assign_rhs2 (stmt);
331*10d565efSmrg 
332*10d565efSmrg   return true;
333*10d565efSmrg }
334*10d565efSmrg 
335*10d565efSmrg 
/* Update profile after code in outer_cond_bb was adjusted so
   outer_cond_bb has no condition.  All of outer_cond_bb's count
   then flows along the edge to inner_cond_bb, and the probability
   mass of the removed outer exit edge is folded into the matching
   exit edge of inner_cond_bb.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
			        basic_block outer_cond_bb)
{
  edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  /* outer2 is the other successor of outer_cond_bb, i.e. the edge
     that used to bypass inner_cond_bb.  */
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
		 ? EDGE_SUCC (outer_cond_bb, 1)
		 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  /* inner_taken is the inner exit edge that reaches the same
     destination as the bypassing outer edge.  */
  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  /* In the following we assume that inner_cond_bb has single predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  /* Path outer_cond_bb->(outer2) needs to be merged into path
     outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
     and probability of inner_not_taken updated.  */

  outer_to_inner->count = outer_cond_bb->count;
  inner_cond_bb->count = outer_cond_bb->count;
  inner_taken->count += outer2->count;
  outer2->count = 0;

  /* New inner_taken probability: chance of bypassing via outer2 plus
     chance of reaching inner_cond_bb and then taking inner_taken,
     clamped to REG_BR_PROB_BASE.  */
  inner_taken->probability = outer2->probability
			     + RDIV (outer_to_inner->probability
				     * inner_taken->probability,
				     REG_BR_PROB_BASE);
  if (inner_taken->probability > REG_BR_PROB_BASE)
    inner_taken->probability = REG_BR_PROB_BASE;
  inner_not_taken->probability = REG_BR_PROB_BASE
				 - inner_taken->probability;

  /* outer_cond_bb now falls through unconditionally to
     inner_cond_bb.  */
  outer_to_inner->probability = REG_BR_PROB_BASE;
  inner_cond_bb->frequency = outer_cond_bb->frequency;
  outer2->probability = 0;
}
379*10d565efSmrg 
/* If-convert on a and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   INNER_INV, OUTER_INV and RESULT_INV indicate whether the respective
   conditions (and the combined result) are inverted.
   Three shapes are handled: two single-bit tests of the same name,
   two multi-bit tests of the same name, and two general comparisons
   foldable into one.  In each case the combined condition is placed
   on the inner GIMPLE_COND and the outer one is made trivially
   true/false; the CFG itself is left for cfg_cleanup.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple *inner_stmt, *outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  /* Both blocks must terminate in a GIMPLE_COND.  */
  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & ((1 << bit1) | (1 << bit2)) == ((1 << bit1) | (1 << bit2)).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  Build the combined mask T = (1 << bit1) | (1 << bit2)
	 and the masked value T2 = name1 & T, gimplifying each before
	 the inner condition.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  Note the inverted INV
     arguments: this form fires for the || shape of the bit tests.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
      && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  Either test may
	 have its name and mask operands in either role, so try all
	 four pairings.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  std::swap (name2, bits2);
	  std::swap (name1, bits1);
	}
      else if (name1 == bits2)
	std::swap (name2, bits2);
      else if (bits1 == name2)
	std::swap (name1, bits1);
      else
	return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested make sure to end up with an integral
	 type for building the bit operations.  Convert everything
	 to the unsigned variant of the wider of the two mask types.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  Build name1 & (bits1 | bits2) and compare it against
	 zero, gimplifying the intermediate values in place.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).
	 invert_tree_comparison returns ERROR_MARK when the inversion
	 is not representable (e.g. for NaN-honoring float compares).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  /* The conditions did not fold to a single comparison; emit
	     an explicit A && B instead, if the target prefers
	     non-short-circuit evaluation.  */
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the conditional. */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  /* Apply the result inversion here so the shared code below
	     does not invert a second time.  */
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
					  GSI_SAME_STMT);
        }
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
619*10d565efSmrg 
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.
   Returns true if one of the four recognized && / || forms below
   was matched and successfully combined.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
	 i.e. the combined condition is p && q, with neither
	 sub-condition inverted.  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
	 i.e. p && !q; the second flag asks ifcombine_ifandif to
	 invert the outer condition.  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
	 i.e. p || q, expressed by inverting both conditions
	 (!p && !q == !(p || q)).  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
	 i.e. p || !q.  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
708*10d565efSmrg 
709*10d565efSmrg /* Recognize a CFG pattern and dispatch to the appropriate
710*10d565efSmrg    if-conversion helper.  We start with BB as the innermost
711*10d565efSmrg    worker basic-block.  Returns true if a transformation was done.  */
712*10d565efSmrg 
713*10d565efSmrg static bool
714*10d565efSmrg tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
715*10d565efSmrg {
716*10d565efSmrg   basic_block then_bb = NULL, else_bb = NULL;
717*10d565efSmrg 
718*10d565efSmrg   if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
719*10d565efSmrg     return false;
720*10d565efSmrg 
721*10d565efSmrg   /* Recognize && and || of two conditions with a common
722*10d565efSmrg      then/else block which entry edges we can merge.  That is:
723*10d565efSmrg        if (a || b)
724*10d565efSmrg 	 ;
725*10d565efSmrg      and
726*10d565efSmrg        if (a && b)
727*10d565efSmrg 	 ;
728*10d565efSmrg      This requires a single predecessor of the inner cond_bb.  */
729*10d565efSmrg   if (single_pred_p (inner_cond_bb)
730*10d565efSmrg       && bb_no_side_effects_p (inner_cond_bb))
731*10d565efSmrg     {
732*10d565efSmrg       basic_block outer_cond_bb = single_pred (inner_cond_bb);
733*10d565efSmrg 
734*10d565efSmrg       if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
735*10d565efSmrg 				   then_bb, else_bb, inner_cond_bb))
736*10d565efSmrg 	return true;
737*10d565efSmrg 
738*10d565efSmrg       if (forwarder_block_to (else_bb, then_bb))
739*10d565efSmrg 	{
740*10d565efSmrg 	  /* Other possibilities for the && form, if else_bb is
741*10d565efSmrg 	     empty forwarder block to then_bb.  Compared to the above simpler
742*10d565efSmrg 	     forms this can be treated as if then_bb and else_bb were swapped,
743*10d565efSmrg 	     and the corresponding inner_cond_bb not inverted because of that.
744*10d565efSmrg 	     For same_phi_args_p we look at equality of arguments between
745*10d565efSmrg 	     edge from outer_cond_bb and the forwarder block.  */
746*10d565efSmrg 	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
747*10d565efSmrg 				       then_bb, else_bb))
748*10d565efSmrg 	    return true;
749*10d565efSmrg 	}
750*10d565efSmrg       else if (forwarder_block_to (then_bb, else_bb))
751*10d565efSmrg 	{
752*10d565efSmrg 	  /* Other possibilities for the || form, if then_bb is
753*10d565efSmrg 	     empty forwarder block to else_bb.  Compared to the above simpler
754*10d565efSmrg 	     forms this can be treated as if then_bb and else_bb were swapped,
755*10d565efSmrg 	     and the corresponding inner_cond_bb not inverted because of that.
756*10d565efSmrg 	     For same_phi_args_p we look at equality of arguments between
757*10d565efSmrg 	     edge from outer_cond_bb and the forwarder block.  */
758*10d565efSmrg 	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
759*10d565efSmrg 				       then_bb, then_bb))
760*10d565efSmrg 	    return true;
761*10d565efSmrg 	}
762*10d565efSmrg     }
763*10d565efSmrg 
764*10d565efSmrg   return false;
765*10d565efSmrg }
766*10d565efSmrg 
767*10d565efSmrg /* Main entry for the tree if-conversion pass.  */
768*10d565efSmrg 
769*10d565efSmrg namespace {
770*10d565efSmrg 
/* Pass descriptor for the GIMPLE ifcombine pass; consumed by the
   pass manager via the pass_tree_ifcombine constructor below.  */

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
783*10d565efSmrg 
/* The ifcombine pass: combine conditions of adjacent GIMPLE_CONDs
   (see the file header: "Combining of if-expressions on trees").
   All per-function work happens in the execute method.  */

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine
795*10d565efSmrg 
796*10d565efSmrg unsigned int
797*10d565efSmrg pass_tree_ifcombine::execute (function *fun)
798*10d565efSmrg {
799*10d565efSmrg   basic_block *bbs;
800*10d565efSmrg   bool cfg_changed = false;
801*10d565efSmrg   int i;
802*10d565efSmrg 
803*10d565efSmrg   bbs = single_pred_before_succ_order ();
804*10d565efSmrg   calculate_dominance_info (CDI_DOMINATORS);
805*10d565efSmrg 
806*10d565efSmrg   /* Search every basic block for COND_EXPR we may be able to optimize.
807*10d565efSmrg 
808*10d565efSmrg      We walk the blocks in order that guarantees that a block with
809*10d565efSmrg      a single predecessor is processed after the predecessor.
810*10d565efSmrg      This ensures that we collapse outter ifs before visiting the
811*10d565efSmrg      inner ones, and also that we do not try to visit a removed
812*10d565efSmrg      block.  This is opposite of PHI-OPT, because we cascade the
813*10d565efSmrg      combining rather than cascading PHIs. */
814*10d565efSmrg   for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
815*10d565efSmrg     {
816*10d565efSmrg       basic_block bb = bbs[i];
817*10d565efSmrg       gimple *stmt = last_stmt (bb);
818*10d565efSmrg 
819*10d565efSmrg       if (stmt
820*10d565efSmrg 	  && gimple_code (stmt) == GIMPLE_COND)
821*10d565efSmrg 	if (tree_ssa_ifcombine_bb (bb))
822*10d565efSmrg 	  {
823*10d565efSmrg 	    /* Clear range info from all stmts in BB which is now executed
824*10d565efSmrg 	       conditional on a always true/false condition.  */
825*10d565efSmrg 	    reset_flow_sensitive_info_in_bb (bb);
826*10d565efSmrg 	    cfg_changed |= true;
827*10d565efSmrg 	  }
828*10d565efSmrg     }
829*10d565efSmrg 
830*10d565efSmrg   free (bbs);
831*10d565efSmrg 
832*10d565efSmrg   return cfg_changed ? TODO_cleanup_cfg : 0;
833*10d565efSmrg }
834*10d565efSmrg 
835*10d565efSmrg } // anon namespace
836*10d565efSmrg 
/* Factory entry point: allocate a fresh pass_tree_ifcombine instance
   for context CTXT.  The caller (the pass manager, presumably — confirm
   against the pass registration tables) takes ownership.  */

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}
842