110d565efSmrg /* Combining of if-expressions on trees.
2*ec02198aSmrg    Copyright (C) 2007-2020 Free Software Foundation, Inc.
310d565efSmrg    Contributed by Richard Guenther <rguenther@suse.de>
410d565efSmrg 
510d565efSmrg This file is part of GCC.
610d565efSmrg 
710d565efSmrg GCC is free software; you can redistribute it and/or modify
810d565efSmrg it under the terms of the GNU General Public License as published by
910d565efSmrg the Free Software Foundation; either version 3, or (at your option)
1010d565efSmrg any later version.
1110d565efSmrg 
1210d565efSmrg GCC is distributed in the hope that it will be useful,
1310d565efSmrg but WITHOUT ANY WARRANTY; without even the implied warranty of
1410d565efSmrg MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
1510d565efSmrg GNU General Public License for more details.
1610d565efSmrg 
1710d565efSmrg You should have received a copy of the GNU General Public License
1810d565efSmrg along with GCC; see the file COPYING3.  If not see
1910d565efSmrg <http://www.gnu.org/licenses/>.  */
2010d565efSmrg 
2110d565efSmrg #include "config.h"
2210d565efSmrg #include "system.h"
2310d565efSmrg #include "coretypes.h"
2410d565efSmrg #include "backend.h"
2510d565efSmrg #include "rtl.h"
2610d565efSmrg #include "tree.h"
2710d565efSmrg #include "gimple.h"
2810d565efSmrg #include "cfghooks.h"
2910d565efSmrg #include "tree-pass.h"
3010d565efSmrg #include "memmodel.h"
3110d565efSmrg #include "tm_p.h"
3210d565efSmrg #include "ssa.h"
3310d565efSmrg #include "tree-pretty-print.h"
3410d565efSmrg /* rtl.h (included above) is needed only because the arm back-end
3510d565efSmrg    requires it for BRANCH_COST.  */
3610d565efSmrg #include "fold-const.h"
3710d565efSmrg #include "cfganal.h"
3810d565efSmrg #include "gimple-fold.h"
3910d565efSmrg #include "gimple-iterator.h"
4010d565efSmrg #include "gimplify-me.h"
4110d565efSmrg #include "tree-cfg.h"
4210d565efSmrg #include "tree-ssa.h"
4310d565efSmrg 
4410d565efSmrg #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4510d565efSmrg #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4610d565efSmrg   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4710d565efSmrg                 false) >= 2)
4810d565efSmrg #endif
4910d565efSmrg 
5010d565efSmrg /* This pass combines COND_EXPRs to simplify control flow.  It
5110d565efSmrg    currently recognizes bit tests and comparisons in chains that
5210d565efSmrg    represent logical and or logical or of two COND_EXPRs.
5310d565efSmrg 
5410d565efSmrg    It does so by walking basic blocks in an approximate reverse
5510d565efSmrg    post-dominator order and trying to match CFG patterns that
5610d565efSmrg    represent logical and or logical or of two COND_EXPRs.
5710d565efSmrg    Transformations are done if the COND_EXPR conditions match
5810d565efSmrg    either
5910d565efSmrg 
6010d565efSmrg      1. two single bit tests X & (1 << Yn) (for logical and)
6110d565efSmrg 
6210d565efSmrg      2. two bit tests X & Yn (for logical or)
6310d565efSmrg 
6410d565efSmrg      3. two comparisons X OPn Y (for logical or)
6510d565efSmrg 
6610d565efSmrg    To simplify this pass, removing basic blocks and dead code
6710d565efSmrg    is left to CFG cleanup and DCE.  */
6810d565efSmrg 
6910d565efSmrg 
7010d565efSmrg /* Recognize a if-then-else CFG pattern starting to match with the
7110d565efSmrg    COND_BB basic-block containing the COND_EXPR.  The recognized
7210d565efSmrg    then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
7310d565efSmrg    *THEN_BB and/or *ELSE_BB are already set, they are required to
7410d565efSmrg    match the then and else basic-blocks to make the pattern match.
7510d565efSmrg    Returns true if the pattern matched, false otherwise.  */
7610d565efSmrg 
7710d565efSmrg static bool
recognize_if_then_else(basic_block cond_bb,basic_block * then_bb,basic_block * else_bb)7810d565efSmrg recognize_if_then_else (basic_block cond_bb,
7910d565efSmrg 			basic_block *then_bb, basic_block *else_bb)
8010d565efSmrg {
8110d565efSmrg   edge t, e;
8210d565efSmrg 
  /* A conditional block has exactly two successors.  */
8310d565efSmrg   if (EDGE_COUNT (cond_bb->succs) != 2)
8410d565efSmrg     return false;
8510d565efSmrg 
8610d565efSmrg   /* Find the then/else edges.  */
8710d565efSmrg   t = EDGE_SUCC (cond_bb, 0);
8810d565efSmrg   e = EDGE_SUCC (cond_bb, 1);
  /* Canonicalize so that T is the EDGE_TRUE_VALUE edge and E the
     EDGE_FALSE_VALUE edge; if the flags are not set that way even
     after the swap, this is not a recognizable condition.  */
8910d565efSmrg   if (!(t->flags & EDGE_TRUE_VALUE))
9010d565efSmrg     std::swap (t, e);
9110d565efSmrg   if (!(t->flags & EDGE_TRUE_VALUE)
9210d565efSmrg       || !(e->flags & EDGE_FALSE_VALUE))
9310d565efSmrg     return false;
9410d565efSmrg 
9510d565efSmrg   /* Check if the edge destinations point to the required block.  */
9610d565efSmrg   if (*then_bb
9710d565efSmrg       && t->dest != *then_bb)
9810d565efSmrg     return false;
9910d565efSmrg   if (*else_bb
10010d565efSmrg       && e->dest != *else_bb)
10110d565efSmrg     return false;
10210d565efSmrg 
  /* Fill in the unconstrained output blocks for the caller.  */
10310d565efSmrg   if (!*then_bb)
10410d565efSmrg     *then_bb = t->dest;
10510d565efSmrg   if (!*else_bb)
10610d565efSmrg     *else_bb = e->dest;
10710d565efSmrg 
10810d565efSmrg   return true;
10910d565efSmrg }
11010d565efSmrg 
11110d565efSmrg /* Verify if the basic block BB does not have side-effects.  Return
11210d565efSmrg    true in this case, else false.  */
11310d565efSmrg 
11410d565efSmrg static bool
bb_no_side_effects_p(basic_block bb)11510d565efSmrg bb_no_side_effects_p (basic_block bb)
11610d565efSmrg {
11710d565efSmrg   gimple_stmt_iterator gsi;
11810d565efSmrg 
11910d565efSmrg   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
12010d565efSmrg     {
12110d565efSmrg       gimple *stmt = gsi_stmt (gsi);
12210d565efSmrg 
      /* Debug statements carry no executable semantics.  */
12310d565efSmrg       if (is_gimple_debug (stmt))
12410d565efSmrg 	continue;
12510d565efSmrg 
      /* NOTE(review): gimple_vuse rejects any statement touching memory,
	 presumably because after combining the block may be executed on
	 paths where it previously was not — confirm against callers.  */
12610d565efSmrg       if (gimple_has_side_effects (stmt)
12710d565efSmrg 	  || gimple_uses_undefined_value_p (stmt)
12810d565efSmrg 	  || gimple_could_trap_p (stmt)
12910d565efSmrg 	  || gimple_vuse (stmt)
13010d565efSmrg 	  /* const calls don't match any of the above, yet they could
13110d565efSmrg 	     still have some side-effects - they could contain
13210d565efSmrg 	     gimple_could_trap_p statements, like floating point
13310d565efSmrg 	     exceptions or integer division by zero.  See PR70586.
13410d565efSmrg 	     FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
13510d565efSmrg 	     should handle this.  */
13610d565efSmrg 	  || is_gimple_call (stmt))
13710d565efSmrg 	return false;
13810d565efSmrg     }
13910d565efSmrg 
14010d565efSmrg   return true;
14110d565efSmrg }
14210d565efSmrg 
14310d565efSmrg /* Return true if BB is an empty forwarder block to TO_BB.  */
14410d565efSmrg 
14510d565efSmrg static bool
forwarder_block_to(basic_block bb,basic_block to_bb)14610d565efSmrg forwarder_block_to (basic_block bb, basic_block to_bb)
14710d565efSmrg {
  /* BB must contain no (non-trivial) statements and fall through
     unconditionally into TO_BB.  */
14810d565efSmrg   return empty_block_p (bb)
14910d565efSmrg 	 && single_succ_p (bb)
15010d565efSmrg 	 && single_succ (bb) == to_bb;
15110d565efSmrg }
15210d565efSmrg 
15310d565efSmrg /* Verify if all PHI node arguments in DEST for edges from BB1 or
15410d565efSmrg    BB2 to DEST are the same.  This makes the CFG merge point
15510d565efSmrg    free from side-effects.  Return true in this case, else false.  */
15610d565efSmrg 
15710d565efSmrg static bool
same_phi_args_p(basic_block bb1,basic_block bb2,basic_block dest)15810d565efSmrg same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
15910d565efSmrg {
  /* NOTE(review): both edges are assumed to exist (callers pass BB1/BB2
     that branch to DEST); PHI_ARG_DEF_FROM_EDGE would dereference a null
     edge otherwise — confirm.  */
16010d565efSmrg   edge e1 = find_edge (bb1, dest);
16110d565efSmrg   edge e2 = find_edge (bb2, dest);
16210d565efSmrg   gphi_iterator gsi;
16310d565efSmrg   gphi *phi;
16410d565efSmrg 
  /* Every PHI in DEST must receive an operand_equal_p-identical
     argument along both edges.  */
16510d565efSmrg   for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
16610d565efSmrg     {
16710d565efSmrg       phi = gsi.phi ();
16810d565efSmrg       if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
16910d565efSmrg 			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
17010d565efSmrg         return false;
17110d565efSmrg     }
17210d565efSmrg 
17310d565efSmrg   return true;
17410d565efSmrg }
17510d565efSmrg 
17610d565efSmrg /* Return the best representative SSA name for CANDIDATE which is used
17710d565efSmrg    in a bit test.  */
17810d565efSmrg 
17910d565efSmrg static tree
get_name_for_bit_test(tree candidate)18010d565efSmrg get_name_for_bit_test (tree candidate)
18110d565efSmrg {
18210d565efSmrg   /* Skip single-use names in favor of using the name from a
18310d565efSmrg      non-widening conversion definition.  */
18410d565efSmrg   if (TREE_CODE (candidate) == SSA_NAME
18510d565efSmrg       && has_single_use (candidate))
18610d565efSmrg     {
18710d565efSmrg       gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
18810d565efSmrg       if (is_gimple_assign (def_stmt)
18910d565efSmrg 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
19010d565efSmrg 	{
	  /* Only look through conversions that do not widen: then every
	     bit of CANDIDATE is present in the conversion source, so
	     testing bits on the source is equivalent.  */
19110d565efSmrg 	  if (TYPE_PRECISION (TREE_TYPE (candidate))
19210d565efSmrg 	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
19310d565efSmrg 	    return gimple_assign_rhs1 (def_stmt);
19410d565efSmrg 	}
19510d565efSmrg     }
19610d565efSmrg 
  /* No better representative found; use CANDIDATE itself.  */
19710d565efSmrg   return candidate;
19810d565efSmrg }
19910d565efSmrg 
20010d565efSmrg /* Recognize a single bit test pattern in GIMPLE_COND and its defining
20110d565efSmrg    statements.  Store the name being tested in *NAME and the bit
20210d565efSmrg    in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
20310d565efSmrg    Returns true if the pattern matched, false otherwise.  */
20410d565efSmrg 
20510d565efSmrg static bool
recognize_single_bit_test(gcond * cond,tree * name,tree * bit,bool inv)20610d565efSmrg recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
20710d565efSmrg {
20810d565efSmrg   gimple *stmt;
20910d565efSmrg 
21010d565efSmrg   /* Get at the definition of the result of the bit test.  */
  /* The condition must be NAME != 0, or NAME == 0 when INV is true
     (INV inverts the sense of the recognized test).  */
21110d565efSmrg   if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
21210d565efSmrg       || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
21310d565efSmrg       || !integer_zerop (gimple_cond_rhs (cond)))
21410d565efSmrg     return false;
21510d565efSmrg   stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
21610d565efSmrg   if (!is_gimple_assign (stmt))
21710d565efSmrg     return false;
21810d565efSmrg 
21910d565efSmrg   /* Look at which bit is tested.  One form to recognize is
22010d565efSmrg      D.1985_5 = state_3(D) >> control1_4(D);
22110d565efSmrg      D.1986_6 = (int) D.1985_5;
22210d565efSmrg      D.1987_7 = op0 & 1;
22310d565efSmrg      if (D.1987_7 != 0)  */
22410d565efSmrg   if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
22510d565efSmrg       && integer_onep (gimple_assign_rhs2 (stmt))
22610d565efSmrg       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
22710d565efSmrg     {
22810d565efSmrg       tree orig_name = gimple_assign_rhs1 (stmt);
22910d565efSmrg 
23010d565efSmrg       /* Look through copies and conversions to eventually
23110d565efSmrg 	 find the stmt that computes the shift.  */
23210d565efSmrg       stmt = SSA_NAME_DEF_STMT (orig_name);
23310d565efSmrg 
      /* Only non-widening conversions and plain SSA copies are
	 transparent here, so the low bit is preserved.  */
23410d565efSmrg       while (is_gimple_assign (stmt)
23510d565efSmrg 	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
23610d565efSmrg 		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
23710d565efSmrg 		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
23810d565efSmrg 		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
23910d565efSmrg 		 || gimple_assign_ssa_name_copy_p (stmt)))
24010d565efSmrg 	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
24110d565efSmrg 
24210d565efSmrg       /* If we found such, decompose it.  */
24310d565efSmrg       if (is_gimple_assign (stmt)
24410d565efSmrg 	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
24510d565efSmrg 	{
24610d565efSmrg 	  /* op0 & (1 << op1) */
24710d565efSmrg 	  *bit = gimple_assign_rhs2 (stmt);
24810d565efSmrg 	  *name = gimple_assign_rhs1 (stmt);
24910d565efSmrg 	}
25010d565efSmrg       else
25110d565efSmrg 	{
25210d565efSmrg 	  /* t & 1 */
25310d565efSmrg 	  *bit = integer_zero_node;
25410d565efSmrg 	  *name = get_name_for_bit_test (orig_name);
25510d565efSmrg 	}
25610d565efSmrg 
25710d565efSmrg       return true;
25810d565efSmrg     }
25910d565efSmrg 
26010d565efSmrg   /* Another form is
26110d565efSmrg      D.1987_7 = op0 & (1 << CST)
26210d565efSmrg      if (D.1987_7 != 0)  */
26310d565efSmrg   if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
26410d565efSmrg       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
26510d565efSmrg       && integer_pow2p (gimple_assign_rhs2 (stmt)))
26610d565efSmrg     {
26710d565efSmrg       *name = gimple_assign_rhs1 (stmt);
      /* Recover the bit position from the power-of-two mask.  */
26810d565efSmrg       *bit = build_int_cst (integer_type_node,
26910d565efSmrg 			    tree_log2 (gimple_assign_rhs2 (stmt)));
27010d565efSmrg       return true;
27110d565efSmrg     }
27210d565efSmrg 
27310d565efSmrg   /* Another form is
27410d565efSmrg      D.1986_6 = 1 << control1_4(D)
27510d565efSmrg      D.1987_7 = op0 & D.1986_6
27610d565efSmrg      if (D.1987_7 != 0)  */
27710d565efSmrg   if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
27810d565efSmrg       && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
27910d565efSmrg       && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
28010d565efSmrg     {
28110d565efSmrg       gimple *tmp;
28210d565efSmrg 
28310d565efSmrg       /* Both arguments of the BIT_AND_EXPR can be the single-bit
28410d565efSmrg 	 specifying expression.  */
28510d565efSmrg       tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
28610d565efSmrg       if (is_gimple_assign (tmp)
28710d565efSmrg 	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
28810d565efSmrg 	  && integer_onep (gimple_assign_rhs1 (tmp)))
28910d565efSmrg 	{
29010d565efSmrg 	  *name = gimple_assign_rhs2 (stmt);
29110d565efSmrg 	  *bit = gimple_assign_rhs2 (tmp);
29210d565efSmrg 	  return true;
29310d565efSmrg 	}
29410d565efSmrg 
      /* Mirror case: the shift feeds the second operand instead.  */
29510d565efSmrg       tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
29610d565efSmrg       if (is_gimple_assign (tmp)
29710d565efSmrg 	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
29810d565efSmrg 	  && integer_onep (gimple_assign_rhs1 (tmp)))
29910d565efSmrg 	{
30010d565efSmrg 	  *name = gimple_assign_rhs1 (stmt);
30110d565efSmrg 	  *bit = gimple_assign_rhs2 (tmp);
30210d565efSmrg 	  return true;
30310d565efSmrg 	}
30410d565efSmrg     }
30510d565efSmrg 
30610d565efSmrg   return false;
30710d565efSmrg }
30810d565efSmrg 
30910d565efSmrg /* Recognize a bit test pattern in a GIMPLE_COND and its defining
31010d565efSmrg    statements.  Store the name being tested in *NAME and the bits
31110d565efSmrg    in *BITS.  The COND_EXPR computes *NAME & *BITS.
31210d565efSmrg    Returns true if the pattern matched, false otherwise.  */
31310d565efSmrg 
31410d565efSmrg static bool
recognize_bits_test(gcond * cond,tree * name,tree * bits,bool inv)31510d565efSmrg recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
31610d565efSmrg {
31710d565efSmrg   gimple *stmt;
31810d565efSmrg 
31910d565efSmrg   /* Get at the definition of the result of the bit test.  */
  /* As in recognize_single_bit_test, INV flips the required
     comparison code: NAME & BITS == 0 instead of != 0.  */
32010d565efSmrg   if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
32110d565efSmrg       || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
32210d565efSmrg       || !integer_zerop (gimple_cond_rhs (cond)))
32310d565efSmrg     return false;
32410d565efSmrg   stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
32510d565efSmrg   if (!is_gimple_assign (stmt)
32610d565efSmrg       || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
32710d565efSmrg     return false;
32810d565efSmrg 
  /* Strip a possible non-widening conversion off the tested name.  */
32910d565efSmrg   *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
33010d565efSmrg   *bits = gimple_assign_rhs2 (stmt);
33110d565efSmrg 
33210d565efSmrg   return true;
33310d565efSmrg }
33410d565efSmrg 
33510d565efSmrg 
33610d565efSmrg /* Update profile after code in outer_cond_bb was adjusted so
33710d565efSmrg    outer_cond_bb has no condition.  */
33810d565efSmrg 
33910d565efSmrg static void
update_profile_after_ifcombine(basic_block inner_cond_bb,basic_block outer_cond_bb)34010d565efSmrg update_profile_after_ifcombine (basic_block inner_cond_bb,
34110d565efSmrg 			        basic_block outer_cond_bb)
34210d565efSmrg {
34310d565efSmrg   edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  /* OUTER2 is the other successor of OUTER_COND_BB, i.e. the edge
     that bypassed INNER_COND_BB before the combination.  */
34410d565efSmrg   edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
34510d565efSmrg 		 ? EDGE_SUCC (outer_cond_bb, 1)
34610d565efSmrg 		 : EDGE_SUCC (outer_cond_bb, 0));
34710d565efSmrg   edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
34810d565efSmrg   edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);
34910d565efSmrg 
  /* Canonicalize so INNER_TAKEN is the inner edge leading to the same
     destination OUTER2 used to reach.  */
35010d565efSmrg   if (inner_taken->dest != outer2->dest)
35110d565efSmrg     std::swap (inner_taken, inner_not_taken);
35210d565efSmrg   gcc_assert (inner_taken->dest == outer2->dest);
35310d565efSmrg 
35410d565efSmrg   /* In the following we assume that inner_cond_bb has single predecessor.  */
35510d565efSmrg   gcc_assert (single_pred_p (inner_cond_bb));
35610d565efSmrg 
35710d565efSmrg   /* Path outer_cond_bb->(outer2) needs to be merged into path
35810d565efSmrg      outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
35910d565efSmrg      and probability of inner_not_taken updated.  */
36010d565efSmrg 
  /* All of OUTER_COND_BB's count now flows through INNER_COND_BB.  */
36110d565efSmrg   inner_cond_bb->count = outer_cond_bb->count;
36210d565efSmrg 
3630fc04c29Smrg   /* Handle special case where inner_taken probability is always. In this case
3640fc04c29Smrg      we know that the overall outcome will be always as well, but combining
3650fc04c29Smrg      probabilities will be conservative because it does not know that
3660fc04c29Smrg      outer2->probability is inverse of outer_to_inner->probability.  */
3670fc04c29Smrg   if (inner_taken->probability == profile_probability::always ())
3680fc04c29Smrg     ;
3690fc04c29Smrg   else
370c7a68eb7Smrg     inner_taken->probability = outer2->probability + outer_to_inner->probability
371c7a68eb7Smrg 			       * inner_taken->probability;
372c7a68eb7Smrg   inner_not_taken->probability = profile_probability::always ()
37310d565efSmrg 				 - inner_taken->probability;
37410d565efSmrg 
  /* OUTER_COND_BB now unconditionally falls through to INNER_COND_BB;
     the bypass edge is dead (CFG cleanup removes it later).  */
375c7a68eb7Smrg   outer_to_inner->probability = profile_probability::always ();
376c7a68eb7Smrg   outer2->probability = profile_probability::never ();
37710d565efSmrg }
37810d565efSmrg 
37910d565efSmrg /* If-convert on an and (&&) pattern with a common else block.  The inner
38010d565efSmrg    if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
38110d565efSmrg    inner_inv, outer_inv and result_inv indicate whether the conditions
38210d565efSmrg    are inverted.
38310d565efSmrg    Returns true if the edges to the common else basic-block were merged.  */
38410d565efSmrg 
38510d565efSmrg static bool
ifcombine_ifandif(basic_block inner_cond_bb,bool inner_inv,basic_block outer_cond_bb,bool outer_inv,bool result_inv)38610d565efSmrg ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
38710d565efSmrg 		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
38810d565efSmrg {
38910d565efSmrg   gimple_stmt_iterator gsi;
39010d565efSmrg   gimple *inner_stmt, *outer_stmt;
39110d565efSmrg   gcond *inner_cond, *outer_cond;
39210d565efSmrg   tree name1, name2, bit1, bit2, bits1, bits2;
39310d565efSmrg 
  /* Both blocks must end in a GIMPLE_COND we can inspect.  */
39410d565efSmrg   inner_stmt = last_stmt (inner_cond_bb);
39510d565efSmrg   if (!inner_stmt
39610d565efSmrg       || gimple_code (inner_stmt) != GIMPLE_COND)
39710d565efSmrg     return false;
39810d565efSmrg   inner_cond = as_a <gcond *> (inner_stmt);
39910d565efSmrg 
40010d565efSmrg   outer_stmt = last_stmt (outer_cond_bb);
40110d565efSmrg   if (!outer_stmt
40210d565efSmrg       || gimple_code (outer_stmt) != GIMPLE_COND)
40310d565efSmrg     return false;
40410d565efSmrg   outer_cond = as_a <gcond *> (outer_stmt);
40510d565efSmrg 
40610d565efSmrg   /* See if we test a single bit of the same name in both tests.  In
40710d565efSmrg      that case remove the outer test, merging both else edges,
40810d565efSmrg      and change the inner one to test for
40910d565efSmrg      name & (bit1 | bit2) == (bit1 | bit2).  */
41010d565efSmrg   if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
41110d565efSmrg       && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
41210d565efSmrg       && name1 == name2)
41310d565efSmrg     {
41410d565efSmrg       tree t, t2;
41510d565efSmrg 
41610d565efSmrg       /* Do it.  */
41710d565efSmrg       gsi = gsi_for_stmt (inner_cond);
      /* Build the combined mask T = (1 << bit1) | (1 << bit2) and force
	 it into a GIMPLE operand before the inner condition.  */
41810d565efSmrg       t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
41910d565efSmrg 		       build_int_cst (TREE_TYPE (name1), 1), bit1);
42010d565efSmrg       t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
42110d565efSmrg 		        build_int_cst (TREE_TYPE (name1), 1), bit2);
42210d565efSmrg       t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
42310d565efSmrg       t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
42410d565efSmrg 				    true, GSI_SAME_STMT);
42510d565efSmrg       t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
42610d565efSmrg       t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
42710d565efSmrg 				     true, GSI_SAME_STMT);
      /* The inner condition becomes (name & T) == T (or != T when the
	 combined result is inverted).  */
42810d565efSmrg       t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
42910d565efSmrg 		       boolean_type_node, t2, t);
      /* canonicalize_cond_expr_cond returns NULL_TREE if the folded
	 expression is not valid as a GIMPLE_COND condition; give up
	 then.  NOTE(review): the forced statements above have already
	 been inserted at this point — presumably harmless dead code
	 cleaned up later; confirm.  */
43010d565efSmrg       t = canonicalize_cond_expr_cond (t);
43110d565efSmrg       if (!t)
43210d565efSmrg 	return false;
43310d565efSmrg       gimple_cond_set_condition_from_tree (inner_cond, t);
43410d565efSmrg       update_stmt (inner_cond);
43510d565efSmrg 
43610d565efSmrg       /* Leave CFG optimization to cfg_cleanup.  */
43710d565efSmrg       gimple_cond_set_condition_from_tree (outer_cond,
43810d565efSmrg 	outer_inv ? boolean_false_node : boolean_true_node);
43910d565efSmrg       update_stmt (outer_cond);
44010d565efSmrg 
44110d565efSmrg       update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);
44210d565efSmrg 
44310d565efSmrg       if (dump_file)
44410d565efSmrg 	{
44510d565efSmrg 	  fprintf (dump_file, "optimizing double bit test to ");
446c7a68eb7Smrg 	  print_generic_expr (dump_file, name1);
44710d565efSmrg 	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
448c7a68eb7Smrg 	  print_generic_expr (dump_file, bit1);
44910d565efSmrg 	  fprintf (dump_file, ") | (1 << ");
450c7a68eb7Smrg 	  print_generic_expr (dump_file, bit2);
45110d565efSmrg 	  fprintf (dump_file, ")\n");
45210d565efSmrg 	}
45310d565efSmrg 
45410d565efSmrg       return true;
45510d565efSmrg     }
45610d565efSmrg 
45710d565efSmrg   /* See if we have two bit tests of the same name in both tests.
45810d565efSmrg      In that case remove the outer test and change the inner one to
45910d565efSmrg      test for name & (bits1 | bits2) != 0.  */
46010d565efSmrg   else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
46110d565efSmrg       && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
46210d565efSmrg     {
46310d565efSmrg       gimple_stmt_iterator gsi;
46410d565efSmrg       tree t;
46510d565efSmrg 
46610d565efSmrg       /* Find the common name which is bit-tested.  */
      /* The tested name may appear as either operand of either test;
	 swap NAMEn/BITSn so NAME1 == NAME2 holds afterwards.  */
46710d565efSmrg       if (name1 == name2)
46810d565efSmrg 	;
46910d565efSmrg       else if (bits1 == bits2)
47010d565efSmrg 	{
47110d565efSmrg 	  std::swap (name2, bits2);
47210d565efSmrg 	  std::swap (name1, bits1);
47310d565efSmrg 	}
47410d565efSmrg       else if (name1 == bits2)
47510d565efSmrg 	std::swap (name2, bits2);
47610d565efSmrg       else if (bits1 == name2)
47710d565efSmrg 	std::swap (name1, bits1);
47810d565efSmrg       else
47910d565efSmrg 	return false;
48010d565efSmrg 
48110d565efSmrg       /* As we strip non-widening conversions in finding a common
48210d565efSmrg          name that is tested make sure to end up with an integral
48310d565efSmrg 	 type for building the bit operations.  */
      /* Convert everything to the wider of the two (unsigned) mask
	 types.  */
48410d565efSmrg       if (TYPE_PRECISION (TREE_TYPE (bits1))
48510d565efSmrg 	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
48610d565efSmrg 	{
48710d565efSmrg 	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
48810d565efSmrg 	  name1 = fold_convert (TREE_TYPE (bits1), name1);
48910d565efSmrg 	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
49010d565efSmrg 	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
49110d565efSmrg 	}
49210d565efSmrg       else
49310d565efSmrg 	{
49410d565efSmrg 	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
49510d565efSmrg 	  name1 = fold_convert (TREE_TYPE (bits2), name1);
49610d565efSmrg 	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
49710d565efSmrg 	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
49810d565efSmrg 	}
49910d565efSmrg 
50010d565efSmrg       /* Do it.  */
50110d565efSmrg       gsi = gsi_for_stmt (inner_cond);
      /* Inner condition becomes (name & (bits1 | bits2)) != 0 (== 0 when
	 the combined result is inverted).  */
50210d565efSmrg       t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
50310d565efSmrg       t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
50410d565efSmrg 				    true, GSI_SAME_STMT);
50510d565efSmrg       t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
50610d565efSmrg       t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
50710d565efSmrg 				    true, GSI_SAME_STMT);
50810d565efSmrg       t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
50910d565efSmrg 		       build_int_cst (TREE_TYPE (t), 0));
51010d565efSmrg       t = canonicalize_cond_expr_cond (t);
51110d565efSmrg       if (!t)
51210d565efSmrg 	return false;
51310d565efSmrg       gimple_cond_set_condition_from_tree (inner_cond, t);
51410d565efSmrg       update_stmt (inner_cond);
51510d565efSmrg 
51610d565efSmrg       /* Leave CFG optimization to cfg_cleanup.  */
51710d565efSmrg       gimple_cond_set_condition_from_tree (outer_cond,
51810d565efSmrg 	outer_inv ? boolean_false_node : boolean_true_node);
51910d565efSmrg       update_stmt (outer_cond);
52010d565efSmrg       update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);
52110d565efSmrg 
52210d565efSmrg       if (dump_file)
52310d565efSmrg 	{
52410d565efSmrg 	  fprintf (dump_file, "optimizing bits or bits test to ");
525c7a68eb7Smrg 	  print_generic_expr (dump_file, name1);
52610d565efSmrg 	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
527c7a68eb7Smrg 	  print_generic_expr (dump_file, bits1);
52810d565efSmrg 	  fprintf (dump_file, " | ");
529c7a68eb7Smrg 	  print_generic_expr (dump_file, bits2);
53010d565efSmrg 	  fprintf (dump_file, "\n");
53110d565efSmrg 	}
53210d565efSmrg 
53310d565efSmrg       return true;
53410d565efSmrg     }
53510d565efSmrg 
53610d565efSmrg   /* See if we have two comparisons that we can merge into one.  */
53710d565efSmrg   else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
53810d565efSmrg 	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
53910d565efSmrg     {
54010d565efSmrg       tree t;
54110d565efSmrg       enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
54210d565efSmrg       enum tree_code outer_cond_code = gimple_cond_code (outer_cond);
54310d565efSmrg 
54410d565efSmrg       /* Invert comparisons if necessary (and possible).  */
      /* invert_tree_comparison returns ERROR_MARK when no inverse
	 exists (e.g. floating point with NaNs honored).  */
54510d565efSmrg       if (inner_inv)
54610d565efSmrg 	inner_cond_code = invert_tree_comparison (inner_cond_code,
54710d565efSmrg 	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
54810d565efSmrg       if (inner_cond_code == ERROR_MARK)
54910d565efSmrg 	return false;
55010d565efSmrg       if (outer_inv)
55110d565efSmrg 	outer_cond_code = invert_tree_comparison (outer_cond_code,
55210d565efSmrg 	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
55310d565efSmrg       if (outer_cond_code == ERROR_MARK)
55410d565efSmrg 	return false;
55510d565efSmrg       /* Don't return false so fast, try maybe_fold_or_comparisons?  */
55610d565efSmrg 
      /* First try to fold the conjunction of the two comparisons into
	 a single comparison.  */
557*ec02198aSmrg       if (!(t = maybe_fold_and_comparisons (boolean_type_node, inner_cond_code,
55810d565efSmrg 					    gimple_cond_lhs (inner_cond),
55910d565efSmrg 					    gimple_cond_rhs (inner_cond),
56010d565efSmrg 					    outer_cond_code,
56110d565efSmrg 					    gimple_cond_lhs (outer_cond),
56210d565efSmrg 					    gimple_cond_rhs (outer_cond))))
56310d565efSmrg 	{
56410d565efSmrg 	  tree t1, t2;
56510d565efSmrg 	  gimple_stmt_iterator gsi;
	  /* Folding failed; fall back to an explicit TRUTH_AND of both
	     conditions, which evaluates both unconditionally.  That is
	     only wanted when branches are expensive
	     (LOGICAL_OP_NON_SHORT_CIRCUIT, possibly overridden by
	     --param logical-op-non-short-circuit).  NOTE(review): also
	     skipped under -fsanitize-coverage, presumably so coverage
	     instrumentation of the two conditions stays separate —
	     confirm.  */
566c7a68eb7Smrg 	  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
567*ec02198aSmrg 	  if (param_logical_op_non_short_circuit != -1)
568c7a68eb7Smrg 	    logical_op_non_short_circuit
569*ec02198aSmrg 	      = param_logical_op_non_short_circuit;
570c7a68eb7Smrg 	  if (!logical_op_non_short_circuit || flag_sanitize_coverage)
57110d565efSmrg 	    return false;
57210d565efSmrg 	  /* Only do this optimization if the inner bb contains only the conditional. */
57310d565efSmrg 	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
57410d565efSmrg 	    return false;
57510d565efSmrg 	  t1 = fold_build2_loc (gimple_location (inner_cond),
57610d565efSmrg 				inner_cond_code,
57710d565efSmrg 				boolean_type_node,
57810d565efSmrg 				gimple_cond_lhs (inner_cond),
57910d565efSmrg 				gimple_cond_rhs (inner_cond));
58010d565efSmrg 	  t2 = fold_build2_loc (gimple_location (outer_cond),
58110d565efSmrg 				outer_cond_code,
58210d565efSmrg 				boolean_type_node,
58310d565efSmrg 				gimple_cond_lhs (outer_cond),
58410d565efSmrg 				gimple_cond_rhs (outer_cond));
58510d565efSmrg 	  t = fold_build2_loc (gimple_location (inner_cond),
58610d565efSmrg 			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  /* Fold the inversion in here so the shared code below does
	     not apply it a second time.  */
58710d565efSmrg 	  if (result_inv)
58810d565efSmrg 	    {
58910d565efSmrg 	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
59010d565efSmrg 	      result_inv = false;
59110d565efSmrg 	    }
59210d565efSmrg 	  gsi = gsi_for_stmt (inner_cond);
59310d565efSmrg 	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
59410d565efSmrg 					  GSI_SAME_STMT);
59510d565efSmrg         }
      /* Apply a still-pending inversion (from the folded path above).  */
59610d565efSmrg       if (result_inv)
59710d565efSmrg 	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
59810d565efSmrg       t = canonicalize_cond_expr_cond (t);
59910d565efSmrg       if (!t)
60010d565efSmrg 	return false;
      /* Re-gimplify if T is not directly usable as a GIMPLE_COND
	 condition, emitting helper statements before INNER_COND.  */
601*ec02198aSmrg       if (!is_gimple_condexpr_for_cond (t))
602*ec02198aSmrg 	{
603*ec02198aSmrg 	  gsi = gsi_for_stmt (inner_cond);
604*ec02198aSmrg 	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
605*ec02198aSmrg 					  NULL, true, GSI_SAME_STMT);
606*ec02198aSmrg 	}
60710d565efSmrg       gimple_cond_set_condition_from_tree (inner_cond, t);
60810d565efSmrg       update_stmt (inner_cond);
60910d565efSmrg 
61010d565efSmrg       /* Leave CFG optimization to cfg_cleanup.  */
61110d565efSmrg       gimple_cond_set_condition_from_tree (outer_cond,
61210d565efSmrg 	outer_inv ? boolean_false_node : boolean_true_node);
61310d565efSmrg       update_stmt (outer_cond);
61410d565efSmrg       update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);
61510d565efSmrg 
61610d565efSmrg       if (dump_file)
61710d565efSmrg 	{
61810d565efSmrg 	  fprintf (dump_file, "optimizing two comparisons to ");
619c7a68eb7Smrg 	  print_generic_expr (dump_file, t);
62010d565efSmrg 	  fprintf (dump_file, "\n");
62110d565efSmrg 	}
62210d565efSmrg 
62310d565efSmrg       return true;
62410d565efSmrg     }
62510d565efSmrg 
62610d565efSmrg   return false;
62710d565efSmrg }
62810d565efSmrg 
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB; it is
   handed to same_phi_args_p to check that the two edges into the common
   destination block carry matching PHI arguments and hence are mergable.
   Note that recognize_if_then_else takes its block arguments by address
   and may update them, so INNER_COND_BB/THEN_BB/ELSE_BB below refer to
   the (possibly adjusted) local copies.
   Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
71710d565efSmrg 
71810d565efSmrg /* Recognize a CFG pattern and dispatch to the appropriate
71910d565efSmrg    if-conversion helper.  We start with BB as the innermost
72010d565efSmrg    worker basic-block.  Returns true if a transformation was done.  */
72110d565efSmrg 
72210d565efSmrg static bool
tree_ssa_ifcombine_bb(basic_block inner_cond_bb)72310d565efSmrg tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
72410d565efSmrg {
72510d565efSmrg   basic_block then_bb = NULL, else_bb = NULL;
72610d565efSmrg 
72710d565efSmrg   if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
72810d565efSmrg     return false;
72910d565efSmrg 
73010d565efSmrg   /* Recognize && and || of two conditions with a common
73110d565efSmrg      then/else block which entry edges we can merge.  That is:
73210d565efSmrg        if (a || b)
73310d565efSmrg 	 ;
73410d565efSmrg      and
73510d565efSmrg        if (a && b)
73610d565efSmrg 	 ;
73710d565efSmrg      This requires a single predecessor of the inner cond_bb.  */
73810d565efSmrg   if (single_pred_p (inner_cond_bb)
73910d565efSmrg       && bb_no_side_effects_p (inner_cond_bb))
74010d565efSmrg     {
74110d565efSmrg       basic_block outer_cond_bb = single_pred (inner_cond_bb);
74210d565efSmrg 
74310d565efSmrg       if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
74410d565efSmrg 				   then_bb, else_bb, inner_cond_bb))
74510d565efSmrg 	return true;
74610d565efSmrg 
74710d565efSmrg       if (forwarder_block_to (else_bb, then_bb))
74810d565efSmrg 	{
74910d565efSmrg 	  /* Other possibilities for the && form, if else_bb is
75010d565efSmrg 	     empty forwarder block to then_bb.  Compared to the above simpler
75110d565efSmrg 	     forms this can be treated as if then_bb and else_bb were swapped,
75210d565efSmrg 	     and the corresponding inner_cond_bb not inverted because of that.
75310d565efSmrg 	     For same_phi_args_p we look at equality of arguments between
75410d565efSmrg 	     edge from outer_cond_bb and the forwarder block.  */
75510d565efSmrg 	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
75610d565efSmrg 				       then_bb, else_bb))
75710d565efSmrg 	    return true;
75810d565efSmrg 	}
75910d565efSmrg       else if (forwarder_block_to (then_bb, else_bb))
76010d565efSmrg 	{
76110d565efSmrg 	  /* Other possibilities for the || form, if then_bb is
76210d565efSmrg 	     empty forwarder block to else_bb.  Compared to the above simpler
76310d565efSmrg 	     forms this can be treated as if then_bb and else_bb were swapped,
76410d565efSmrg 	     and the corresponding inner_cond_bb not inverted because of that.
76510d565efSmrg 	     For same_phi_args_p we look at equality of arguments between
76610d565efSmrg 	     edge from outer_cond_bb and the forwarder block.  */
76710d565efSmrg 	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
76810d565efSmrg 				       then_bb, then_bb))
76910d565efSmrg 	    return true;
77010d565efSmrg 	}
77110d565efSmrg     }
77210d565efSmrg 
77310d565efSmrg   return false;
77410d565efSmrg }
77510d565efSmrg 
/* Main entry for the tree if-combine pass.  */
77710d565efSmrg 
77810d565efSmrg namespace {
77910d565efSmrg 
78010d565efSmrg const pass_data pass_data_tree_ifcombine =
78110d565efSmrg {
78210d565efSmrg   GIMPLE_PASS, /* type */
78310d565efSmrg   "ifcombine", /* name */
78410d565efSmrg   OPTGROUP_NONE, /* optinfo_flags */
78510d565efSmrg   TV_TREE_IFCOMBINE, /* tv_id */
78610d565efSmrg   ( PROP_cfg | PROP_ssa ), /* properties_required */
78710d565efSmrg   0, /* properties_provided */
78810d565efSmrg   0, /* properties_destroyed */
78910d565efSmrg   0, /* todo_flags_start */
79010d565efSmrg   TODO_update_ssa, /* todo_flags_finish */
79110d565efSmrg };
79210d565efSmrg 
79310d565efSmrg class pass_tree_ifcombine : public gimple_opt_pass
79410d565efSmrg {
79510d565efSmrg public:
pass_tree_ifcombine(gcc::context * ctxt)79610d565efSmrg   pass_tree_ifcombine (gcc::context *ctxt)
79710d565efSmrg     : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
79810d565efSmrg   {}
79910d565efSmrg 
80010d565efSmrg   /* opt_pass methods: */
80110d565efSmrg   virtual unsigned int execute (function *);
80210d565efSmrg 
80310d565efSmrg }; // class pass_tree_ifcombine
80410d565efSmrg 
80510d565efSmrg unsigned int
execute(function * fun)80610d565efSmrg pass_tree_ifcombine::execute (function *fun)
80710d565efSmrg {
80810d565efSmrg   basic_block *bbs;
80910d565efSmrg   bool cfg_changed = false;
81010d565efSmrg   int i;
81110d565efSmrg 
81210d565efSmrg   bbs = single_pred_before_succ_order ();
81310d565efSmrg   calculate_dominance_info (CDI_DOMINATORS);
81410d565efSmrg 
81510d565efSmrg   /* Search every basic block for COND_EXPR we may be able to optimize.
81610d565efSmrg 
81710d565efSmrg      We walk the blocks in order that guarantees that a block with
81810d565efSmrg      a single predecessor is processed after the predecessor.
81910d565efSmrg      This ensures that we collapse outter ifs before visiting the
82010d565efSmrg      inner ones, and also that we do not try to visit a removed
82110d565efSmrg      block.  This is opposite of PHI-OPT, because we cascade the
82210d565efSmrg      combining rather than cascading PHIs. */
82310d565efSmrg   for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
82410d565efSmrg     {
82510d565efSmrg       basic_block bb = bbs[i];
82610d565efSmrg       gimple *stmt = last_stmt (bb);
82710d565efSmrg 
82810d565efSmrg       if (stmt
82910d565efSmrg 	  && gimple_code (stmt) == GIMPLE_COND)
83010d565efSmrg 	if (tree_ssa_ifcombine_bb (bb))
83110d565efSmrg 	  {
83210d565efSmrg 	    /* Clear range info from all stmts in BB which is now executed
83310d565efSmrg 	       conditional on a always true/false condition.  */
83410d565efSmrg 	    reset_flow_sensitive_info_in_bb (bb);
83510d565efSmrg 	    cfg_changed |= true;
83610d565efSmrg 	  }
83710d565efSmrg     }
83810d565efSmrg 
83910d565efSmrg   free (bbs);
84010d565efSmrg 
84110d565efSmrg   return cfg_changed ? TODO_cleanup_cfg : 0;
84210d565efSmrg }
84310d565efSmrg 
84410d565efSmrg } // anon namespace
84510d565efSmrg 
84610d565efSmrg gimple_opt_pass *
make_pass_tree_ifcombine(gcc::context * ctxt)84710d565efSmrg make_pass_tree_ifcombine (gcc::context *ctxt)
84810d565efSmrg {
84910d565efSmrg   return new pass_tree_ifcombine (ctxt);
85010d565efSmrg }
851