138fd1498Szrj /* SSA Dominator optimizations for trees
238fd1498Szrj    Copyright (C) 2001-2018 Free Software Foundation, Inc.
338fd1498Szrj    Contributed by Diego Novillo <dnovillo@redhat.com>
438fd1498Szrj 
538fd1498Szrj This file is part of GCC.
638fd1498Szrj 
738fd1498Szrj GCC is free software; you can redistribute it and/or modify
838fd1498Szrj it under the terms of the GNU General Public License as published by
938fd1498Szrj the Free Software Foundation; either version 3, or (at your option)
1038fd1498Szrj any later version.
1138fd1498Szrj 
1238fd1498Szrj GCC is distributed in the hope that it will be useful,
1338fd1498Szrj but WITHOUT ANY WARRANTY; without even the implied warranty of
1438fd1498Szrj MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
1538fd1498Szrj GNU General Public License for more details.
1638fd1498Szrj 
1738fd1498Szrj You should have received a copy of the GNU General Public License
1838fd1498Szrj along with GCC; see the file COPYING3.  If not see
1938fd1498Szrj <http://www.gnu.org/licenses/>.  */
2038fd1498Szrj 
2138fd1498Szrj #include "config.h"
2238fd1498Szrj #include "system.h"
2338fd1498Szrj #include "coretypes.h"
2438fd1498Szrj #include "backend.h"
2538fd1498Szrj #include "tree.h"
2638fd1498Szrj #include "gimple.h"
2738fd1498Szrj #include "tree-pass.h"
2838fd1498Szrj #include "ssa.h"
2938fd1498Szrj #include "gimple-pretty-print.h"
3038fd1498Szrj #include "fold-const.h"
3138fd1498Szrj #include "cfganal.h"
3238fd1498Szrj #include "cfgloop.h"
3338fd1498Szrj #include "gimple-fold.h"
3438fd1498Szrj #include "tree-eh.h"
3538fd1498Szrj #include "tree-inline.h"
3638fd1498Szrj #include "gimple-iterator.h"
3738fd1498Szrj #include "tree-cfg.h"
3838fd1498Szrj #include "tree-into-ssa.h"
3938fd1498Szrj #include "domwalk.h"
4038fd1498Szrj #include "tree-ssa-propagate.h"
4138fd1498Szrj #include "tree-ssa-threadupdate.h"
4238fd1498Szrj #include "params.h"
4338fd1498Szrj #include "tree-ssa-scopedtables.h"
4438fd1498Szrj #include "tree-ssa-threadedge.h"
4538fd1498Szrj #include "tree-ssa-dom.h"
4638fd1498Szrj #include "gimplify.h"
4738fd1498Szrj #include "tree-cfgcleanup.h"
4838fd1498Szrj #include "dbgcnt.h"
4938fd1498Szrj #include "alloc-pool.h"
5038fd1498Szrj #include "tree-vrp.h"
5138fd1498Szrj #include "vr-values.h"
5238fd1498Szrj #include "gimple-ssa-evrp-analyze.h"
5338fd1498Szrj 
5438fd1498Szrj /* This file implements optimizations on the dominator tree.  */
5538fd1498Szrj 
5638fd1498Szrj /* Structure for recording edge equivalences.
5738fd1498Szrj 
5838fd1498Szrj    Computing and storing the edge equivalences instead of creating
5938fd1498Szrj    them on-demand can save significant amounts of time, particularly
6038fd1498Szrj    for pathological cases involving switch statements.
6138fd1498Szrj 
6238fd1498Szrj    These structures live for a single iteration of the dominator
6338fd1498Szrj    optimizer in the edge's AUX field.  At the end of an iteration we
6438fd1498Szrj    free each of these structures.  */
6538fd1498Szrj class edge_info
6638fd1498Szrj {
6738fd1498Szrj  public:
6838fd1498Szrj   typedef std::pair <tree, tree> equiv_pair;
6938fd1498Szrj   edge_info (edge);
7038fd1498Szrj   ~edge_info ();
7138fd1498Szrj 
7238fd1498Szrj   /* Record a simple LHS = RHS equivalence.  This may trigger
7338fd1498Szrj      calls to derive_equivalences.  */
7438fd1498Szrj   void record_simple_equiv (tree, tree);
7538fd1498Szrj 
7638fd1498Szrj   /* If traversing this edge creates simple equivalences, we store
7738fd1498Szrj      them as LHS/RHS pairs within this vector.  */
7838fd1498Szrj   vec<equiv_pair> simple_equivalences;
7938fd1498Szrj 
8038fd1498Szrj   /* Traversing an edge may also indicate one or more particular conditions
8138fd1498Szrj      are true or false.  */
8238fd1498Szrj   vec<cond_equivalence> cond_equivalences;
8338fd1498Szrj 
8438fd1498Szrj  private:
8538fd1498Szrj   /* Derive equivalences by walking the use-def chains.  */
8638fd1498Szrj   void derive_equivalences (tree, tree, int);
8738fd1498Szrj };
8838fd1498Szrj 
8938fd1498Szrj /* Track whether or not we have changed the control flow graph.  */
9038fd1498Szrj static bool cfg_altered;
9138fd1498Szrj 
9238fd1498Szrj /* Bitmap of blocks that have had EH statements cleaned.  We should
9338fd1498Szrj    remove their dead edges eventually.  */
9438fd1498Szrj static bitmap need_eh_cleanup;
9538fd1498Szrj static vec<gimple *> need_noreturn_fixup;
9638fd1498Szrj 
9738fd1498Szrj /* Statistics for dominator optimizations.  */
9838fd1498Szrj struct opt_stats_d
9938fd1498Szrj {
10038fd1498Szrj   long num_stmts;
10138fd1498Szrj   long num_exprs_considered;
10238fd1498Szrj   long num_re;
10338fd1498Szrj   long num_const_prop;
10438fd1498Szrj   long num_copy_prop;
10538fd1498Szrj };
10638fd1498Szrj 
10738fd1498Szrj static struct opt_stats_d opt_stats;
10838fd1498Szrj 
10938fd1498Szrj /* Local functions.  */
11038fd1498Szrj static void record_equality (tree, tree, class const_and_copies *);
11138fd1498Szrj static void record_equivalences_from_phis (basic_block);
11238fd1498Szrj static void record_equivalences_from_incoming_edge (basic_block,
11338fd1498Szrj 						    class const_and_copies *,
11438fd1498Szrj 						    class avail_exprs_stack *);
11538fd1498Szrj static void eliminate_redundant_computations (gimple_stmt_iterator *,
11638fd1498Szrj 					      class const_and_copies *,
11738fd1498Szrj 					      class avail_exprs_stack *);
11838fd1498Szrj static void record_equivalences_from_stmt (gimple *, int,
11938fd1498Szrj 					   class avail_exprs_stack *);
12038fd1498Szrj static void dump_dominator_optimization_stats (FILE *file,
12138fd1498Szrj 					       hash_table<expr_elt_hasher> *);
12238fd1498Szrj 
12338fd1498Szrj /* Constructor for EDGE_INFO.  An EDGE_INFO instance is always
12438fd1498Szrj    associated with an edge E.  */
12538fd1498Szrj 
edge_info(edge e)12638fd1498Szrj edge_info::edge_info (edge e)
12738fd1498Szrj {
12838fd1498Szrj   /* Free the old one associated with E, if it exists and
12938fd1498Szrj      associate our new object with E.  */
13038fd1498Szrj   free_dom_edge_info (e);
13138fd1498Szrj   e->aux = this;
13238fd1498Szrj 
13338fd1498Szrj   /* And initialize the embedded vectors.  */
13438fd1498Szrj   simple_equivalences = vNULL;
13538fd1498Szrj   cond_equivalences = vNULL;
13638fd1498Szrj }
13738fd1498Szrj 
13838fd1498Szrj /* Destructor just needs to release the vectors.  */
13938fd1498Szrj 
~edge_info(void)14038fd1498Szrj edge_info::~edge_info (void)
14138fd1498Szrj {
14238fd1498Szrj   this->cond_equivalences.release ();
14338fd1498Szrj   this->simple_equivalences.release ();
14438fd1498Szrj }
14538fd1498Szrj 
14638fd1498Szrj /* NAME is known to have the value VALUE, which must be a constant.
14738fd1498Szrj 
14838fd1498Szrj    Walk through its use-def chain to see if there are other equivalences
14938fd1498Szrj    we might be able to derive.
15038fd1498Szrj 
15138fd1498Szrj    RECURSION_LIMIT controls how far back we recurse through the use-def
15238fd1498Szrj    chains.  */
15338fd1498Szrj 
15438fd1498Szrj void
derive_equivalences(tree name,tree value,int recursion_limit)15538fd1498Szrj edge_info::derive_equivalences (tree name, tree value, int recursion_limit)
15638fd1498Szrj {
15738fd1498Szrj   if (TREE_CODE (name) != SSA_NAME || TREE_CODE (value) != INTEGER_CST)
15838fd1498Szrj     return;
15938fd1498Szrj 
16038fd1498Szrj   /* This records the equivalence for the toplevel object.  Do
16138fd1498Szrj      this before checking the recursion limit.  */
16238fd1498Szrj   simple_equivalences.safe_push (equiv_pair (name, value));
16338fd1498Szrj 
16438fd1498Szrj   /* Limit how far up the use-def chains we are willing to walk.  */
16538fd1498Szrj   if (recursion_limit == 0)
16638fd1498Szrj     return;
16738fd1498Szrj 
16838fd1498Szrj   /* We can walk up the use-def chains to potentially find more
16938fd1498Szrj      equivalences.  */
17038fd1498Szrj   gimple *def_stmt = SSA_NAME_DEF_STMT (name);
17138fd1498Szrj   if (is_gimple_assign (def_stmt))
17238fd1498Szrj     {
17338fd1498Szrj       enum tree_code code = gimple_assign_rhs_code (def_stmt);
17438fd1498Szrj       switch (code)
17538fd1498Szrj 	{
176*e215fc28Szrj 	/* If the result of an OR is zero, then its operands are, too.  */
17738fd1498Szrj 	case BIT_IOR_EXPR:
17838fd1498Szrj 	  if (integer_zerop (value))
17938fd1498Szrj 	    {
18038fd1498Szrj 	      tree rhs1 = gimple_assign_rhs1 (def_stmt);
18138fd1498Szrj 	      tree rhs2 = gimple_assign_rhs2 (def_stmt);
18238fd1498Szrj 
18338fd1498Szrj 	      value = build_zero_cst (TREE_TYPE (rhs1));
18438fd1498Szrj 	      derive_equivalences (rhs1, value, recursion_limit - 1);
18538fd1498Szrj 	      value = build_zero_cst (TREE_TYPE (rhs2));
18638fd1498Szrj 	      derive_equivalences (rhs2, value, recursion_limit - 1);
18738fd1498Szrj 	    }
18838fd1498Szrj 	  break;
18938fd1498Szrj 
190*e215fc28Szrj 	/* If the result of an AND is nonzero, then its operands are, too.  */
19138fd1498Szrj 	case BIT_AND_EXPR:
19238fd1498Szrj 	  if (!integer_zerop (value))
19338fd1498Szrj 	    {
19438fd1498Szrj 	      tree rhs1 = gimple_assign_rhs1 (def_stmt);
19538fd1498Szrj 	      tree rhs2 = gimple_assign_rhs2 (def_stmt);
19638fd1498Szrj 
19738fd1498Szrj 	      /* If either operand has a boolean range, then we
19838fd1498Szrj 		 know its value must be one, otherwise we just know it
19938fd1498Szrj 		 is nonzero.  The former is clearly useful, I haven't
20038fd1498Szrj 		 seen cases where the latter is helpful yet.  */
20138fd1498Szrj 	      if (TREE_CODE (rhs1) == SSA_NAME)
20238fd1498Szrj 		{
20338fd1498Szrj 		  if (ssa_name_has_boolean_range (rhs1))
20438fd1498Szrj 		    {
20538fd1498Szrj 		      value = build_one_cst (TREE_TYPE (rhs1));
20638fd1498Szrj 		      derive_equivalences (rhs1, value, recursion_limit - 1);
20738fd1498Szrj 		    }
20838fd1498Szrj 		}
20938fd1498Szrj 	      if (TREE_CODE (rhs2) == SSA_NAME)
21038fd1498Szrj 		{
21138fd1498Szrj 		  if (ssa_name_has_boolean_range (rhs2))
21238fd1498Szrj 		    {
21338fd1498Szrj 		      value = build_one_cst (TREE_TYPE (rhs2));
21438fd1498Szrj 		      derive_equivalences (rhs2, value, recursion_limit - 1);
21538fd1498Szrj 		    }
21638fd1498Szrj 		}
21738fd1498Szrj 	    }
21838fd1498Szrj 	  break;
21938fd1498Szrj 
22038fd1498Szrj 	/* If LHS is an SSA_NAME and RHS is a constant integer and LHS was
22138fd1498Szrj 	   set via a widening type conversion, then we may be able to record
22238fd1498Szrj 	   additional equivalences.  */
22338fd1498Szrj 	case NOP_EXPR:
22438fd1498Szrj 	case CONVERT_EXPR:
22538fd1498Szrj 	  {
22638fd1498Szrj 	    tree rhs = gimple_assign_rhs1 (def_stmt);
22738fd1498Szrj 	    tree rhs_type = TREE_TYPE (rhs);
22838fd1498Szrj 	    if (INTEGRAL_TYPE_P (rhs_type)
22938fd1498Szrj 		&& (TYPE_PRECISION (TREE_TYPE (name))
23038fd1498Szrj 		    >= TYPE_PRECISION (rhs_type))
23138fd1498Szrj 		&& int_fits_type_p (value, rhs_type))
23238fd1498Szrj 	      derive_equivalences (rhs,
23338fd1498Szrj 				   fold_convert (rhs_type, value),
23438fd1498Szrj 				   recursion_limit - 1);
23538fd1498Szrj 	    break;
23638fd1498Szrj 	  }
23738fd1498Szrj 
23838fd1498Szrj 	/* We can invert the operation of these codes trivially if
23938fd1498Szrj 	   one of the RHS operands is a constant to produce a known
24038fd1498Szrj 	   value for the other RHS operand.  */
24138fd1498Szrj 	case POINTER_PLUS_EXPR:
24238fd1498Szrj 	case PLUS_EXPR:
24338fd1498Szrj 	  {
24438fd1498Szrj 	    tree rhs1 = gimple_assign_rhs1 (def_stmt);
24538fd1498Szrj 	    tree rhs2 = gimple_assign_rhs2 (def_stmt);
24638fd1498Szrj 
24738fd1498Szrj 	    /* If either argument is a constant, then we can compute
24838fd1498Szrj 	       a constant value for the nonconstant argument.  */
24938fd1498Szrj 	    if (TREE_CODE (rhs1) == INTEGER_CST
25038fd1498Szrj 		&& TREE_CODE (rhs2) == SSA_NAME)
25138fd1498Szrj 	      derive_equivalences (rhs2,
25238fd1498Szrj 				   fold_binary (MINUS_EXPR, TREE_TYPE (rhs1),
25338fd1498Szrj 						value, rhs1),
25438fd1498Szrj 				   recursion_limit - 1);
25538fd1498Szrj 	    else if (TREE_CODE (rhs2) == INTEGER_CST
25638fd1498Szrj 		     && TREE_CODE (rhs1) == SSA_NAME)
25738fd1498Szrj 	      derive_equivalences (rhs1,
25838fd1498Szrj 				   fold_binary (MINUS_EXPR, TREE_TYPE (rhs1),
25938fd1498Szrj 						value, rhs2),
26038fd1498Szrj 				   recursion_limit - 1);
26138fd1498Szrj 	    break;
26238fd1498Szrj 	  }
26338fd1498Szrj 
26438fd1498Szrj 	/* If one of the operands is a constant, then we can compute
26538fd1498Szrj 	   the value of the other operand.  If both operands are
26638fd1498Szrj 	   SSA_NAMEs, then they must be equal if the result is zero.  */
26738fd1498Szrj 	case MINUS_EXPR:
26838fd1498Szrj 	  {
26938fd1498Szrj 	    tree rhs1 = gimple_assign_rhs1 (def_stmt);
27038fd1498Szrj 	    tree rhs2 = gimple_assign_rhs2 (def_stmt);
27138fd1498Szrj 
27238fd1498Szrj 	    /* If either argument is a constant, then we can compute
27338fd1498Szrj 	       a constant value for the nonconstant argument.  */
27438fd1498Szrj 	    if (TREE_CODE (rhs1) == INTEGER_CST
27538fd1498Szrj 		&& TREE_CODE (rhs2) == SSA_NAME)
27638fd1498Szrj 	      derive_equivalences (rhs2,
27738fd1498Szrj 				   fold_binary (MINUS_EXPR, TREE_TYPE (rhs1),
27838fd1498Szrj 						rhs1, value),
27938fd1498Szrj 				   recursion_limit - 1);
28038fd1498Szrj 	    else if (TREE_CODE (rhs2) == INTEGER_CST
28138fd1498Szrj 		     && TREE_CODE (rhs1) == SSA_NAME)
28238fd1498Szrj 	      derive_equivalences (rhs1,
28338fd1498Szrj 				   fold_binary (PLUS_EXPR, TREE_TYPE (rhs1),
28438fd1498Szrj 						value, rhs2),
28538fd1498Szrj 				   recursion_limit - 1);
28638fd1498Szrj 	    else if (integer_zerop (value))
28738fd1498Szrj 	      {
28838fd1498Szrj 		tree cond = build2 (EQ_EXPR, boolean_type_node,
28938fd1498Szrj 				    gimple_assign_rhs1 (def_stmt),
29038fd1498Szrj 				    gimple_assign_rhs2 (def_stmt));
29138fd1498Szrj 		tree inverted = invert_truthvalue (cond);
29238fd1498Szrj 		record_conditions (&this->cond_equivalences, cond, inverted);
29338fd1498Szrj 	      }
29438fd1498Szrj 	    break;
29538fd1498Szrj 	  }
29638fd1498Szrj 
29738fd1498Szrj 	case EQ_EXPR:
29838fd1498Szrj 	case NE_EXPR:
29938fd1498Szrj 	  {
30038fd1498Szrj 	    if ((code == EQ_EXPR && integer_onep (value))
30138fd1498Szrj 		|| (code == NE_EXPR && integer_zerop (value)))
30238fd1498Szrj 	      {
30338fd1498Szrj 		tree rhs1 = gimple_assign_rhs1 (def_stmt);
30438fd1498Szrj 		tree rhs2 = gimple_assign_rhs2 (def_stmt);
30538fd1498Szrj 
30638fd1498Szrj 		/* If either argument is a constant, then record the
30738fd1498Szrj 		   other argument as being the same as that constant.
30838fd1498Szrj 
30938fd1498Szrj 		   If neither operand is a constant, then we have a
31038fd1498Szrj 		   conditional name == name equivalence.  */
31138fd1498Szrj 		if (TREE_CODE (rhs1) == INTEGER_CST)
31238fd1498Szrj 		  derive_equivalences (rhs2, rhs1, recursion_limit - 1);
31338fd1498Szrj 		else if (TREE_CODE (rhs2) == INTEGER_CST)
31438fd1498Szrj 		  derive_equivalences (rhs1, rhs2, recursion_limit - 1);
31538fd1498Szrj 	      }
31638fd1498Szrj 	    else
31738fd1498Szrj 	      {
31838fd1498Szrj 		tree cond = build2 (code, boolean_type_node,
31938fd1498Szrj 				    gimple_assign_rhs1 (def_stmt),
32038fd1498Szrj 				    gimple_assign_rhs2 (def_stmt));
32138fd1498Szrj 		tree inverted = invert_truthvalue (cond);
32238fd1498Szrj 		if (integer_zerop (value))
32338fd1498Szrj 		  std::swap (cond, inverted);
32438fd1498Szrj 		record_conditions (&this->cond_equivalences, cond, inverted);
32538fd1498Szrj 	      }
32638fd1498Szrj 	    break;
32738fd1498Szrj 	  }
32838fd1498Szrj 
32938fd1498Szrj 	/* For BIT_NOT and NEGATE, we can just apply the operation to the
33038fd1498Szrj 	   VALUE to get the new equivalence.  It will always be a constant
33138fd1498Szrj 	   so we can recurse.  */
33238fd1498Szrj 	case BIT_NOT_EXPR:
33338fd1498Szrj 	case NEGATE_EXPR:
33438fd1498Szrj 	  {
33538fd1498Szrj 	    tree rhs = gimple_assign_rhs1 (def_stmt);
336*e215fc28Szrj 	    tree res;
337*e215fc28Szrj 	    /* If this is a NOT and the operand has a boolean range, then we
338*e215fc28Szrj 	       know its value must be zero or one.  We are not supposed to
339*e215fc28Szrj 	       have a BIT_NOT_EXPR for boolean types with precision > 1 in
340*e215fc28Szrj 	       the general case, see e.g. the handling of TRUTH_NOT_EXPR in
341*e215fc28Szrj 	       the gimplifier, but it can be generated by match.pd out of
342*e215fc28Szrj 	       a BIT_XOR_EXPR wrapped in a BIT_AND_EXPR.  Now the handling
343*e215fc28Szrj 	       of BIT_AND_EXPR above already forces a specific semantics for
344*e215fc28Szrj 	       boolean types with precision > 1 so we must do the same here,
345*e215fc28Szrj 	       otherwise we could change the semantics of TRUTH_NOT_EXPR for
346*e215fc28Szrj 	       boolean types with precision > 1.  */
347*e215fc28Szrj 	    if (code == BIT_NOT_EXPR
348*e215fc28Szrj 		&& TREE_CODE (rhs) == SSA_NAME
349*e215fc28Szrj 		&& ssa_name_has_boolean_range (rhs))
350*e215fc28Szrj 	      {
351*e215fc28Szrj 		if ((TREE_INT_CST_LOW (value) & 1) == 0)
352*e215fc28Szrj 		  res = build_one_cst (TREE_TYPE (rhs));
353*e215fc28Szrj 		else
354*e215fc28Szrj 		  res = build_zero_cst (TREE_TYPE (rhs));
355*e215fc28Szrj 	      }
356*e215fc28Szrj 	    else
357*e215fc28Szrj 	      res = fold_build1 (code, TREE_TYPE (rhs), value);
35838fd1498Szrj 	    derive_equivalences (rhs, res, recursion_limit - 1);
35938fd1498Szrj 	    break;
36038fd1498Szrj 	  }
36138fd1498Szrj 
36238fd1498Szrj 	default:
36338fd1498Szrj 	  {
36438fd1498Szrj 	    if (TREE_CODE_CLASS (code) == tcc_comparison)
36538fd1498Szrj 	      {
36638fd1498Szrj 		tree cond = build2 (code, boolean_type_node,
36738fd1498Szrj 				    gimple_assign_rhs1 (def_stmt),
36838fd1498Szrj 				    gimple_assign_rhs2 (def_stmt));
36938fd1498Szrj 		tree inverted = invert_truthvalue (cond);
37038fd1498Szrj 		if (integer_zerop (value))
37138fd1498Szrj 		  std::swap (cond, inverted);
37238fd1498Szrj 		record_conditions (&this->cond_equivalences, cond, inverted);
37338fd1498Szrj 		break;
37438fd1498Szrj 	      }
37538fd1498Szrj 	    break;
37638fd1498Szrj 	  }
37738fd1498Szrj 	}
37838fd1498Szrj     }
37938fd1498Szrj }
38038fd1498Szrj 
38138fd1498Szrj void
record_simple_equiv(tree lhs,tree rhs)38238fd1498Szrj edge_info::record_simple_equiv (tree lhs, tree rhs)
38338fd1498Szrj {
38438fd1498Szrj   /* If the RHS is a constant, then we may be able to derive
38538fd1498Szrj      further equivalences.  Else just record the name = name
38638fd1498Szrj      equivalence.  */
38738fd1498Szrj   if (TREE_CODE (rhs) == INTEGER_CST)
38838fd1498Szrj     derive_equivalences (lhs, rhs, 4);
38938fd1498Szrj   else
39038fd1498Szrj     simple_equivalences.safe_push (equiv_pair (lhs, rhs));
39138fd1498Szrj }
39238fd1498Szrj 
39338fd1498Szrj /* Free the edge_info data attached to E, if it exists.  */
39438fd1498Szrj 
39538fd1498Szrj void
free_dom_edge_info(edge e)39638fd1498Szrj free_dom_edge_info (edge e)
39738fd1498Szrj {
39838fd1498Szrj   class edge_info *edge_info = (struct edge_info *)e->aux;
39938fd1498Szrj 
40038fd1498Szrj   if (edge_info)
40138fd1498Szrj     delete edge_info;
40238fd1498Szrj }
40338fd1498Szrj 
40438fd1498Szrj /* Free all EDGE_INFO structures associated with edges in the CFG.
40538fd1498Szrj    If a particular edge can be threaded, copy the redirection
40638fd1498Szrj    target from the EDGE_INFO structure into the edge's AUX field
40738fd1498Szrj    as required by code to update the CFG and SSA graph for
40838fd1498Szrj    jump threading.  */
40938fd1498Szrj 
41038fd1498Szrj static void
free_all_edge_infos(void)41138fd1498Szrj free_all_edge_infos (void)
41238fd1498Szrj {
41338fd1498Szrj   basic_block bb;
41438fd1498Szrj   edge_iterator ei;
41538fd1498Szrj   edge e;
41638fd1498Szrj 
41738fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
41838fd1498Szrj     {
41938fd1498Szrj       FOR_EACH_EDGE (e, ei, bb->preds)
42038fd1498Szrj         {
42138fd1498Szrj 	  free_dom_edge_info (e);
42238fd1498Szrj 	  e->aux = NULL;
42338fd1498Szrj 	}
42438fd1498Szrj     }
42538fd1498Szrj }
42638fd1498Szrj 
42738fd1498Szrj /* We have finished optimizing BB, record any information implied by
42838fd1498Szrj    taking a specific outgoing edge from BB.  */
42938fd1498Szrj 
43038fd1498Szrj static void
record_edge_info(basic_block bb)43138fd1498Szrj record_edge_info (basic_block bb)
43238fd1498Szrj {
43338fd1498Szrj   gimple_stmt_iterator gsi = gsi_last_bb (bb);
43438fd1498Szrj   class edge_info *edge_info;
43538fd1498Szrj 
43638fd1498Szrj   if (! gsi_end_p (gsi))
43738fd1498Szrj     {
43838fd1498Szrj       gimple *stmt = gsi_stmt (gsi);
43938fd1498Szrj       location_t loc = gimple_location (stmt);
44038fd1498Szrj 
44138fd1498Szrj       if (gimple_code (stmt) == GIMPLE_SWITCH)
44238fd1498Szrj 	{
44338fd1498Szrj 	  gswitch *switch_stmt = as_a <gswitch *> (stmt);
44438fd1498Szrj 	  tree index = gimple_switch_index (switch_stmt);
44538fd1498Szrj 
44638fd1498Szrj 	  if (TREE_CODE (index) == SSA_NAME)
44738fd1498Szrj 	    {
44838fd1498Szrj 	      int i;
44938fd1498Szrj               int n_labels = gimple_switch_num_labels (switch_stmt);
45038fd1498Szrj 	      tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
45138fd1498Szrj 	      edge e;
45238fd1498Szrj 	      edge_iterator ei;
45338fd1498Szrj 
45438fd1498Szrj 	      for (i = 0; i < n_labels; i++)
45538fd1498Szrj 		{
45638fd1498Szrj 		  tree label = gimple_switch_label (switch_stmt, i);
45738fd1498Szrj 		  basic_block target_bb = label_to_block (CASE_LABEL (label));
45838fd1498Szrj 		  if (CASE_HIGH (label)
45938fd1498Szrj 		      || !CASE_LOW (label)
46038fd1498Szrj 		      || info[target_bb->index])
46138fd1498Szrj 		    info[target_bb->index] = error_mark_node;
46238fd1498Szrj 		  else
46338fd1498Szrj 		    info[target_bb->index] = label;
46438fd1498Szrj 		}
46538fd1498Szrj 
46638fd1498Szrj 	      FOR_EACH_EDGE (e, ei, bb->succs)
46738fd1498Szrj 		{
46838fd1498Szrj 		  basic_block target_bb = e->dest;
46938fd1498Szrj 		  tree label = info[target_bb->index];
47038fd1498Szrj 
47138fd1498Szrj 		  if (label != NULL && label != error_mark_node)
47238fd1498Szrj 		    {
47338fd1498Szrj 		      tree x = fold_convert_loc (loc, TREE_TYPE (index),
47438fd1498Szrj 						 CASE_LOW (label));
47538fd1498Szrj 		      edge_info = new class edge_info (e);
47638fd1498Szrj 		      edge_info->record_simple_equiv (index, x);
47738fd1498Szrj 		    }
47838fd1498Szrj 		}
47938fd1498Szrj 	      free (info);
48038fd1498Szrj 	    }
48138fd1498Szrj 	}
48238fd1498Szrj 
48338fd1498Szrj       /* A COND_EXPR may create equivalences too.  */
48438fd1498Szrj       if (gimple_code (stmt) == GIMPLE_COND)
48538fd1498Szrj 	{
48638fd1498Szrj 	  edge true_edge;
48738fd1498Szrj 	  edge false_edge;
48838fd1498Szrj 
48938fd1498Szrj           tree op0 = gimple_cond_lhs (stmt);
49038fd1498Szrj           tree op1 = gimple_cond_rhs (stmt);
49138fd1498Szrj           enum tree_code code = gimple_cond_code (stmt);
49238fd1498Szrj 
49338fd1498Szrj 	  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
49438fd1498Szrj 
49538fd1498Szrj           /* Special case comparing booleans against a constant as we
49638fd1498Szrj              know the value of OP0 on both arms of the branch.  i.e., we
49738fd1498Szrj              can record an equivalence for OP0 rather than COND.
49838fd1498Szrj 
49938fd1498Szrj 	     However, don't do this if the constant isn't zero or one.
50038fd1498Szrj 	     Such conditionals will get optimized more thoroughly during
50138fd1498Szrj 	     the domwalk.  */
50238fd1498Szrj 	  if ((code == EQ_EXPR || code == NE_EXPR)
50338fd1498Szrj 	      && TREE_CODE (op0) == SSA_NAME
50438fd1498Szrj 	      && ssa_name_has_boolean_range (op0)
50538fd1498Szrj 	      && is_gimple_min_invariant (op1)
50638fd1498Szrj 	      && (integer_zerop (op1) || integer_onep (op1)))
50738fd1498Szrj             {
50838fd1498Szrj 	      tree true_val = constant_boolean_node (true, TREE_TYPE (op0));
50938fd1498Szrj 	      tree false_val = constant_boolean_node (false, TREE_TYPE (op0));
51038fd1498Szrj 
51138fd1498Szrj               if (code == EQ_EXPR)
51238fd1498Szrj                 {
51338fd1498Szrj 		  edge_info = new class edge_info (true_edge);
51438fd1498Szrj 		  edge_info->record_simple_equiv (op0,
51538fd1498Szrj 						  (integer_zerop (op1)
51638fd1498Szrj 						   ? false_val : true_val));
51738fd1498Szrj 		  edge_info = new class edge_info (false_edge);
51838fd1498Szrj 		  edge_info->record_simple_equiv (op0,
51938fd1498Szrj 						  (integer_zerop (op1)
52038fd1498Szrj 						   ? true_val : false_val));
52138fd1498Szrj                 }
52238fd1498Szrj               else
52338fd1498Szrj                 {
52438fd1498Szrj 		  edge_info = new class edge_info (true_edge);
52538fd1498Szrj 		  edge_info->record_simple_equiv (op0,
52638fd1498Szrj 						  (integer_zerop (op1)
52738fd1498Szrj 						   ? true_val : false_val));
52838fd1498Szrj 		  edge_info = new class edge_info (false_edge);
52938fd1498Szrj 		  edge_info->record_simple_equiv (op0,
53038fd1498Szrj 						  (integer_zerop (op1)
53138fd1498Szrj 						   ? false_val : true_val));
53238fd1498Szrj                 }
53338fd1498Szrj             }
53438fd1498Szrj 	  /* This can show up in the IL as a result of copy propagation
53538fd1498Szrj 	     it will eventually be canonicalized, but we have to cope
53638fd1498Szrj 	     with this case within the pass.  */
53738fd1498Szrj           else if (is_gimple_min_invariant (op0)
53838fd1498Szrj                    && TREE_CODE (op1) == SSA_NAME)
53938fd1498Szrj             {
54038fd1498Szrj               tree cond = build2 (code, boolean_type_node, op0, op1);
54138fd1498Szrj               tree inverted = invert_truthvalue_loc (loc, cond);
54238fd1498Szrj               bool can_infer_simple_equiv
54338fd1498Szrj                 = !(HONOR_SIGNED_ZEROS (op0)
54438fd1498Szrj                     && real_zerop (op0));
54538fd1498Szrj               struct edge_info *edge_info;
54638fd1498Szrj 
54738fd1498Szrj 	      edge_info = new class edge_info (true_edge);
54838fd1498Szrj               record_conditions (&edge_info->cond_equivalences, cond, inverted);
54938fd1498Szrj 
55038fd1498Szrj               if (can_infer_simple_equiv && code == EQ_EXPR)
55138fd1498Szrj 		edge_info->record_simple_equiv (op1, op0);
55238fd1498Szrj 
55338fd1498Szrj 	      edge_info = new class edge_info (false_edge);
55438fd1498Szrj               record_conditions (&edge_info->cond_equivalences, inverted, cond);
55538fd1498Szrj 
55638fd1498Szrj               if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
55738fd1498Szrj 		edge_info->record_simple_equiv (op1, op0);
55838fd1498Szrj             }
55938fd1498Szrj 
56038fd1498Szrj           else if (TREE_CODE (op0) == SSA_NAME
56138fd1498Szrj                    && (TREE_CODE (op1) == SSA_NAME
56238fd1498Szrj                        || is_gimple_min_invariant (op1)))
56338fd1498Szrj             {
56438fd1498Szrj               tree cond = build2 (code, boolean_type_node, op0, op1);
56538fd1498Szrj               tree inverted = invert_truthvalue_loc (loc, cond);
56638fd1498Szrj               bool can_infer_simple_equiv
56738fd1498Szrj                 = !(HONOR_SIGNED_ZEROS (op1)
56838fd1498Szrj                     && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
56938fd1498Szrj               struct edge_info *edge_info;
57038fd1498Szrj 
57138fd1498Szrj 	      edge_info = new class edge_info (true_edge);
57238fd1498Szrj               record_conditions (&edge_info->cond_equivalences, cond, inverted);
57338fd1498Szrj 
57438fd1498Szrj               if (can_infer_simple_equiv && code == EQ_EXPR)
57538fd1498Szrj 		edge_info->record_simple_equiv (op0, op1);
57638fd1498Szrj 
57738fd1498Szrj 	      edge_info = new class edge_info (false_edge);
57838fd1498Szrj               record_conditions (&edge_info->cond_equivalences, inverted, cond);
57938fd1498Szrj 
58038fd1498Szrj               if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
58138fd1498Szrj 		edge_info->record_simple_equiv (op0, op1);
58238fd1498Szrj             }
58338fd1498Szrj         }
58438fd1498Szrj     }
58538fd1498Szrj }
58638fd1498Szrj 
58738fd1498Szrj 
58838fd1498Szrj class dom_opt_dom_walker : public dom_walker
58938fd1498Szrj {
59038fd1498Szrj public:
dom_opt_dom_walker(cdi_direction direction,class const_and_copies * const_and_copies,class avail_exprs_stack * avail_exprs_stack,gcond * dummy_cond)59138fd1498Szrj   dom_opt_dom_walker (cdi_direction direction,
59238fd1498Szrj 		      class const_and_copies *const_and_copies,
59338fd1498Szrj 		      class avail_exprs_stack *avail_exprs_stack,
59438fd1498Szrj 		      gcond *dummy_cond)
59538fd1498Szrj     : dom_walker (direction, REACHABLE_BLOCKS),
59638fd1498Szrj       m_const_and_copies (const_and_copies),
59738fd1498Szrj       m_avail_exprs_stack (avail_exprs_stack),
59838fd1498Szrj       m_dummy_cond (dummy_cond) { }
59938fd1498Szrj 
60038fd1498Szrj   virtual edge before_dom_children (basic_block);
60138fd1498Szrj   virtual void after_dom_children (basic_block);
60238fd1498Szrj 
60338fd1498Szrj private:
60438fd1498Szrj 
60538fd1498Szrj   /* Unwindable equivalences, both const/copy and expression varieties.  */
60638fd1498Szrj   class const_and_copies *m_const_and_copies;
60738fd1498Szrj   class avail_exprs_stack *m_avail_exprs_stack;
60838fd1498Szrj 
60938fd1498Szrj   /* VRP data.  */
61038fd1498Szrj   class evrp_range_analyzer evrp_range_analyzer;
61138fd1498Szrj 
61238fd1498Szrj   /* Dummy condition to avoid creating lots of throw away statements.  */
61338fd1498Szrj   gcond *m_dummy_cond;
61438fd1498Szrj 
61538fd1498Szrj   /* Optimize a single statement within a basic block using the
61638fd1498Szrj      various tables mantained by DOM.  Returns the taken edge if
61738fd1498Szrj      the statement is a conditional with a statically determined
61838fd1498Szrj      value.  */
61938fd1498Szrj   edge optimize_stmt (basic_block, gimple_stmt_iterator);
62038fd1498Szrj };
62138fd1498Szrj 
62238fd1498Szrj /* Jump threading, redundancy elimination and const/copy propagation.
62338fd1498Szrj 
62438fd1498Szrj    This pass may expose new symbols that need to be renamed into SSA.  For
62538fd1498Szrj    every new symbol exposed, its corresponding bit will be set in
62638fd1498Szrj    VARS_TO_RENAME.  */
62738fd1498Szrj 
62838fd1498Szrj namespace {
62938fd1498Szrj 
63038fd1498Szrj const pass_data pass_data_dominator =
63138fd1498Szrj {
63238fd1498Szrj   GIMPLE_PASS, /* type */
63338fd1498Szrj   "dom", /* name */
63438fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
63538fd1498Szrj   TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
63638fd1498Szrj   ( PROP_cfg | PROP_ssa ), /* properties_required */
63738fd1498Szrj   0, /* properties_provided */
63838fd1498Szrj   0, /* properties_destroyed */
63938fd1498Szrj   0, /* todo_flags_start */
64038fd1498Szrj   ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
64138fd1498Szrj };
64238fd1498Szrj 
64338fd1498Szrj class pass_dominator : public gimple_opt_pass
64438fd1498Szrj {
64538fd1498Szrj public:
pass_dominator(gcc::context * ctxt)64638fd1498Szrj   pass_dominator (gcc::context *ctxt)
64738fd1498Szrj     : gimple_opt_pass (pass_data_dominator, ctxt),
64838fd1498Szrj       may_peel_loop_headers_p (false)
64938fd1498Szrj   {}
65038fd1498Szrj 
65138fd1498Szrj   /* opt_pass methods: */
clone()65238fd1498Szrj   opt_pass * clone () { return new pass_dominator (m_ctxt); }
set_pass_param(unsigned int n,bool param)65338fd1498Szrj   void set_pass_param (unsigned int n, bool param)
65438fd1498Szrj     {
65538fd1498Szrj       gcc_assert (n == 0);
65638fd1498Szrj       may_peel_loop_headers_p = param;
65738fd1498Szrj     }
gate(function *)65838fd1498Szrj   virtual bool gate (function *) { return flag_tree_dom != 0; }
65938fd1498Szrj   virtual unsigned int execute (function *);
66038fd1498Szrj 
66138fd1498Szrj  private:
66238fd1498Szrj   /* This flag is used to prevent loops from being peeled repeatedly in jump
66338fd1498Szrj      threading; it will be removed once we preserve loop structures throughout
66438fd1498Szrj      the compilation -- we will be able to mark the affected loops directly in
66538fd1498Szrj      jump threading, and avoid peeling them next time.  */
66638fd1498Szrj   bool may_peel_loop_headers_p;
66738fd1498Szrj }; // class pass_dominator
66838fd1498Szrj 
/* Main driver for the DOM pass: walk the dominator tree of FUN doing
   const/copy propagation and redundancy elimination, then perform the
   jump threading that the walk queued up, and finally apply the EH and
   noreturn fixups made necessary by statement removal.  Always returns
   0; follow-up cleanups are requested via the pass's todo flags.  */
unsigned int
pass_dominator::execute (function *fun)
{
  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  hash_table<expr_elt_hasher> *avail_exprs
    = new hash_table<expr_elt_hasher> (1024);
  class avail_exprs_stack *avail_exprs_stack
    = new class avail_exprs_stack (avail_exprs);
  class const_and_copies *const_and_copies = new class const_and_copies ();
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_noreturn_fixup.create (0);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.

     TODO: We don't need to set LOOPS_HAVE_PREHEADERS generally, but due
     to several overly conservative bail-outs in jump threading, case
     gcc.dg/tree-ssa/pr21417.c can't be threaded if loop preheader is
     missing.  We should improve jump threading in future then
     LOOPS_HAVE_PREHEADERS won't be needed here.  */
  loop_optimizer_init (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* We want to create the edge info structures before the dominator walk
     so that they'll be in place for the jump threader, particularly when
     threading through a join block.

     The conditions will be lazily updated with global equivalences as
     we reach them during the dominator walk.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    record_edge_info (bb);

  /* Scratch GIMPLE_COND reused by the threader when it needs to
     materialize and simplify a condition.  */
  gcond *dummy_cond = gimple_build_cond (NE_EXPR, integer_zero_node,
					 integer_zero_node, NULL, NULL);

  /* Recursively walk the dominator tree optimizing statements.  */
  dom_opt_dom_walker walker (CDI_DOMINATORS, const_and_copies,
			     avail_exprs_stack, dummy_cond);
  walker.walk (fun->cfg->x_entry_block_ptr);

  /* Look for blocks where we cleared EDGE_EXECUTABLE on an outgoing
     edge.  When found, remove jump threads which contain any outgoing
     edge from the affected block.  */
  if (cfg_altered)
    {
      FOR_EACH_BB_FN (bb, fun)
	{
	  edge_iterator ei;
	  edge e;

	  /* First see if there are any edges without EDGE_EXECUTABLE
	     set.  */
	  bool found = false;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if ((e->flags & EDGE_EXECUTABLE) == 0)
		{
		  found = true;
		  break;
		}
	    }

	  /* If there were any such edges found, then remove jump threads
	     containing any edge leaving BB.  */
	  if (found)
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      remove_jump_threads_including (e);
	}
    }

  /* Flush pending operand updates on every statement the walk
     modified, before SSA update and jump threading look at them.  */
  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB_FN (bb, fun)
      {
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (may_peel_loop_headers_p);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
	 needing EH cleanup; the new successor of these blocks, which
	 has inherited from the original block, needs the cleanup.
	 Don't clear bits in the bitmap, as that can break the bitmap
	 iterator.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fun, i);
	  if (bb == NULL)
	    continue;
	  while (single_succ_p (bb)
		 && (single_succ_edge (bb)->flags
		     & (EDGE_EH|EDGE_DFS_BACK)) == 0)
	    bb = single_succ (bb);
	  if (bb == EXIT_BLOCK_PTR_FOR_FN (fun))
	    continue;
	  if ((unsigned) bb->index != i)
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	}

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_clear (need_eh_cleanup);
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk or before
     jump threading finished.  Do this in reverse order so we don't
     inadvertedly remove a stmt we want to fixup by visiting a dominating
     now noreturn call first.  */
  while (!need_noreturn_fixup.is_empty ())
    {
      gimple *stmt = need_noreturn_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (fun, "Redundant expressions eliminated",
			    opt_stats.num_re);
  statistics_counter_event (fun, "Constants propagated",
			    opt_stats.num_const_prop);
  statistics_counter_event (fun, "Copies propagated",
			    opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file, avail_exprs);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  delete avail_exprs;
  avail_exprs = NULL;

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);
  need_noreturn_fixup.release ();
  delete avail_exprs_stack;
  delete const_and_copies;

  /* Free the value-handle array.  */
  threadedge_finalize_values ();

  return 0;
}
85538fd1498Szrj 
85638fd1498Szrj } // anon namespace
85738fd1498Szrj 
85838fd1498Szrj gimple_opt_pass *
make_pass_dominator(gcc::context * ctxt)85938fd1498Szrj make_pass_dominator (gcc::context *ctxt)
86038fd1498Szrj {
86138fd1498Szrj   return new pass_dominator (ctxt);
86238fd1498Szrj }
86338fd1498Szrj 
/* A hack until we remove threading from tree-vrp.c and bring the
   simplification routine into the dom_opt_dom_walker class.  This file
   scoped pointer exposes the active VRP lattice to the static
   simplification callback below.  */
static class vr_values *x_vr_values;
86738fd1498Szrj 
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.

   STMT is the statement to simplify; WITHIN_STMT provides the context
   point for the VRP queries.  Returns a constant or SSA_NAME that STMT
   is known to compute, a case label for a gswitch whose target is
   known, or NULL/NULL_TREE when nothing useful can be determined.  */
static tree
simplify_stmt_for_jump_threading (gimple *stmt,
				  gimple *within_stmt ATTRIBUTE_UNUSED,
				  class avail_exprs_stack *avail_exprs_stack,
				  basic_block bb ATTRIBUTE_UNUSED)
{
  /* First query our hash table to see if the expression is available
     there.  A non-NULL return value will be either a constant or another
     SSA_NAME.  */
  tree cached_lhs =  avail_exprs_stack->lookup_avail_expr (stmt, false, true);
  if (cached_lhs)
    return cached_lhs;

  /* If the hash table query failed, query VRP information.  This is
     essentially the same as tree-vrp's simplification routine.  The
     copy in tree-vrp is scheduled for removal in gcc-9.  */
  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      cached_lhs
	= x_vr_values->vrp_evaluate_conditional (gimple_cond_code (cond_stmt),
						 gimple_cond_lhs (cond_stmt),
						 gimple_cond_rhs (cond_stmt),
						 within_stmt);
      return cached_lhs;
    }

  if (gswitch *switch_stmt = dyn_cast <gswitch *> (stmt))
    {
      tree op = gimple_switch_index (switch_stmt);
      if (TREE_CODE (op) != SSA_NAME)
	return NULL_TREE;

      /* Only a non-symbolic (anti-)range on the index lets us pick a
	 specific case label.  */
      value_range *vr = x_vr_values->get_value_range (op);
      if ((vr->type != VR_RANGE && vr->type != VR_ANTI_RANGE)
	  || symbolic_range_p (vr))
	return NULL_TREE;

      if (vr->type == VR_RANGE)
	{
	  size_t i, j;

	  find_case_label_range (switch_stmt, vr->min, vr->max, &i, &j);

	  /* A single candidate label is taken only if it covers the
	     whole range of the index.  */
	  if (i == j)
	    {
	      tree label = gimple_switch_label (switch_stmt, i);

	      if (CASE_HIGH (label) != NULL_TREE
		  ? (tree_int_cst_compare (CASE_LOW (label), vr->min) <= 0
		     && tree_int_cst_compare (CASE_HIGH (label), vr->max) >= 0)
		  : (tree_int_cst_equal (CASE_LOW (label), vr->min)
		     && tree_int_cst_equal (vr->min, vr->max)))
		return label;

	      /* An empty candidate set means the default label (index 0)
		 is taken.  */
	      if (i > j)
		return gimple_switch_label (switch_stmt, 0);
	    }
	}

      if (vr->type == VR_ANTI_RANGE)
          {
            unsigned n = gimple_switch_num_labels (switch_stmt);
            tree min_label = gimple_switch_label (switch_stmt, 1);
            tree max_label = gimple_switch_label (switch_stmt, n - 1);

            /* The default label will be taken only if the anti-range of the
               operand is entirely outside the bounds of all the (non-default)
               case labels.  */
            if (tree_int_cst_compare (vr->min, CASE_LOW (min_label)) <= 0
                && (CASE_HIGH (max_label) != NULL_TREE
                    ? tree_int_cst_compare (vr->max, CASE_HIGH (max_label)) >= 0
                    : tree_int_cst_compare (vr->max, CASE_LOW (max_label)) >= 0))
            return gimple_switch_label (switch_stmt, 0);
          }
	return NULL_TREE;
    }

  if (gassign *assign_stmt = dyn_cast <gassign *> (stmt))
    {
      tree lhs = gimple_assign_lhs (assign_stmt);
      if (TREE_CODE (lhs) == SSA_NAME
	  && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	      || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && stmt_interesting_for_vrp (stmt))
	{
	  edge dummy_e;
	  tree dummy_tree;
	  value_range new_vr = VR_INITIALIZER;
	  x_vr_values->extract_range_from_stmt (stmt, &dummy_e,
					      &dummy_tree, &new_vr);
	  /* Only a singleton integer range gives a usable constant.  */
	  if (range_int_cst_singleton_p (&new_vr))
	    return new_vr.min;
	}
    }
  return NULL;
}
96638fd1498Szrj 
96738fd1498Szrj /* Valueize hook for gimple_fold_stmt_to_constant_1.  */
96838fd1498Szrj 
96938fd1498Szrj static tree
dom_valueize(tree t)97038fd1498Szrj dom_valueize (tree t)
97138fd1498Szrj {
97238fd1498Szrj   if (TREE_CODE (t) == SSA_NAME)
97338fd1498Szrj     {
97438fd1498Szrj       tree tem = SSA_NAME_VALUE (t);
97538fd1498Szrj       if (tem)
97638fd1498Szrj 	return tem;
97738fd1498Szrj     }
97838fd1498Szrj   return t;
97938fd1498Szrj }
98038fd1498Szrj 
/* We have just found an equivalence for LHS on an edge E.
   Look backwards to other uses of LHS and see if we can derive
   additional equivalences that are valid on edge E.

   Each use statement of LHS that dominates E->dest is re-folded with
   LHS's new value; if it simplifies to a constant or SSA_NAME, that
   result is recorded in CONST_AND_COPIES.  */
static void
back_propagate_equivalences (tree lhs, edge e,
			     class const_and_copies *const_and_copies)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  bitmap domby = NULL;
  basic_block dest = e->dest;

  /* Iterate over the uses of LHS to see if any dominate E->dest.
     If so, they may create useful equivalences too.

     ???  If the code gets re-organized to a worklist to catch more
     indirect opportunities and it is made to handle PHIs then this
     should only consider use_stmts in basic-blocks we have already visited.  */
  FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);

      /* Often the use is in DEST, which we trivially know we can't use.
	 This is cheaper than the dominator set tests below.  */
      if (dest == gimple_bb (use_stmt))
	continue;

      /* Filter out statements that can never produce a useful
	 equivalence.  */
      tree lhs2 = gimple_get_lhs (use_stmt);
      if (!lhs2 || TREE_CODE (lhs2) != SSA_NAME)
	continue;

      /* Profiling has shown the domination tests here can be fairly
	 expensive.  We get significant improvements by building the
	 set of blocks that dominate BB.  We can then just test
	 for set membership below.

	 We also initialize the set lazily since often the only uses
	 are going to be in the same block as DEST.  */
      if (!domby)
	{
	  domby = BITMAP_ALLOC (NULL);
	  basic_block bb = get_immediate_dominator (CDI_DOMINATORS, dest);
	  /* Walk up the dominator tree collecting all dominators of
	     DEST into the bitmap.  */
	  while (bb)
	    {
	      bitmap_set_bit (domby, bb->index);
	      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
	    }
	}

      /* This tests if USE_STMT does not dominate DEST.  */
      if (!bitmap_bit_p (domby, gimple_bb (use_stmt)->index))
	continue;

      /* At this point USE_STMT dominates DEST and may result in a
	 useful equivalence.  Try to simplify its RHS to a constant
	 or SSA_NAME.  */
      tree res = gimple_fold_stmt_to_constant_1 (use_stmt, dom_valueize,
						 no_follow_ssa_edges);
      if (res && (TREE_CODE (res) == SSA_NAME || is_gimple_min_invariant (res)))
	record_equality (lhs2, res, const_and_copies);
    }

  if (domby)
    BITMAP_FREE (domby);
}
104838fd1498Szrj 
/* Record into CONST_AND_COPIES and AVAIL_EXPRS_STACK any equivalences implied
   by traversing edge E (which are cached in E->aux).

   Callers are responsible for managing the unwinding markers.  */
void
record_temporary_equivalences (edge e,
			       class const_and_copies *const_and_copies,
			       class avail_exprs_stack *avail_exprs_stack)
{
  int i;
  class edge_info *edge_info = (class edge_info *) e->aux;

  /* If we have info associated with this edge, record it into
     our equivalence tables.  */
  if (edge_info)
    {
      cond_equivalence *eq;
      /* If we have 0 = COND or 1 = COND equivalences, record them
	 into our expression hash tables.  */
      for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
	avail_exprs_stack->record_cond (eq);

      edge_info::equiv_pair *seq;
      for (i = 0; edge_info->simple_equivalences.iterate (i, &seq); ++i)
	{
	  tree lhs = seq->first;
	  if (!lhs || TREE_CODE (lhs) != SSA_NAME)
	    continue;

	  /* Record the simple NAME = VALUE equivalence.  */
	  tree rhs = seq->second;

	  /* If this is a SSA_NAME = SSA_NAME equivalence and one operand is
	     cheaper to compute than the other, then set up the equivalence
	     such that we replace the expensive one with the cheap one.

	     If they are the same cost to compute, then do not record
	     anything.  */
	  if (TREE_CODE (lhs) == SSA_NAME && TREE_CODE (rhs) == SSA_NAME)
	    {
	      gimple *rhs_def = SSA_NAME_DEF_STMT (rhs);
	      int rhs_cost = estimate_num_insns (rhs_def, &eni_size_weights);

	      gimple *lhs_def = SSA_NAME_DEF_STMT (lhs);
	      int lhs_cost = estimate_num_insns (lhs_def, &eni_size_weights);

	      if (rhs_cost > lhs_cost)
	        record_equality (rhs, lhs, const_and_copies);
	      else if (rhs_cost < lhs_cost)
	        record_equality (lhs, rhs, const_and_copies);
	    }
	  else
	    record_equality (lhs, rhs, const_and_copies);


	  /* Any equivalence found for LHS may result in additional
	     equivalences for other uses of LHS that we have already
	     processed.  */
	  back_propagate_equivalences (lhs, e, const_and_copies);
	}
    }
}
111138fd1498Szrj 
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  Record such equivalences for the PHIs in BB directly
   in the SSA_NAME_VALUE table (no unwinding needed; see below).  */

static void
record_equivalences_from_phis (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
	{
	  tree t = gimple_phi_arg_def (phi, i);

	  /* Ignore alternatives which are the same as our LHS.  Since
	     LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
	     can simply compare pointers.  */
	  if (lhs == t)
	    continue;

	  /* If the associated edge is not marked as executable, then it
	     can be ignored.  */
	  if ((gimple_phi_arg_edge (phi, i)->flags & EDGE_EXECUTABLE) == 0)
	    continue;

	  t = dom_valueize (t);

	  /* If T is an SSA_NAME and its associated edge is a backedge,
	     then quit as we can not utilize this equivalence.  */
	  if (TREE_CODE (t) == SSA_NAME
	      && (gimple_phi_arg_edge (phi, i)->flags & EDGE_DFS_BACK))
	    break;

	  /* If we have not processed an alternative yet, then set
	     RHS to this alternative.  */
	  if (rhs == NULL)
	    rhs = t;
	  /* If we have processed an alternative (stored in RHS), then
	     see if it is equal to this one.  If it isn't, then stop
	     the search.  */
	  else if (! operand_equal_for_phi_arg_p (rhs, t))
	    break;
	}

      /* If we had no interesting alternatives, then all the RHS alternatives
	 must have been the same as LHS.  */
      if (!rhs)
	rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
	 breaking out of the loop, then we have a PHI which may create
	 a useful equivalence.  We do not need to record unwind data for
	 this, since this is a true assignment and not an equivalence
	 inferred from a comparison.  All uses of this ssa name are dominated
	 by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi)
	  && may_propagate_copy (lhs, rhs))
	set_ssa_name_value (lhs, rhs);
    }
}
118138fd1498Szrj 
118238fd1498Szrj /* Record any equivalences created by the incoming edge to BB into
118338fd1498Szrj    CONST_AND_COPIES and AVAIL_EXPRS_STACK.  If BB has more than one
118438fd1498Szrj    incoming edge, then no equivalence is created.  */
118538fd1498Szrj 
118638fd1498Szrj static void
record_equivalences_from_incoming_edge(basic_block bb,class const_and_copies * const_and_copies,class avail_exprs_stack * avail_exprs_stack)118738fd1498Szrj record_equivalences_from_incoming_edge (basic_block bb,
118838fd1498Szrj     class const_and_copies *const_and_copies,
118938fd1498Szrj     class avail_exprs_stack *avail_exprs_stack)
119038fd1498Szrj {
119138fd1498Szrj   edge e;
119238fd1498Szrj   basic_block parent;
119338fd1498Szrj 
119438fd1498Szrj   /* If our parent block ended with a control statement, then we may be
119538fd1498Szrj      able to record some equivalences based on which outgoing edge from
119638fd1498Szrj      the parent was followed.  */
119738fd1498Szrj   parent = get_immediate_dominator (CDI_DOMINATORS, bb);
119838fd1498Szrj 
119938fd1498Szrj   e = single_pred_edge_ignoring_loop_edges (bb, true);
120038fd1498Szrj 
120138fd1498Szrj   /* If we had a single incoming edge from our parent block, then enter
120238fd1498Szrj      any data associated with the edge into our tables.  */
120338fd1498Szrj   if (e && e->src == parent)
120438fd1498Szrj     record_temporary_equivalences (e, const_and_copies, avail_exprs_stack);
120538fd1498Szrj }
120638fd1498Szrj 
120738fd1498Szrj /* Dump statistics for the hash table HTAB.  */
120838fd1498Szrj 
120938fd1498Szrj static void
htab_statistics(FILE * file,const hash_table<expr_elt_hasher> & htab)121038fd1498Szrj htab_statistics (FILE *file, const hash_table<expr_elt_hasher> &htab)
121138fd1498Szrj {
121238fd1498Szrj   fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
121338fd1498Szrj 	   (long) htab.size (),
121438fd1498Szrj 	   (long) htab.elements (),
121538fd1498Szrj 	   htab.collisions ());
121638fd1498Szrj }
121738fd1498Szrj 
/* Dump SSA statistics on FILE: counters accumulated in the global
   opt_stats structure during the dominator walk, plus statistics for
   the AVAIL_EXPRS hash table.  */

static void
dump_dominator_optimization_stats (FILE *file,
				   hash_table<expr_elt_hasher> *avail_exprs)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
	   opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, *avail_exprs);
}
123438fd1498Szrj 
/* X and Y are known to be the two operands of an EQ_EXPR that holds on
   the current path.  Canonicalize the pair and, when safe, record the
   equivalence in CONST_AND_COPIES so that X may be replaced by Y.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y, class const_and_copies *const_and_copies)
{
  tree prev_x = NULL, prev_y = NULL;

  if (tree_swap_operands_p (x, y))
    std::swap (x, y);

  /* Most of the time tree_swap_operands_p does what we want.  But there
     are cases where we know one operand is better for copy propagation than
     the other.  Given no other code cares about ordering of equality
     comparison operators for that purpose, we just handle the special cases
     here.  */
  if (TREE_CODE (x) == SSA_NAME && TREE_CODE (y) == SSA_NAME)
    {
      /* If one operand is a single use operand, then make it
	 X.  This will preserve its single use properly and if this
	 conditional is eliminated, the computation of X can be
	 eliminated as well.  */
      if (has_single_use (y) && ! has_single_use (x))
	std::swap (x, y);
    }
  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x))
    /* Rotate so the invariant ends up on the RHS.  */
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (x)
      && (TREE_CODE (y) != REAL_CST
	  || real_equal (&dconst0, &TREE_REAL_CST (y))))
    return;

  const_and_copies->record_const_or_copy (x, y, prev_x);
}
129438fd1498Szrj 
129538fd1498Szrj /* Returns true when STMT is a simple iv increment.  It detects the
129638fd1498Szrj    following situation:
129738fd1498Szrj 
129838fd1498Szrj    i_1 = phi (..., i_k)
129938fd1498Szrj    [...]
130038fd1498Szrj    i_j = i_{j-1}  for each j : 2 <= j <= k-1
130138fd1498Szrj    [...]
130238fd1498Szrj    i_k = i_{k-1} +/- ...  */
130338fd1498Szrj 
130438fd1498Szrj bool
simple_iv_increment_p(gimple * stmt)130538fd1498Szrj simple_iv_increment_p (gimple *stmt)
130638fd1498Szrj {
130738fd1498Szrj   enum tree_code code;
130838fd1498Szrj   tree lhs, preinc;
130938fd1498Szrj   gimple *phi;
131038fd1498Szrj   size_t i;
131138fd1498Szrj 
131238fd1498Szrj   if (gimple_code (stmt) != GIMPLE_ASSIGN)
131338fd1498Szrj     return false;
131438fd1498Szrj 
131538fd1498Szrj   lhs = gimple_assign_lhs (stmt);
131638fd1498Szrj   if (TREE_CODE (lhs) != SSA_NAME)
131738fd1498Szrj     return false;
131838fd1498Szrj 
131938fd1498Szrj   code = gimple_assign_rhs_code (stmt);
132038fd1498Szrj   if (code != PLUS_EXPR
132138fd1498Szrj       && code != MINUS_EXPR
132238fd1498Szrj       && code != POINTER_PLUS_EXPR)
132338fd1498Szrj     return false;
132438fd1498Szrj 
132538fd1498Szrj   preinc = gimple_assign_rhs1 (stmt);
132638fd1498Szrj   if (TREE_CODE (preinc) != SSA_NAME)
132738fd1498Szrj     return false;
132838fd1498Szrj 
132938fd1498Szrj   phi = SSA_NAME_DEF_STMT (preinc);
133038fd1498Szrj   while (gimple_code (phi) != GIMPLE_PHI)
133138fd1498Szrj     {
133238fd1498Szrj       /* Follow trivial copies, but not the DEF used in a back edge,
133338fd1498Szrj 	 so that we don't prevent coalescing.  */
133438fd1498Szrj       if (!gimple_assign_ssa_name_copy_p (phi))
133538fd1498Szrj 	return false;
133638fd1498Szrj       preinc = gimple_assign_rhs1 (phi);
133738fd1498Szrj       phi = SSA_NAME_DEF_STMT (preinc);
133838fd1498Szrj     }
133938fd1498Szrj 
134038fd1498Szrj   for (i = 0; i < gimple_phi_num_args (phi); i++)
134138fd1498Szrj     if (gimple_phi_arg_def (phi, i) == lhs)
134238fd1498Szrj       return true;
134338fd1498Szrj 
134438fd1498Szrj   return false;
134538fd1498Szrj }
134638fd1498Szrj 
/* Propagate known values from SSA_NAME_VALUE into the PHI nodes of the
   successors of BB.

   Any equivalences attached to BB's outgoing edges are recorded into
   CONST_AND_COPIES only for the duration of that edge's processing;
   an unwind marker is pushed first and popped afterwards so they never
   leak into another edge's PHI arguments.  */

static void
cprop_into_successor_phis (basic_block bb,
			   class const_and_copies *const_and_copies)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gphi_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
	 into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      /* Nothing to do if the destination block has no PHI nodes.  */
      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
	continue;

      /* We may have an equivalence associated with this edge.  While
	 we cannot propagate it into non-dominated blocks, we can
	 propagate them into PHIs in non-dominated blocks.  */

      /* Push the unwind marker so we can reset the const and copies
	 table back to its original state after processing this edge.  */
      const_and_copies->push_marker ();

      /* Extract and record any simple NAME = VALUE equivalences.

	 Don't bother with [01] = COND equivalences, they're not useful
	 here.  */
      class edge_info *edge_info = (class edge_info *) e->aux;

      if (edge_info)
	{
	  edge_info::equiv_pair *seq;
	  for (int i = 0; edge_info->simple_equivalences.iterate (i, &seq); ++i)
	    {
	      tree lhs = seq->first;
	      tree rhs = seq->second;

	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
		const_and_copies->record_const_or_copy (lhs, rhs);
	    }

	}

      /* INDX is the index of the PHI argument associated with edge E.  */
      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree new_val;
	  use_operand_p orig_p;
	  tree orig_val;
          gphi *phi = gsi.phi ();

	  /* The alternative may be associated with a constant, so verify
	     it is an SSA_NAME before doing anything with it.  */
	  orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
	  orig_val = get_use_from_ptr (orig_p);
	  if (TREE_CODE (orig_val) != SSA_NAME)
	    continue;

	  /* If we have *ORIG_P in our constant/copy table, then replace
	     ORIG_P with its value in our constant/copy table.  */
	  new_val = SSA_NAME_VALUE (orig_val);
	  if (new_val
	      && new_val != orig_val
	      && may_propagate_copy (orig_val, new_val))
	    propagate_value (orig_p, new_val);
	}

      /* Discard the equivalences recorded for this edge.  */
      const_and_copies->pop_to_marker ();
    }
}
142638fd1498Szrj 
/* Called by the dominator walker before visiting the children of BB.
   Record equivalences exposed by BB's incoming edge and PHIs, then
   optimize each statement in BB.  Returns the edge optimize_stmt
   determined to be taken (or NULL), which the walker can use to prune
   unreachable successors.  */

edge
dom_opt_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Let the range analyzer pick up whatever is implied by entry to BB.  */
  evrp_range_analyzer.enter (bb);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  m_avail_exprs_stack->push_marker ();
  m_const_and_copies->push_marker ();

  record_equivalences_from_incoming_edge (bb, m_const_and_copies,
					  m_avail_exprs_stack);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  /* Create equivalences from redundant PHIs.  PHIs are only truly
     redundant when they exist in the same block, so push another
     marker and unwind right afterwards.  */
  m_avail_exprs_stack->push_marker ();
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_redundant_computations (&gsi, m_const_and_copies,
				      m_avail_exprs_stack);
  m_avail_exprs_stack->pop_to_marker ();

  edge taken_edge = NULL;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      evrp_range_analyzer.record_ranges_from_stmt (gsi_stmt (gsi), false);
      taken_edge = this->optimize_stmt (bb, gsi);
    }

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb, m_const_and_copies);
  /* The debug counter allows bisecting problems caused by treating
     the not-taken edges as unreachable.  */
  if (taken_edge && !dbg_cnt (dom_unreachable_edges))
    return NULL;

  return taken_edge;
}
147238fd1498Szrj 
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

void
dom_opt_dom_walker::after_dom_children (basic_block bb)
{
  /* Expose the EVRP range data to the jump-threading simplifier via
     the global x_vr_values for the duration of the call, then clear
     it so stale pointers cannot be used afterwards.  */
  x_vr_values = evrp_range_analyzer.get_vr_values ();
  thread_outgoing_edges (bb, m_dummy_cond, m_const_and_copies,
			 m_avail_exprs_stack,
			 &evrp_range_analyzer,
			 simplify_stmt_for_jump_threading);
  x_vr_values = NULL;

  /* These remove expressions local to BB from the tables.  */
  m_avail_exprs_stack->pop_to_marker ();
  m_const_and_copies->pop_to_marker ();
  evrp_range_analyzer.leave (bb);
}
149238fd1498Szrj 
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into AVAIL_EXPRS_STACK and
   CONST_AND_COPIES.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi,
				  class const_and_copies *const_and_copies,
				  class avail_exprs_stack *avail_exprs_stack)
{
  tree expr_type;
  tree cached_lhs;
  tree def;
  bool insert = true;
  bool assigns_var_p = false;

  gimple *stmt = gsi_stmt (*gsi);

  /* The value defined by STMT; PHI results need a different accessor.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    def = gimple_phi_result (stmt);
  else
    def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
	 overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = avail_exprs_stack->lookup_avail_expr (stmt, insert, true);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      /* Calls without an LHS were filtered out by the DEF test above.  */
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    expr_type = TREE_TYPE (gimple_switch_index (swtch_stmt));
  else if (gimple_code (stmt) == GIMPLE_PHI)
    /* We can't propagate into a phi, so the logic below doesn't apply.
       Instead record an equivalence between the cached LHS and the
       PHI result of this statement, provided they are in the same block.
       This should be sufficient to kill the redundant phi.  */
    {
      if (def && cached_lhs)
	const_and_copies->record_const_or_copy (def, cached_lhs);
      return;
    }
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
  {
      gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
			   || is_gimple_min_invariant (cached_lhs));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Replaced redundant expr '");
	  print_gimple_expr (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
	}

      opt_stats.num_re++;

      /* Convert the cached value to the type STMT assigns when the
	 types are not trivially interchangeable.  */
      if (assigns_var_p
	  && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
	cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
  }
}
160138fd1498Szrj 
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences into AVAIL_EXPRS_STACK.

   We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */

static void
record_equivalences_from_stmt (gimple *stmt, int may_optimize_p,
			       class avail_exprs_stack *avail_exprs_stack)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* If the RHS of the assignment is a constant or another variable that
	 may be propagated, register it in the CONST_AND_COPIES table.  We
	 do not need to record unwind data for this, since this is a true
	 assignment and not an equivalence inferred from a comparison.  All
	 uses of this ssa name are dominated by this assignment, so unwinding
	 just costs time and space.  */
      if (may_optimize_p
	  && (TREE_CODE (rhs) == SSA_NAME
	      || is_gimple_min_invariant (rhs)))
	{
	  rhs = dom_valueize (rhs);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "==== ASGN ");
	      print_generic_expr (dump_file, lhs);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, rhs);
	      fprintf (dump_file, "\n");
	    }

	  set_ssa_name_value (lhs, rhs);
	}
    }

  /* Make sure we can propagate &x + CST.  */
  if (lhs_code == SSA_NAME
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
    {
      tree op0 = gimple_assign_rhs1 (stmt);
      tree op1 = gimple_assign_rhs2 (stmt);
      /* Fold &x + CST into the equivalent &MEM[&x, CST] address so the
	 whole address can be propagated as a single value.  */
      tree new_rhs
	= build_fold_addr_expr (fold_build2 (MEM_REF,
					     TREE_TYPE (TREE_TYPE (op0)),
					     unshare_expr (op0),
					     fold_convert (ptr_type_node,
							   op1)));
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "==== ASGN ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " = ");
	  print_generic_expr (dump_file, new_rhs);
	  fprintf (dump_file, "\n");
	}

      set_ssa_name_value (lhs, new_rhs);
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
  if (!gimple_has_volatile_ops (stmt)
      && gimple_references_memory_p (stmt)
      && gimple_assign_single_p (stmt)
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      gassign *new_stmt;

      /* Build a new statement with the RHS and LHS exchanged.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* NOTE tuples.  The call to gimple_build_assign below replaced
             a call to build_gimple_modify_stmt, which did not set the
             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
             may cause an SSA validation failure, as the LHS may be a
             default-initialized name and should have no definition.  I'm
             a bit dubious of this, as the artificial statement that we
             generate here may in fact be ill-formed, but it is simply
             used as an internal device in this pass, and never becomes
             part of the CFG.  */
	  gimple *defstmt = SSA_NAME_DEF_STMT (rhs);
          new_stmt = gimple_build_assign (rhs, lhs);
          SSA_NAME_DEF_STMT (rhs) = defstmt;
        }
      else
        new_stmt = gimple_build_assign (rhs, lhs);

      /* The artificial load reuses the store's VDEF as its VUSE.  */
      gimple_set_vuse (new_stmt, gimple_vdef (stmt));

      /* Finally enter the statement into the available expression
	 table.  */
      avail_exprs_stack->lookup_avail_expr (new_stmt, true, true);
    }
}
171738fd1498Szrj 
171838fd1498Szrj /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
171938fd1498Szrj    CONST_AND_COPIES.  */
172038fd1498Szrj 
172138fd1498Szrj static void
cprop_operand(gimple * stmt,use_operand_p op_p)172238fd1498Szrj cprop_operand (gimple *stmt, use_operand_p op_p)
172338fd1498Szrj {
172438fd1498Szrj   tree val;
172538fd1498Szrj   tree op = USE_FROM_PTR (op_p);
172638fd1498Szrj 
172738fd1498Szrj   /* If the operand has a known constant value or it is known to be a
172838fd1498Szrj      copy of some other variable, use the value or copy stored in
172938fd1498Szrj      CONST_AND_COPIES.  */
173038fd1498Szrj   val = SSA_NAME_VALUE (op);
173138fd1498Szrj   if (val && val != op)
173238fd1498Szrj     {
173338fd1498Szrj       /* Do not replace hard register operands in asm statements.  */
173438fd1498Szrj       if (gimple_code (stmt) == GIMPLE_ASM
173538fd1498Szrj 	  && !may_propagate_copy_into_asm (op))
173638fd1498Szrj 	return;
173738fd1498Szrj 
173838fd1498Szrj       /* Certain operands are not allowed to be copy propagated due
173938fd1498Szrj 	 to their interaction with exception handling and some GCC
174038fd1498Szrj 	 extensions.  */
174138fd1498Szrj       if (!may_propagate_copy (op, val))
174238fd1498Szrj 	return;
174338fd1498Szrj 
174438fd1498Szrj       /* Do not propagate copies into BIVs.
174538fd1498Szrj          See PR23821 and PR62217 for how this can disturb IV and
174638fd1498Szrj 	 number of iteration analysis.  */
174738fd1498Szrj       if (TREE_CODE (val) != INTEGER_CST)
174838fd1498Szrj 	{
174938fd1498Szrj 	  gimple *def = SSA_NAME_DEF_STMT (op);
175038fd1498Szrj 	  if (gimple_code (def) == GIMPLE_PHI
175138fd1498Szrj 	      && gimple_bb (def)->loop_father->header == gimple_bb (def))
175238fd1498Szrj 	    return;
175338fd1498Szrj 	}
175438fd1498Szrj 
175538fd1498Szrj       /* Dump details.  */
175638fd1498Szrj       if (dump_file && (dump_flags & TDF_DETAILS))
175738fd1498Szrj 	{
175838fd1498Szrj 	  fprintf (dump_file, "  Replaced '");
175938fd1498Szrj 	  print_generic_expr (dump_file, op, dump_flags);
176038fd1498Szrj 	  fprintf (dump_file, "' with %s '",
176138fd1498Szrj 		   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
176238fd1498Szrj 	  print_generic_expr (dump_file, val, dump_flags);
176338fd1498Szrj 	  fprintf (dump_file, "'\n");
176438fd1498Szrj 	}
176538fd1498Szrj 
176638fd1498Szrj       if (TREE_CODE (val) != SSA_NAME)
176738fd1498Szrj 	opt_stats.num_const_prop++;
176838fd1498Szrj       else
176938fd1498Szrj 	opt_stats.num_copy_prop++;
177038fd1498Szrj 
177138fd1498Szrj       propagate_value (op_p, val);
177238fd1498Szrj 
177338fd1498Szrj       /* And note that we modified this statement.  This is now
177438fd1498Szrj 	 safe, even if we changed virtual operands since we will
177538fd1498Szrj 	 rescan the statement and rewrite its operands again.  */
177638fd1498Szrj       gimple_set_modified (stmt, true);
177738fd1498Szrj     }
177838fd1498Szrj }
177938fd1498Szrj 
178038fd1498Szrj /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
178138fd1498Szrj    known value for that SSA_NAME (or NULL if no value is known).
178238fd1498Szrj 
178338fd1498Szrj    Propagate values from CONST_AND_COPIES into the uses, vuses and
178438fd1498Szrj    vdef_ops of STMT.  */
178538fd1498Szrj 
178638fd1498Szrj static void
cprop_into_stmt(gimple * stmt)178738fd1498Szrj cprop_into_stmt (gimple *stmt)
178838fd1498Szrj {
178938fd1498Szrj   use_operand_p op_p;
179038fd1498Szrj   ssa_op_iter iter;
179138fd1498Szrj   tree last_copy_propagated_op = NULL;
179238fd1498Szrj 
179338fd1498Szrj   FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
179438fd1498Szrj     {
179538fd1498Szrj       tree old_op = USE_FROM_PTR (op_p);
179638fd1498Szrj 
179738fd1498Szrj       /* If we have A = B and B = A in the copy propagation tables
179838fd1498Szrj 	 (due to an equality comparison), avoid substituting B for A
179938fd1498Szrj 	 then A for B in the trivially discovered cases.   This allows
180038fd1498Szrj 	 optimization of statements were A and B appear as input
180138fd1498Szrj 	 operands.  */
180238fd1498Szrj       if (old_op != last_copy_propagated_op)
180338fd1498Szrj 	{
180438fd1498Szrj 	  cprop_operand (stmt, op_p);
180538fd1498Szrj 
180638fd1498Szrj 	  tree new_op = USE_FROM_PTR (op_p);
180738fd1498Szrj 	  if (new_op != old_op && TREE_CODE (new_op) == SSA_NAME)
180838fd1498Szrj 	    last_copy_propagated_op = new_op;
180938fd1498Szrj 	}
181038fd1498Szrj     }
181138fd1498Szrj }
181238fd1498Szrj 
181338fd1498Szrj /* If STMT contains a relational test, try to convert it into an
181438fd1498Szrj    equality test if there is only a single value which can ever
181538fd1498Szrj    make the test true.
181638fd1498Szrj 
181738fd1498Szrj    For example, if the expression hash table contains:
181838fd1498Szrj 
181938fd1498Szrj     TRUE = (i <= 1)
182038fd1498Szrj 
182138fd1498Szrj    And we have a test within statement of i >= 1, then we can safely
182238fd1498Szrj    rewrite the test as i == 1 since there only a single value where
182338fd1498Szrj    the test is true.
182438fd1498Szrj 
182538fd1498Szrj    This is similar to code in VRP.  */
182638fd1498Szrj 
182738fd1498Szrj static void
test_for_singularity(gimple * stmt,gcond * dummy_cond,avail_exprs_stack * avail_exprs_stack)182838fd1498Szrj test_for_singularity (gimple *stmt, gcond *dummy_cond,
182938fd1498Szrj 		      avail_exprs_stack *avail_exprs_stack)
183038fd1498Szrj {
183138fd1498Szrj   /* We want to support gimple conditionals as well as assignments
183238fd1498Szrj      where the RHS contains a conditional.  */
183338fd1498Szrj   if (is_gimple_assign (stmt) || gimple_code (stmt) == GIMPLE_COND)
183438fd1498Szrj     {
183538fd1498Szrj       enum tree_code code = ERROR_MARK;
183638fd1498Szrj       tree lhs, rhs;
183738fd1498Szrj 
183838fd1498Szrj       /* Extract the condition of interest from both forms we support.  */
183938fd1498Szrj       if (is_gimple_assign (stmt))
184038fd1498Szrj 	{
184138fd1498Szrj 	  code = gimple_assign_rhs_code (stmt);
184238fd1498Szrj 	  lhs = gimple_assign_rhs1 (stmt);
184338fd1498Szrj 	  rhs = gimple_assign_rhs2 (stmt);
184438fd1498Szrj 	}
184538fd1498Szrj       else if (gimple_code (stmt) == GIMPLE_COND)
184638fd1498Szrj 	{
184738fd1498Szrj 	  code = gimple_cond_code (as_a <gcond *> (stmt));
184838fd1498Szrj 	  lhs = gimple_cond_lhs (as_a <gcond *> (stmt));
184938fd1498Szrj 	  rhs = gimple_cond_rhs (as_a <gcond *> (stmt));
185038fd1498Szrj 	}
185138fd1498Szrj 
185238fd1498Szrj       /* We're looking for a relational test using LE/GE.  Also note we can
185338fd1498Szrj 	 canonicalize LT/GT tests against constants into LE/GT tests.  */
185438fd1498Szrj       if (code == LE_EXPR || code == GE_EXPR
185538fd1498Szrj 	  || ((code == LT_EXPR || code == GT_EXPR)
185638fd1498Szrj 	       && TREE_CODE (rhs) == INTEGER_CST))
185738fd1498Szrj 	{
185838fd1498Szrj 	  /* For LT_EXPR and GT_EXPR, canonicalize to LE_EXPR and GE_EXPR.  */
185938fd1498Szrj 	  if (code == LT_EXPR)
186038fd1498Szrj 	    rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (rhs),
186138fd1498Szrj 			       rhs, build_int_cst (TREE_TYPE (rhs), 1));
186238fd1498Szrj 
186338fd1498Szrj 	  if (code == GT_EXPR)
186438fd1498Szrj 	    rhs = fold_build2 (PLUS_EXPR, TREE_TYPE (rhs),
186538fd1498Szrj 			       rhs, build_int_cst (TREE_TYPE (rhs), 1));
186638fd1498Szrj 
186738fd1498Szrj 	  /* Determine the code we want to check for in the hash table.  */
186838fd1498Szrj 	  enum tree_code test_code;
186938fd1498Szrj 	  if (code == GE_EXPR || code == GT_EXPR)
187038fd1498Szrj 	    test_code = LE_EXPR;
187138fd1498Szrj 	  else
187238fd1498Szrj 	    test_code = GE_EXPR;
187338fd1498Szrj 
187438fd1498Szrj 	  /* Update the dummy statement so we can query the hash tables.  */
187538fd1498Szrj 	  gimple_cond_set_code (dummy_cond, test_code);
187638fd1498Szrj 	  gimple_cond_set_lhs (dummy_cond, lhs);
187738fd1498Szrj 	  gimple_cond_set_rhs (dummy_cond, rhs);
187838fd1498Szrj 	  tree cached_lhs
187938fd1498Szrj 	    = avail_exprs_stack->lookup_avail_expr (dummy_cond, false, false);
188038fd1498Szrj 
188138fd1498Szrj 	  /* If the lookup returned 1 (true), then the expression we
188238fd1498Szrj 	     queried was in the hash table.  As a result there is only
188338fd1498Szrj 	     one value that makes the original conditional true.  Update
188438fd1498Szrj 	     STMT accordingly.  */
188538fd1498Szrj 	  if (cached_lhs && integer_onep (cached_lhs))
188638fd1498Szrj 	    {
188738fd1498Szrj 	      if (is_gimple_assign (stmt))
188838fd1498Szrj 		{
188938fd1498Szrj 		  gimple_assign_set_rhs_code (stmt, EQ_EXPR);
189038fd1498Szrj 		  gimple_assign_set_rhs2 (stmt, rhs);
189138fd1498Szrj 		  gimple_set_modified (stmt, true);
189238fd1498Szrj 		}
189338fd1498Szrj 	      else
189438fd1498Szrj 		{
189538fd1498Szrj 		  gimple_set_modified (stmt, true);
189638fd1498Szrj 		  gimple_cond_set_code (as_a <gcond *> (stmt), EQ_EXPR);
189738fd1498Szrj 		  gimple_cond_set_rhs (as_a <gcond *> (stmt), rhs);
189838fd1498Szrj 		  gimple_set_modified (stmt, true);
189938fd1498Szrj 		}
190038fd1498Szrj 	    }
190138fd1498Szrj 	}
190238fd1498Szrj     }
190338fd1498Szrj }
190438fd1498Szrj 
/* Optimize the statement in block BB pointed to by iterator SI.

   We try to perform some simplistic global redundancy elimination and
   constant propagation:

   1- To detect global redundancy, we keep track of expressions that have
      been computed in this block and its dominators.  If we find that the
      same expression is computed more than once, we eliminate repeated
      computations by using the target of the first one.

   2- Constant values and copy assignments.  This is used to do very
      simplistic constant and copy propagation.  When a constant or copy
      assignment is found, we map the value on the RHS of the assignment to
      the variable in the LHS in the CONST_AND_COPIES table.

   3- Very simple redundant store elimination is performed.

   4- We can simplify a condition to a constant or from a relational
      condition to an equality condition.

   Returns the single outgoing edge of BB known to be taken when a
   conditional or switch in BB was resolved to a constant, otherwise
   NULL.  */

edge
dom_opt_dom_walker::optimize_stmt (basic_block bb, gimple_stmt_iterator si)
{
  gimple *stmt, *old_stmt;
  bool may_optimize_p;
  /* Set when cprop modified STMT, so that the EH cleanup below runs even
     if fold_stmt subsequently cleared the stmt's modified flag.  */
  bool modified_p = false;
  bool was_noreturn;
  edge retval = NULL;

  old_stmt = stmt = gsi_stmt (si);
  /* Remember whether STMT was already a noreturn call; if it only becomes
     noreturn through the transformations below, it is queued for CFG
     fixup at the end of this function.  */
  was_noreturn = is_gimple_call (stmt) && gimple_call_noreturn_p (stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  update_stmt_if_modified (stmt);
  opt_stats.num_stmts++;

  /* Const/copy propagate into USES, VUSES and the RHS of VDEFs.  */
  cprop_into_stmt (stmt);

  /* If the statement has been modified with constant replacements,
     fold its RHS before checking for redundant computations.  */
  if (gimple_modified_p (stmt))
    {
      tree rhs = NULL;

      /* Try to fold the statement making sure that STMT is kept
	 up to date.  */
      if (fold_stmt (&si))
	{
	  stmt = gsi_stmt (si);
	  gimple_set_modified (stmt, true);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Folded to: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	    }
	}

      /* We only need to consider cases that can yield a gimple operand.  */
      if (gimple_assign_single_p (stmt))
        rhs = gimple_assign_rhs1 (stmt);
      else if (gimple_code (stmt) == GIMPLE_GOTO)
        rhs = gimple_goto_dest (stmt);
      else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
        /* This should never be an ADDR_EXPR.  */
        rhs = gimple_switch_index (swtch_stmt);

      /* Propagation may have turned an operand into an ADDR_EXPR whose
	 invariant-ness flag is stale; recompute it.  */
      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (rhs);

      /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
	 even if fold_stmt updated the stmt already and thus cleared
	 gimple_modified_p flag on it.  */
      modified_p = true;
    }

  /* Check for redundant computations.  Do this optimization only
     for assignments that have no volatile ops and conditionals.  */
  may_optimize_p = (!gimple_has_side_effects (stmt)
                    && (is_gimple_assign (stmt)
                        || (is_gimple_call (stmt)
                            && gimple_call_lhs (stmt) != NULL_TREE)
                        || gimple_code (stmt) == GIMPLE_COND
                        || gimple_code (stmt) == GIMPLE_SWITCH));

  if (may_optimize_p)
    {
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  /* Resolve __builtin_constant_p.  If it hasn't been
	     folded to integer_one_node by now, it's fairly
	     certain that the value simply isn't constant.  */
	  tree callee = gimple_call_fndecl (stmt);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
	    {
	      propagate_tree_value_into_stmt (&si, integer_zero_node);
	      stmt = gsi_stmt (si);
	    }
	}

      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  tree lhs = gimple_cond_lhs (stmt);
	  tree rhs = gimple_cond_rhs (stmt);

	  /* If the LHS has a range [0..1] and the RHS has a range ~[0..1],
	     then this conditional is computable at compile time.  We can just
	     shove either 0 or 1 into the LHS, mark the statement as modified
	     and all the right things will just happen below.

	     Note this would apply to any case where LHS has a range
	     narrower than its type implies and RHS is outside that
	     narrower range.  Future work.  */
	  if (TREE_CODE (lhs) == SSA_NAME
	      && ssa_name_has_boolean_range (lhs)
	      && TREE_CODE (rhs) == INTEGER_CST
	      && ! (integer_zerop (rhs) || integer_onep (rhs)))
	    {
	      /* Replacing the LHS with 0 makes the comparison trivially
		 foldable; either constant in [0..1] would do since RHS is
		 outside that range.  */
	      gimple_cond_set_lhs (as_a <gcond *> (stmt),
				   fold_convert (TREE_TYPE (lhs),
						 integer_zero_node));
	      gimple_set_modified (stmt, true);
	    }
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* Exploiting EVRP data is not yet fully integrated into DOM
		 but we need to do something for this case to avoid regressing
		 udr4.f90 and new1.C which have unexecutable blocks with
		 undefined behavior that get diagnosed if they're left in the
		 IL because we've attached range information to new
		 SSA_NAMES.  */
	      update_stmt_if_modified (stmt);
	      edge taken_edge = NULL;
	      evrp_range_analyzer.vrp_visit_cond_stmt (as_a <gcond *> (stmt),
						       &taken_edge);
	      if (taken_edge)
		{
		  /* Range info proved the condition constant; rewrite it to
		     the matching trivial condition and report the edge.  */
		  if (taken_edge->flags & EDGE_TRUE_VALUE)
		    gimple_cond_make_true (as_a <gcond *> (stmt));
		  else if (taken_edge->flags & EDGE_FALSE_VALUE)
		    gimple_cond_make_false (as_a <gcond *> (stmt));
		  else
		    gcc_unreachable ();
		  gimple_set_modified (stmt, true);
		  update_stmt (stmt);
		  cfg_altered = true;
		  return taken_edge;
		}
	    }
	}

      update_stmt_if_modified (stmt);
      eliminate_redundant_computations (&si, m_const_and_copies,
					m_avail_exprs_stack);
      stmt = gsi_stmt (si);

      /* Perform simple redundant store elimination.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree rhs = gimple_assign_rhs1 (stmt);
	  tree cached_lhs;
	  gassign *new_stmt;
	  rhs = dom_valueize (rhs);
	  /* Build a new statement with the RHS and LHS exchanged.  */
	  if (TREE_CODE (rhs) == SSA_NAME)
	    {
	      /* NEW_STMT is only a lookup key; building an assignment with
		 an SSA_NAME LHS clobbers its defining statement, so save
		 and restore the real def stmt around the build.  */
	      gimple *defstmt = SSA_NAME_DEF_STMT (rhs);
	      new_stmt = gimple_build_assign (rhs, lhs);
	      SSA_NAME_DEF_STMT (rhs) = defstmt;
	    }
	  else
	    new_stmt = gimple_build_assign (rhs, lhs);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  /* If the reversed copy is already available with the same value,
	     STMT stores a value the destination is known to hold and can
	     be removed.  */
	  cached_lhs = m_avail_exprs_stack->lookup_avail_expr (new_stmt, false,
							       false);
	  if (cached_lhs && operand_equal_p (rhs, cached_lhs, 0))
	    {
	      basic_block bb = gimple_bb (stmt);
	      unlink_stmt_vdef (stmt);
	      if (gsi_remove (&si, true))
		{
		  bitmap_set_bit (need_eh_cleanup, bb->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Flagged to clear EH edges.\n");
		}
	      release_defs (stmt);
	      return retval;
	    }
	}

      /* If this statement was not redundant, we may still be able to simplify
	 it, which may in turn allow other part of DOM or other passes to do
	 a better job.  */
      test_for_singularity (stmt, m_dummy_cond, m_avail_exprs_stack);
    }

  /* Record any additional equivalences created by this statement.  */
  if (is_gimple_assign (stmt))
    record_equivalences_from_stmt (stmt, may_optimize_p, m_avail_exprs_stack);

  /* If STMT is a COND_EXPR or SWITCH_EXPR and it was modified, then we may
     know where it goes.  */
  if (gimple_modified_p (stmt) || modified_p)
    {
      tree val = NULL;

      if (gimple_code (stmt) == GIMPLE_COND)
        val = fold_binary_loc (gimple_location (stmt),
			       gimple_cond_code (stmt), boolean_type_node,
			       gimple_cond_lhs (stmt),
			       gimple_cond_rhs (stmt));
      else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
	val = gimple_switch_index (swtch_stmt);

      if (val && TREE_CODE (val) == INTEGER_CST)
	{
	  retval = find_taken_edge (bb, val);
	  if (retval)
	    {
	      /* Fix the condition to be either true or false.  */
	      if (gimple_code (stmt) == GIMPLE_COND)
		{
		  if (integer_zerop (val))
		    gimple_cond_make_false (as_a <gcond *> (stmt));
		  else if (integer_onep (val))
		    gimple_cond_make_true (as_a <gcond *> (stmt));
		  else
		    gcc_unreachable ();

		  gimple_set_modified (stmt, true);
		}

	      /* Further simplifications may be possible.  */
	      cfg_altered = true;
	    }
	}

      update_stmt_if_modified (stmt);

      /* If we simplified a statement in such a way as to be shown that it
	 cannot trap, update the eh information and the cfg to match.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	{
	  bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Flagged to clear EH edges.\n");
	}

      /* STMT may have become a noreturn call whose abnormal control flow is
	 not yet reflected in the CFG; queue it for fixup.  */
      if (!was_noreturn
	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	need_noreturn_fixup.safe_push (stmt);
    }
  return retval;
}
2169