138fd1498Szrj /* Conditional constant propagation pass for the GNU compiler.
238fd1498Szrj    Copyright (C) 2000-2018 Free Software Foundation, Inc.
338fd1498Szrj    Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
438fd1498Szrj    Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
538fd1498Szrj 
638fd1498Szrj This file is part of GCC.
738fd1498Szrj 
838fd1498Szrj GCC is free software; you can redistribute it and/or modify it
938fd1498Szrj under the terms of the GNU General Public License as published by the
1038fd1498Szrj Free Software Foundation; either version 3, or (at your option) any
1138fd1498Szrj later version.
1238fd1498Szrj 
1338fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT
1438fd1498Szrj ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
1538fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1638fd1498Szrj for more details.
1738fd1498Szrj 
1838fd1498Szrj You should have received a copy of the GNU General Public License
1938fd1498Szrj along with GCC; see the file COPYING3.  If not see
2038fd1498Szrj <http://www.gnu.org/licenses/>.  */
2138fd1498Szrj 
2238fd1498Szrj /* Conditional constant propagation (CCP) is based on the SSA
2338fd1498Szrj    propagation engine (tree-ssa-propagate.c).  Constant assignments of
2438fd1498Szrj    the form VAR = CST are propagated from the assignments into uses of
2538fd1498Szrj    VAR, which in turn may generate new constants.  The simulation uses
2638fd1498Szrj    a four level lattice to keep track of constant values associated
2738fd1498Szrj    with SSA names.  Given an SSA name V_i, it may take one of the
2838fd1498Szrj    following values:
2938fd1498Szrj 
3038fd1498Szrj 	UNINITIALIZED   ->  the initial state of the value.  This value
3138fd1498Szrj 			    is replaced with a correct initial value
3238fd1498Szrj 			    the first time the value is used, so the
3338fd1498Szrj 			    rest of the pass does not need to care about
3438fd1498Szrj 			    it.  Using this value simplifies initialization
3538fd1498Szrj 			    of the pass, and prevents us from needlessly
3638fd1498Szrj 			    scanning statements that are never reached.
3738fd1498Szrj 
3838fd1498Szrj 	UNDEFINED	->  V_i is a local variable whose definition
3938fd1498Szrj 			    has not been processed yet.  Therefore we
4038fd1498Szrj 			    don't yet know if its value is a constant
4138fd1498Szrj 			    or not.
4238fd1498Szrj 
4338fd1498Szrj 	CONSTANT	->  V_i has been found to hold a constant
4438fd1498Szrj 			    value C.
4538fd1498Szrj 
4638fd1498Szrj 	VARYING		->  V_i cannot take a constant value, or if it
4738fd1498Szrj 			    does, it is not possible to determine it
4838fd1498Szrj 			    at compile time.
4938fd1498Szrj 
5038fd1498Szrj    The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
5138fd1498Szrj 
5238fd1498Szrj    1- In ccp_visit_stmt, we are interested in assignments whose RHS
5338fd1498Szrj       evaluates into a constant and conditional jumps whose predicate
5438fd1498Szrj       evaluates into a boolean true or false.  When an assignment of
5538fd1498Szrj       the form V_i = CONST is found, V_i's lattice value is set to
5638fd1498Szrj       CONSTANT and CONST is associated with it.  This causes the
5738fd1498Szrj       propagation engine to add all the SSA edges coming out the
5838fd1498Szrj       assignment into the worklists, so that statements that use V_i
5938fd1498Szrj       can be visited.
6038fd1498Szrj 
6138fd1498Szrj       If the statement is a conditional with a constant predicate, we
6238fd1498Szrj       mark the outgoing edges as executable or not executable
6338fd1498Szrj       depending on the predicate's value.  This is then used when
6438fd1498Szrj       visiting PHI nodes to know when a PHI argument can be ignored.
6538fd1498Szrj 
6638fd1498Szrj 
6738fd1498Szrj    2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
6838fd1498Szrj       same constant C, then the LHS of the PHI is set to C.  This
6938fd1498Szrj       evaluation is known as the "meet operation".  Since one of the
7038fd1498Szrj       goals of this evaluation is to optimistically return constant
7138fd1498Szrj       values as often as possible, it uses two main short cuts:
7238fd1498Szrj 
7338fd1498Szrj       - If an argument is flowing in through a non-executable edge, it
7438fd1498Szrj 	is ignored.  This is useful in cases like this:
7538fd1498Szrj 
7638fd1498Szrj 			if (PRED)
7738fd1498Szrj 			  a_9 = 3;
7838fd1498Szrj 			else
7938fd1498Szrj 			  a_10 = 100;
8038fd1498Szrj 			a_11 = PHI (a_9, a_10)
8138fd1498Szrj 
8238fd1498Szrj 	If PRED is known to always evaluate to false, then we can
8338fd1498Szrj 	assume that a_11 will always take its value from a_10, meaning
8438fd1498Szrj 	that instead of considering it VARYING (a_9 and a_10 have
8538fd1498Szrj 	different values), we can consider it CONSTANT 100.
8638fd1498Szrj 
8738fd1498Szrj       - If an argument has an UNDEFINED value, then it does not affect
8838fd1498Szrj 	the outcome of the meet operation.  If a variable V_i has an
8938fd1498Szrj 	UNDEFINED value, it means that either its defining statement
9038fd1498Szrj 	hasn't been visited yet or V_i has no defining statement, in
9138fd1498Szrj 	which case the original symbol 'V' is being used
9238fd1498Szrj 	uninitialized.  Since 'V' is a local variable, the compiler
9338fd1498Szrj 	may assume any initial value for it.
9438fd1498Szrj 
9538fd1498Szrj 
9638fd1498Szrj    After propagation, every variable V_i that ends up with a lattice
9738fd1498Szrj    value of CONSTANT will have the associated constant value in the
9838fd1498Szrj    array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
9938fd1498Szrj    final substitution and folding.
10038fd1498Szrj 
10138fd1498Szrj    This algorithm uses wide-ints at the max precision of the target.
10238fd1498Szrj    This means that, with one uninteresting exception, variables with
10338fd1498Szrj    UNSIGNED types never go to VARYING because the bits above the
10438fd1498Szrj    precision of the type of the variable are always zero.  The
10538fd1498Szrj    uninteresting case is a variable of UNSIGNED type that has the
10638fd1498Szrj    maximum precision of the target.  Such variables can go to VARYING,
10738fd1498Szrj    but this causes no loss of information since these variables will
10838fd1498Szrj    never be extended.
10938fd1498Szrj 
11038fd1498Szrj    References:
11138fd1498Szrj 
11238fd1498Szrj      Constant propagation with conditional branches,
11338fd1498Szrj      Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
11438fd1498Szrj 
11538fd1498Szrj      Building an Optimizing Compiler,
11638fd1498Szrj      Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
11738fd1498Szrj 
11838fd1498Szrj      Advanced Compiler Design and Implementation,
11938fd1498Szrj      Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
12038fd1498Szrj 
12138fd1498Szrj #include "config.h"
12238fd1498Szrj #include "system.h"
12338fd1498Szrj #include "coretypes.h"
12438fd1498Szrj #include "backend.h"
12538fd1498Szrj #include "target.h"
12638fd1498Szrj #include "tree.h"
12738fd1498Szrj #include "gimple.h"
12838fd1498Szrj #include "tree-pass.h"
12938fd1498Szrj #include "ssa.h"
13038fd1498Szrj #include "gimple-pretty-print.h"
13138fd1498Szrj #include "fold-const.h"
13238fd1498Szrj #include "gimple-fold.h"
13338fd1498Szrj #include "tree-eh.h"
13438fd1498Szrj #include "gimplify.h"
13538fd1498Szrj #include "gimple-iterator.h"
13638fd1498Szrj #include "tree-cfg.h"
13738fd1498Szrj #include "tree-ssa-propagate.h"
13838fd1498Szrj #include "dbgcnt.h"
13938fd1498Szrj #include "params.h"
14038fd1498Szrj #include "builtins.h"
14138fd1498Szrj #include "tree-chkp.h"
14238fd1498Szrj #include "cfgloop.h"
14338fd1498Szrj #include "stor-layout.h"
14438fd1498Szrj #include "optabs-query.h"
14538fd1498Szrj #include "tree-ssa-ccp.h"
14638fd1498Szrj #include "tree-dfa.h"
14738fd1498Szrj #include "diagnostic-core.h"
14838fd1498Szrj #include "stringpool.h"
14938fd1498Szrj #include "attribs.h"
15038fd1498Szrj #include "tree-vector-builder.h"
15138fd1498Szrj 
15238fd1498Szrj /* Possible lattice values, ordered so that a valid transition is
   always monotonically increasing (see valid_lattice_transition).  */
15338fd1498Szrj typedef enum
15438fd1498Szrj {
15538fd1498Szrj   UNINITIALIZED,	/* Never simulated yet; lazily replaced by a
			   real initial value on first use.  */
15638fd1498Szrj   UNDEFINED,		/* Defining statement not processed yet; may
			   still become any constant.  */
15738fd1498Szrj   CONSTANT,		/* Holds a (possibly only partially known, see
			   the mask in ccp_prop_value_t) constant.  */
15838fd1498Szrj   VARYING		/* Not constant, or not determinable at
			   compile time.  */
15938fd1498Szrj } ccp_lattice_t;
16038fd1498Szrj 
16138fd1498Szrj struct ccp_prop_value_t {
16238fd1498Szrj     /* Lattice value.  */
16338fd1498Szrj     ccp_lattice_t lattice_val;
16438fd1498Szrj 
16538fd1498Szrj     /* Propagated value.  Meaningful when LATTICE_VAL is CONSTANT; it
       may be an INTEGER_CST, another gimple invariant, or an SSA name
       acting as a copy-of value (see get_value_for_expr).  */
16638fd1498Szrj     tree value;
16738fd1498Szrj 
16838fd1498Szrj     /* Mask that applies to the propagated value during CCP.  For X
16938fd1498Szrj        with a CONSTANT lattice value X & ~mask == value & ~mask.  The
17038fd1498Szrj        zero bits in the mask cover constant values.  The ones mean no
17138fd1498Szrj        information.  */
17238fd1498Szrj     widest_int mask;
17338fd1498Szrj };
17438fd1498Szrj 
/* CCP-specific instance of the SSA propagation engine: visit_stmt and
   visit_phi supply the transfer functions described in the file
   header comment.  */
17538fd1498Szrj class ccp_propagate : public ssa_propagation_engine
17638fd1498Szrj {
17738fd1498Szrj  public:
17838fd1498Szrj   enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
17938fd1498Szrj   enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
18038fd1498Szrj };
18138fd1498Szrj 
18238fd1498Szrj /* Array of propagated constant values.  After propagation,
18338fd1498Szrj    CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
18438fd1498Szrj    the value is only partially known, CONST_VAL[I].MASK has a one bit
18538fd1498Szrj    for every value bit that is still unknown (see the comment on
18638fd1498Szrj    ccp_prop_value_t above).  N_CONST_VAL is the number of entries
18738fd1498Szrj    allocated, indexed by SSA name version.  */
18838fd1498Szrj static ccp_prop_value_t *const_val;
18938fd1498Szrj static unsigned n_const_val;
19038fd1498Szrj 
/* Forward declarations of helpers defined later in this file.  */
19138fd1498Szrj static void canonicalize_value (ccp_prop_value_t *);
19238fd1498Szrj static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
19338fd1498Szrj 
19438fd1498Szrj /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */
19538fd1498Szrj 
19638fd1498Szrj static void
dump_lattice_value(FILE * outf,const char * prefix,ccp_prop_value_t val)19738fd1498Szrj dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
19838fd1498Szrj {
19938fd1498Szrj   switch (val.lattice_val)
20038fd1498Szrj     {
20138fd1498Szrj     case UNINITIALIZED:
20238fd1498Szrj       fprintf (outf, "%sUNINITIALIZED", prefix);
20338fd1498Szrj       break;
20438fd1498Szrj     case UNDEFINED:
20538fd1498Szrj       fprintf (outf, "%sUNDEFINED", prefix);
20638fd1498Szrj       break;
20738fd1498Szrj     case VARYING:
20838fd1498Szrj       fprintf (outf, "%sVARYING", prefix);
20938fd1498Szrj       break;
21038fd1498Szrj     case CONSTANT:
21138fd1498Szrj       if (TREE_CODE (val.value) != INTEGER_CST
21238fd1498Szrj 	  || val.mask == 0)
21338fd1498Szrj 	{
21438fd1498Szrj 	  fprintf (outf, "%sCONSTANT ", prefix);
21538fd1498Szrj 	  print_generic_expr (outf, val.value, dump_flags);
21638fd1498Szrj 	}
21738fd1498Szrj       else
21838fd1498Szrj 	{
21938fd1498Szrj 	  widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
22038fd1498Szrj 					     val.mask);
22138fd1498Szrj 	  fprintf (outf, "%sCONSTANT ", prefix);
22238fd1498Szrj 	  print_hex (cval, outf);
22338fd1498Szrj 	  fprintf (outf, " (");
22438fd1498Szrj 	  print_hex (val.mask, outf);
22538fd1498Szrj 	  fprintf (outf, ")");
22638fd1498Szrj 	}
22738fd1498Szrj       break;
22838fd1498Szrj     default:
22938fd1498Szrj       gcc_unreachable ();
23038fd1498Szrj     }
23138fd1498Szrj }
23238fd1498Szrj 
23338fd1498Szrj 
23438fd1498Szrj /* Print lattice value VAL to stderr.  */
23538fd1498Szrj 
23638fd1498Szrj void debug_lattice_value (ccp_prop_value_t val);
23738fd1498Szrj 
23838fd1498Szrj DEBUG_FUNCTION void
debug_lattice_value(ccp_prop_value_t val)23938fd1498Szrj debug_lattice_value (ccp_prop_value_t val)
24038fd1498Szrj {
24138fd1498Szrj   dump_lattice_value (stderr, "", val);
24238fd1498Szrj   fprintf (stderr, "\n");
24338fd1498Szrj }
24438fd1498Szrj 
24538fd1498Szrj /* Extend NONZERO_BITS to a full mask, based on SGN: bits above the
   precision of NONZERO_BITS are sign-extended for SIGNED (unknown)
   and zero-extended for UNSIGNED (known zero).  */
24638fd1498Szrj 
24738fd1498Szrj static widest_int
extend_mask(const wide_int & nonzero_bits,signop sgn)24838fd1498Szrj extend_mask (const wide_int &nonzero_bits, signop sgn)
24938fd1498Szrj {
25038fd1498Szrj   return widest_int::from (nonzero_bits, sgn);
25138fd1498Szrj }
25238fd1498Szrj 
25338fd1498Szrj /* Compute a default value for variable VAR and store it in the
25438fd1498Szrj    CONST_VAL array.  The following rules are used to get default
25538fd1498Szrj    values:
25638fd1498Szrj 
25738fd1498Szrj    1- Global and static variables that are declared constant are
25838fd1498Szrj       considered CONSTANT.
25938fd1498Szrj 
26038fd1498Szrj    2- Any other value is considered UNDEFINED.  This is useful when
26138fd1498Szrj       considering PHI nodes.  PHI arguments that are undefined do not
26238fd1498Szrj       change the constant value of the PHI node, which allows for more
26338fd1498Szrj       constants to be propagated.
26438fd1498Szrj 
26538fd1498Szrj    3- Variables defined by statements other than assignments and PHI
26638fd1498Szrj       nodes are considered VARYING.
26738fd1498Szrj 
26838fd1498Szrj    4- Initial values of variables that are not GIMPLE registers are
26938fd1498Szrj       considered VARYING.  */
27038fd1498Szrj 
27138fd1498Szrj static ccp_prop_value_t
get_default_value(tree var)27238fd1498Szrj get_default_value (tree var)
27338fd1498Szrj {
27438fd1498Szrj   ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
27538fd1498Szrj   gimple *stmt;
27638fd1498Szrj 
27738fd1498Szrj   stmt = SSA_NAME_DEF_STMT (var);
27838fd1498Szrj 
27938fd1498Szrj   if (gimple_nop_p (stmt))
28038fd1498Szrj     {
28138fd1498Szrj       /* Variables defined by an empty statement are those used
28238fd1498Szrj 	 before being initialized.  If VAR is a local variable, we
28338fd1498Szrj 	 can assume initially that it is UNDEFINED, otherwise we must
28438fd1498Szrj 	 consider it VARYING.  */
28538fd1498Szrj       if (!virtual_operand_p (var)
28638fd1498Szrj 	  && SSA_NAME_VAR (var)
28738fd1498Szrj 	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
28838fd1498Szrj 	val.lattice_val = UNDEFINED;
28938fd1498Szrj       else
29038fd1498Szrj 	{
29138fd1498Szrj 	  val.lattice_val = VARYING;
29238fd1498Szrj 	  val.mask = -1;
29338fd1498Szrj 	  if (flag_tree_bit_ccp)
29438fd1498Szrj 	    {
	      /* With bit-CCP, nonzero-bits information recorded for
		 the name lets us start from a zero constant whose
		 unknown (mask) bits are exactly the possibly-nonzero
		 ones, instead of from fully VARYING.  */
29538fd1498Szrj 	      wide_int nonzero_bits = get_nonzero_bits (var);
29638fd1498Szrj 	      if (nonzero_bits != -1)
29738fd1498Szrj 		{
29838fd1498Szrj 		  val.lattice_val = CONSTANT;
29938fd1498Szrj 		  val.value = build_zero_cst (TREE_TYPE (var));
30038fd1498Szrj 		  val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (var)));
30138fd1498Szrj 		}
30238fd1498Szrj 	    }
30338fd1498Szrj 	}
30438fd1498Szrj     }
30538fd1498Szrj   else if (is_gimple_assign (stmt))
30638fd1498Szrj     {
30738fd1498Szrj       tree cst;
      /* A load from a constant declaration (e.g. a const static) is
	 itself CONSTANT.  */
30838fd1498Szrj       if (gimple_assign_single_p (stmt)
30938fd1498Szrj 	  && DECL_P (gimple_assign_rhs1 (stmt))
31038fd1498Szrj 	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
31138fd1498Szrj 	{
31238fd1498Szrj 	  val.lattice_val = CONSTANT;
31338fd1498Szrj 	  val.value = cst;
31438fd1498Szrj 	}
31538fd1498Szrj       else
31638fd1498Szrj 	{
31738fd1498Szrj 	  /* Any other variable defined by an assignment is considered
31838fd1498Szrj 	     UNDEFINED.  */
31938fd1498Szrj 	  val.lattice_val = UNDEFINED;
32038fd1498Szrj 	}
32138fd1498Szrj     }
32238fd1498Szrj   else if ((is_gimple_call (stmt)
32338fd1498Szrj 	    && gimple_call_lhs (stmt) != NULL_TREE)
32438fd1498Szrj 	   || gimple_code (stmt) == GIMPLE_PHI)
32538fd1498Szrj     {
32638fd1498Szrj       /* A variable defined by a call or a PHI node is considered
32738fd1498Szrj 	 UNDEFINED.  */
32838fd1498Szrj       val.lattice_val = UNDEFINED;
32938fd1498Szrj     }
33038fd1498Szrj   else
33138fd1498Szrj     {
33238fd1498Szrj       /* Otherwise, VAR will never take on a constant value.  */
33338fd1498Szrj       val.lattice_val = VARYING;
33438fd1498Szrj       val.mask = -1;
33538fd1498Szrj     }
33638fd1498Szrj 
33738fd1498Szrj   return val;
33838fd1498Szrj }
33938fd1498Szrj 
34038fd1498Szrj 
34138fd1498Szrj /* Get the constant value associated with variable VAR.  */
34238fd1498Szrj 
34338fd1498Szrj static inline ccp_prop_value_t *
get_value(tree var)34438fd1498Szrj get_value (tree var)
34538fd1498Szrj {
34638fd1498Szrj   ccp_prop_value_t *val;
34738fd1498Szrj 
34838fd1498Szrj   if (const_val == NULL
34938fd1498Szrj       || SSA_NAME_VERSION (var) >= n_const_val)
35038fd1498Szrj     return NULL;
35138fd1498Szrj 
35238fd1498Szrj   val = &const_val[SSA_NAME_VERSION (var)];
35338fd1498Szrj   if (val->lattice_val == UNINITIALIZED)
35438fd1498Szrj     *val = get_default_value (var);
35538fd1498Szrj 
35638fd1498Szrj   canonicalize_value (val);
35738fd1498Szrj 
35838fd1498Szrj   return val;
35938fd1498Szrj }
36038fd1498Szrj 
36138fd1498Szrj /* Return the constant tree value associated with VAR.  */
36238fd1498Szrj 
36338fd1498Szrj static inline tree
get_constant_value(tree var)36438fd1498Szrj get_constant_value (tree var)
36538fd1498Szrj {
36638fd1498Szrj   ccp_prop_value_t *val;
36738fd1498Szrj   if (TREE_CODE (var) != SSA_NAME)
36838fd1498Szrj     {
36938fd1498Szrj       if (is_gimple_min_invariant (var))
37038fd1498Szrj         return var;
37138fd1498Szrj       return NULL_TREE;
37238fd1498Szrj     }
37338fd1498Szrj   val = get_value (var);
37438fd1498Szrj   if (val
37538fd1498Szrj       && val->lattice_val == CONSTANT
37638fd1498Szrj       && (TREE_CODE (val->value) != INTEGER_CST
37738fd1498Szrj 	  || val->mask == 0))
37838fd1498Szrj     return val->value;
37938fd1498Szrj   return NULL_TREE;
38038fd1498Szrj }
38138fd1498Szrj 
38238fd1498Szrj /* Sets the value associated with VAR to VARYING.  */
38338fd1498Szrj 
38438fd1498Szrj static inline void
set_value_varying(tree var)38538fd1498Szrj set_value_varying (tree var)
38638fd1498Szrj {
38738fd1498Szrj   ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
38838fd1498Szrj 
38938fd1498Szrj   val->lattice_val = VARYING;
39038fd1498Szrj   val->value = NULL_TREE;
39138fd1498Szrj   val->mask = -1;
39238fd1498Szrj }
39338fd1498Szrj 
39438fd1498Szrj /* For integer constants, make sure to drop TREE_OVERFLOW.  */
39538fd1498Szrj 
39638fd1498Szrj static void
canonicalize_value(ccp_prop_value_t * val)39738fd1498Szrj canonicalize_value (ccp_prop_value_t *val)
39838fd1498Szrj {
39938fd1498Szrj   if (val->lattice_val != CONSTANT)
40038fd1498Szrj     return;
40138fd1498Szrj 
40238fd1498Szrj   if (TREE_OVERFLOW_P (val->value))
40338fd1498Szrj     val->value = drop_tree_overflow (val->value);
40438fd1498Szrj }
40538fd1498Szrj 
40638fd1498Szrj /* Return whether the lattice transition from OLD_VAL to NEW_VAL is
   valid, i.e. monotonically increasing.  Used from a checking assert
   in set_lattice_value.  */
40738fd1498Szrj 
40838fd1498Szrj static bool
valid_lattice_transition(ccp_prop_value_t old_val,ccp_prop_value_t new_val)40938fd1498Szrj valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
41038fd1498Szrj {
41138fd1498Szrj   /* Lattice transitions must always be monotonically increasing in
41238fd1498Szrj      value.  */
41338fd1498Szrj   if (old_val.lattice_val < new_val.lattice_val)
41438fd1498Szrj     return true;
41538fd1498Szrj 
41638fd1498Szrj   if (old_val.lattice_val != new_val.lattice_val)
41738fd1498Szrj     return false;
41838fd1498Szrj 
  /* Identical lattice kinds with no associated values are trivially
     fine.  */
41938fd1498Szrj   if (!old_val.value && !new_val.value)
42038fd1498Szrj     return true;
42138fd1498Szrj 
42238fd1498Szrj   /* Now both lattice values are CONSTANT.  */
42338fd1498Szrj 
42438fd1498Szrj   /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
42538fd1498Szrj      when only a single copy edge is executable.  */
42638fd1498Szrj   if (TREE_CODE (old_val.value) == SSA_NAME
42738fd1498Szrj       && TREE_CODE (new_val.value) == SSA_NAME)
42838fd1498Szrj     return true;
42938fd1498Szrj 
43038fd1498Szrj   /* Allow transitioning from a constant to a copy.  */
43138fd1498Szrj   if (is_gimple_min_invariant (old_val.value)
43238fd1498Szrj       && TREE_CODE (new_val.value) == SSA_NAME)
43338fd1498Szrj     return true;
43438fd1498Szrj 
43538fd1498Szrj   /* Allow transitioning from PHI <&x, not executable> == &x
43638fd1498Szrj      to PHI <&x, &y> == common alignment.  */
43738fd1498Szrj   if (TREE_CODE (old_val.value) != INTEGER_CST
43838fd1498Szrj       && TREE_CODE (new_val.value) == INTEGER_CST)
43938fd1498Szrj     return true;
44038fd1498Szrj 
44138fd1498Szrj   /* Bit-lattices have to agree in the still valid bits.  */
44238fd1498Szrj   if (TREE_CODE (old_val.value) == INTEGER_CST
44338fd1498Szrj       && TREE_CODE (new_val.value) == INTEGER_CST)
44438fd1498Szrj     return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
44538fd1498Szrj 	    == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
44638fd1498Szrj 
44738fd1498Szrj   /* Otherwise constant values have to agree.  */
44838fd1498Szrj   if (operand_equal_p (old_val.value, new_val.value, 0))
44938fd1498Szrj     return true;
45038fd1498Szrj 
45138fd1498Szrj   /* At least the kinds and types should agree now.  */
45238fd1498Szrj   if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
45338fd1498Szrj       || !types_compatible_p (TREE_TYPE (old_val.value),
45438fd1498Szrj 			      TREE_TYPE (new_val.value)))
45538fd1498Szrj     return false;
45638fd1498Szrj 
45738fd1498Szrj   /* For floats and !HONOR_NANS allow transitions from (partial) NaN
45838fd1498Szrj      to non-NaN.  */
45938fd1498Szrj   tree type = TREE_TYPE (new_val.value);
46038fd1498Szrj   if (SCALAR_FLOAT_TYPE_P (type)
46138fd1498Szrj       && !HONOR_NANS (type))
46238fd1498Szrj     {
46338fd1498Szrj       if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
46438fd1498Szrj 	return true;
46538fd1498Szrj     }
46638fd1498Szrj   else if (VECTOR_FLOAT_TYPE_P (type)
46738fd1498Szrj 	   && !HONOR_NANS (type))
46838fd1498Szrj     {
      /* Element-wise check over the shared vector encoding: every
	 non-NaN old element must be unchanged.  */
46938fd1498Szrj       unsigned int count
47038fd1498Szrj 	= tree_vector_builder::binary_encoded_nelts (old_val.value,
47138fd1498Szrj 						     new_val.value);
47238fd1498Szrj       for (unsigned int i = 0; i < count; ++i)
47338fd1498Szrj 	if (!REAL_VALUE_ISNAN
47438fd1498Szrj 	       (TREE_REAL_CST (VECTOR_CST_ENCODED_ELT (old_val.value, i)))
47538fd1498Szrj 	    && !operand_equal_p (VECTOR_CST_ENCODED_ELT (old_val.value, i),
47638fd1498Szrj 				 VECTOR_CST_ENCODED_ELT (new_val.value, i), 0))
47738fd1498Szrj 	  return false;
47838fd1498Szrj       return true;
47938fd1498Szrj     }
48038fd1498Szrj   else if (COMPLEX_FLOAT_TYPE_P (type)
48138fd1498Szrj 	   && !HONOR_NANS (type))
48238fd1498Szrj     {
48338fd1498Szrj       if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
48438fd1498Szrj 	  && !operand_equal_p (TREE_REALPART (old_val.value),
48538fd1498Szrj 			       TREE_REALPART (new_val.value), 0))
48638fd1498Szrj 	return false;
48738fd1498Szrj       if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
48838fd1498Szrj 	  && !operand_equal_p (TREE_IMAGPART (old_val.value),
48938fd1498Szrj 			       TREE_IMAGPART (new_val.value), 0))
49038fd1498Szrj 	return false;
49138fd1498Szrj       return true;
49238fd1498Szrj     }
  /* Any other change of a constant's value is non-monotonic.  */
49338fd1498Szrj   return false;
49438fd1498Szrj }
49538fd1498Szrj 
49638fd1498Szrj /* Set the value for variable VAR to NEW_VAL.  Return true if the new
49738fd1498Szrj    value is different from VAR's previous value.  NOTE: NEW_VAL is
   modified in place: it is canonicalized and met with the old value,
   so the caller sees the value actually stored.  */
49838fd1498Szrj 
49938fd1498Szrj static bool
set_lattice_value(tree var,ccp_prop_value_t * new_val)50038fd1498Szrj set_lattice_value (tree var, ccp_prop_value_t *new_val)
50138fd1498Szrj {
50238fd1498Szrj   /* We can deal with old UNINITIALIZED values just fine here.  */
50338fd1498Szrj   ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
50438fd1498Szrj 
50538fd1498Szrj   canonicalize_value (new_val);
50638fd1498Szrj 
50738fd1498Szrj   /* We have to be careful to not go up the bitwise lattice
50838fd1498Szrj      represented by the mask.  Instead of dropping to VARYING
50938fd1498Szrj      use the meet operator to retain a conservative value.
51038fd1498Szrj      Missed optimizations like PR65851 makes this necessary.
51138fd1498Szrj      It also ensures we converge to a stable lattice solution.  */
51238fd1498Szrj   if (old_val->lattice_val != UNINITIALIZED)
51338fd1498Szrj     ccp_lattice_meet (new_val, old_val);
51438fd1498Szrj 
51538fd1498Szrj   gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));
51638fd1498Szrj 
51738fd1498Szrj   /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
51838fd1498Szrj      caller that this was a non-transition.  */
  /* A CONSTANT counts as changed when the value kind differs, the
     known (unmasked) integer bits or the mask itself differ, or a
     non-integer value compares unequal.  */
51938fd1498Szrj   if (old_val->lattice_val != new_val->lattice_val
52038fd1498Szrj       || (new_val->lattice_val == CONSTANT
52138fd1498Szrj 	  && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
52238fd1498Szrj 	      || (TREE_CODE (new_val->value) == INTEGER_CST
52338fd1498Szrj 		  && (new_val->mask != old_val->mask
52438fd1498Szrj 		      || (wi::bit_and_not (wi::to_widest (old_val->value),
52538fd1498Szrj 					   new_val->mask)
52638fd1498Szrj 			  != wi::bit_and_not (wi::to_widest (new_val->value),
52738fd1498Szrj 					      new_val->mask))))
52838fd1498Szrj 	      || (TREE_CODE (new_val->value) != INTEGER_CST
52938fd1498Szrj 		  && !operand_equal_p (new_val->value, old_val->value, 0)))))
53038fd1498Szrj     {
53138fd1498Szrj       /* ???  We would like to delay creation of INTEGER_CSTs from
53238fd1498Szrj 	 partially constants here.  */
53338fd1498Szrj 
53438fd1498Szrj       if (dump_file && (dump_flags & TDF_DETAILS))
53538fd1498Szrj 	{
53638fd1498Szrj 	  dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
53738fd1498Szrj 	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
53838fd1498Szrj 	}
53938fd1498Szrj 
54038fd1498Szrj       *old_val = *new_val;
54138fd1498Szrj 
54238fd1498Szrj       gcc_assert (new_val->lattice_val != UNINITIALIZED);
54338fd1498Szrj       return true;
54438fd1498Szrj     }
54538fd1498Szrj 
54638fd1498Szrj   return false;
54738fd1498Szrj }
54838fd1498Szrj 
/* Forward declarations of helpers defined later in this file.  The
   non-static bit_value_binop overload works directly on value/mask
   pairs (presumably exported via tree-ssa-ccp.h — confirm).  */
54938fd1498Szrj static ccp_prop_value_t get_value_for_expr (tree, bool);
55038fd1498Szrj static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
55138fd1498Szrj void bit_value_binop (enum tree_code, signop, int, widest_int *, widest_int *,
55238fd1498Szrj 		      signop, int, const widest_int &, const widest_int &,
55338fd1498Szrj 		      signop, int, const widest_int &, const widest_int &);
55438fd1498Szrj 
55538fd1498Szrj /* Return a widest_int that can be used for bitwise simplifications
55638fd1498Szrj    from VAL.  */
55738fd1498Szrj 
55838fd1498Szrj static widest_int
value_to_wide_int(ccp_prop_value_t val)55938fd1498Szrj value_to_wide_int (ccp_prop_value_t val)
56038fd1498Szrj {
56138fd1498Szrj   if (val.value
56238fd1498Szrj       && TREE_CODE (val.value) == INTEGER_CST)
56338fd1498Szrj     return wi::to_widest (val.value);
56438fd1498Szrj 
56538fd1498Szrj   return 0;
56638fd1498Szrj }
56738fd1498Szrj 
56838fd1498Szrj /* Return the value for the address expression EXPR based on alignment
56938fd1498Szrj    information.  */
57038fd1498Szrj 
57138fd1498Szrj static ccp_prop_value_t
get_value_from_alignment(tree expr)57238fd1498Szrj get_value_from_alignment (tree expr)
57338fd1498Szrj {
57438fd1498Szrj   tree type = TREE_TYPE (expr);
57538fd1498Szrj   ccp_prop_value_t val;
57638fd1498Szrj   unsigned HOST_WIDE_INT bitpos;
57738fd1498Szrj   unsigned int align;
57838fd1498Szrj 
57938fd1498Szrj   gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
58038fd1498Szrj 
58138fd1498Szrj   get_pointer_alignment_1 (expr, &align, &bitpos);
58238fd1498Szrj   val.mask = wi::bit_and_not
58338fd1498Szrj     (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
58438fd1498Szrj      ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
58538fd1498Szrj      : -1,
58638fd1498Szrj      align / BITS_PER_UNIT - 1);
58738fd1498Szrj   val.lattice_val
58838fd1498Szrj     = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
58938fd1498Szrj   if (val.lattice_val == CONSTANT)
59038fd1498Szrj     val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
59138fd1498Szrj   else
59238fd1498Szrj     val.value = NULL_TREE;
59338fd1498Szrj 
59438fd1498Szrj   return val;
59538fd1498Szrj }
59638fd1498Szrj 
59738fd1498Szrj /* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
59838fd1498Szrj    return constant bits extracted from alignment information for
59938fd1498Szrj    invariant addresses.  */
60038fd1498Szrj 
60138fd1498Szrj static ccp_prop_value_t
get_value_for_expr(tree expr,bool for_bits_p)60238fd1498Szrj get_value_for_expr (tree expr, bool for_bits_p)
60338fd1498Szrj {
60438fd1498Szrj   ccp_prop_value_t val;
60538fd1498Szrj 
60638fd1498Szrj   if (TREE_CODE (expr) == SSA_NAME)
60738fd1498Szrj     {
60838fd1498Szrj       ccp_prop_value_t *val_ = get_value (expr);
60938fd1498Szrj       if (val_)
61038fd1498Szrj 	val = *val_;
61138fd1498Szrj       else
61238fd1498Szrj 	{
61338fd1498Szrj 	  val.lattice_val = VARYING;
61438fd1498Szrj 	  val.value = NULL_TREE;
61538fd1498Szrj 	  val.mask = -1;
61638fd1498Szrj 	}
      /* For bit queries, a constant ADDR_EXPR is better represented
	 by its alignment-derived known bits.  */
61738fd1498Szrj       if (for_bits_p
61838fd1498Szrj 	  && val.lattice_val == CONSTANT
61938fd1498Szrj 	  && TREE_CODE (val.value) == ADDR_EXPR)
62038fd1498Szrj 	val = get_value_from_alignment (val.value);
62138fd1498Szrj       /* Fall back to a copy value.  */
62238fd1498Szrj       if (!for_bits_p
62338fd1498Szrj 	  && val.lattice_val == VARYING
62438fd1498Szrj 	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
62538fd1498Szrj 	{
62638fd1498Szrj 	  val.lattice_val = CONSTANT;
62738fd1498Szrj 	  val.value = expr;
62838fd1498Szrj 	  val.mask = -1;
62938fd1498Szrj 	}
63038fd1498Szrj     }
63138fd1498Szrj   else if (is_gimple_min_invariant (expr)
63238fd1498Szrj 	   && (!for_bits_p || TREE_CODE (expr) == INTEGER_CST))
63338fd1498Szrj     {
63438fd1498Szrj       val.lattice_val = CONSTANT;
63538fd1498Szrj       val.value = expr;
63638fd1498Szrj       val.mask = 0;
63738fd1498Szrj       canonicalize_value (&val);
63838fd1498Szrj     }
63938fd1498Szrj   else if (TREE_CODE (expr) == ADDR_EXPR)
64038fd1498Szrj     val = get_value_from_alignment (expr);
64138fd1498Szrj   else
64238fd1498Szrj     {
64338fd1498Szrj       val.lattice_val = VARYING;
64438fd1498Szrj       val.mask = -1;
64538fd1498Szrj       val.value = NULL_TREE;
64638fd1498Szrj     }
64738fd1498Szrj 
  /* For unsigned VARYING values the bits above the type's precision
     are known zero (see the file header comment).  */
64838fd1498Szrj   if (val.lattice_val == VARYING
64938fd1498Szrj       && TYPE_UNSIGNED (TREE_TYPE (expr)))
65038fd1498Szrj     val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));
65138fd1498Szrj 
65238fd1498Szrj   return val;
65338fd1498Szrj }
65438fd1498Szrj 
65538fd1498Szrj /* Return the likely CCP lattice value for STMT.
65638fd1498Szrj 
65738fd1498Szrj    If STMT has no operands, then return CONSTANT.
65838fd1498Szrj 
65938fd1498Szrj    Else if undefinedness of operands of STMT cause its value to be
66038fd1498Szrj    undefined, then return UNDEFINED.
66138fd1498Szrj 
66238fd1498Szrj    Else if any operands of STMT are constants, then return CONSTANT.
66338fd1498Szrj 
66438fd1498Szrj    Else return VARYING.  */
66538fd1498Szrj 
66638fd1498Szrj static ccp_lattice_t
likely_value(gimple * stmt)66738fd1498Szrj likely_value (gimple *stmt)
66838fd1498Szrj {
66938fd1498Szrj   bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  /* "nsa" = "not simulated again": set when some SSA use is a default
     definition or is defined by a statement the propagator will not
     revisit, so that operand's lattice value is already final.  */
67038fd1498Szrj   bool has_nsa_operand;
67138fd1498Szrj   tree use;
67238fd1498Szrj   ssa_op_iter iter;
67338fd1498Szrj   unsigned i;
67438fd1498Szrj 
67538fd1498Szrj   enum gimple_code code = gimple_code (stmt);
67638fd1498Szrj 
67738fd1498Szrj   /* This function appears to be called only for assignments, calls,
67838fd1498Szrj      conditionals, and switches, due to the logic in visit_stmt.  */
67938fd1498Szrj   gcc_assert (code == GIMPLE_ASSIGN
68038fd1498Szrj               || code == GIMPLE_CALL
68138fd1498Szrj               || code == GIMPLE_COND
68238fd1498Szrj               || code == GIMPLE_SWITCH);
68338fd1498Szrj 
68438fd1498Szrj   /* If the statement has volatile operands, it won't fold to a
68538fd1498Szrj      constant value.  */
68638fd1498Szrj   if (gimple_has_volatile_ops (stmt))
68738fd1498Szrj     return VARYING;
68838fd1498Szrj 
68938fd1498Szrj   /* Arrive here for more complex cases.  */
69038fd1498Szrj   has_constant_operand = false;
69138fd1498Szrj   has_undefined_operand = false;
69238fd1498Szrj   all_undefined_operands = true;
69338fd1498Szrj   has_nsa_operand = false;
  /* Classify every SSA use of STMT by its current lattice value.  */
69438fd1498Szrj   FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
69538fd1498Szrj     {
69638fd1498Szrj       ccp_prop_value_t *val = get_value (use);
69738fd1498Szrj 
69838fd1498Szrj       if (val && val->lattice_val == UNDEFINED)
69938fd1498Szrj 	has_undefined_operand = true;
70038fd1498Szrj       else
70138fd1498Szrj 	all_undefined_operands = false;
70238fd1498Szrj 
70338fd1498Szrj       if (val && val->lattice_val == CONSTANT)
70438fd1498Szrj 	has_constant_operand = true;
70538fd1498Szrj 
70638fd1498Szrj       if (SSA_NAME_IS_DEFAULT_DEF (use)
70738fd1498Szrj 	  || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
70838fd1498Szrj 	has_nsa_operand = true;
70938fd1498Szrj     }
71038fd1498Szrj 
71138fd1498Szrj   /* There may be constants in regular rhs operands.  For calls we
71238fd1498Szrj      have to ignore lhs, fndecl and static chain, otherwise only
71338fd1498Szrj      the lhs.  */
  /* The start index skips the lhs slot (if present) plus, for calls,
     the two slots holding the callee and static chain.  */
71438fd1498Szrj   for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
71538fd1498Szrj        i < gimple_num_ops (stmt); ++i)
71638fd1498Szrj     {
71738fd1498Szrj       tree op = gimple_op (stmt, i);
71838fd1498Szrj       if (!op || TREE_CODE (op) == SSA_NAME)
71938fd1498Szrj 	continue;
72038fd1498Szrj       if (is_gimple_min_invariant (op))
72138fd1498Szrj 	has_constant_operand = true;
72238fd1498Szrj     }
72338fd1498Szrj 
72438fd1498Szrj   if (has_constant_operand)
72538fd1498Szrj     all_undefined_operands = false;
72638fd1498Szrj 
72738fd1498Szrj   if (has_undefined_operand
72838fd1498Szrj       && code == GIMPLE_CALL
72938fd1498Szrj       && gimple_call_internal_p (stmt))
73038fd1498Szrj     switch (gimple_call_internal_fn (stmt))
73138fd1498Szrj       {
73238fd1498Szrj 	/* These 3 builtins use the first argument just as a magic
73338fd1498Szrj 	   way how to find out a decl uid.  */
73438fd1498Szrj       case IFN_GOMP_SIMD_LANE:
73538fd1498Szrj       case IFN_GOMP_SIMD_VF:
73638fd1498Szrj       case IFN_GOMP_SIMD_LAST_LANE:
73738fd1498Szrj 	has_undefined_operand = false;
73838fd1498Szrj 	break;
73938fd1498Szrj       default:
74038fd1498Szrj 	break;
74138fd1498Szrj       }
74238fd1498Szrj 
74338fd1498Szrj   /* If the operation combines operands like COMPLEX_EXPR make sure to
74438fd1498Szrj      not mark the result UNDEFINED if only one part of the result is
74538fd1498Szrj      undefined.  */
74638fd1498Szrj   if (has_undefined_operand && all_undefined_operands)
74738fd1498Szrj     return UNDEFINED;
74838fd1498Szrj   else if (code == GIMPLE_ASSIGN && has_undefined_operand)
74938fd1498Szrj     {
75038fd1498Szrj       switch (gimple_assign_rhs_code (stmt))
75138fd1498Szrj 	{
75238fd1498Szrj 	/* Unary operators are handled with all_undefined_operands.  */
75338fd1498Szrj 	case PLUS_EXPR:
75438fd1498Szrj 	case MINUS_EXPR:
75538fd1498Szrj 	case POINTER_PLUS_EXPR:
75638fd1498Szrj 	case BIT_XOR_EXPR:
75738fd1498Szrj 	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
75838fd1498Szrj 	     Not bitwise operators, one VARYING operand may specify the
75938fd1498Szrj 	     result completely.
76038fd1498Szrj 	     Not logical operators for the same reason, apart from XOR.
76138fd1498Szrj 	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
76238fd1498Szrj 	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
76338fd1498Szrj 	     the undefined operand may be promoted.  */
76438fd1498Szrj 	  return UNDEFINED;
76538fd1498Szrj 
76638fd1498Szrj 	case ADDR_EXPR:
76738fd1498Szrj 	  /* If any part of an address is UNDEFINED, like the index
76838fd1498Szrj 	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
76938fd1498Szrj 	  return UNDEFINED;
77038fd1498Szrj 
77138fd1498Szrj 	default:
77238fd1498Szrj 	  ;
77338fd1498Szrj 	}
77438fd1498Szrj     }
77538fd1498Szrj   /* If there was an UNDEFINED operand but the result may be not UNDEFINED
77638fd1498Szrj      fall back to CONSTANT.  During iteration UNDEFINED may still drop
77738fd1498Szrj      to CONSTANT.  */
77838fd1498Szrj   if (has_undefined_operand)
77938fd1498Szrj     return CONSTANT;
78038fd1498Szrj 
78138fd1498Szrj   /* We do not consider virtual operands here -- load from read-only
78238fd1498Szrj      memory may have only VARYING virtual operands, but still be
78338fd1498Szrj      constant.  Also we can combine the stmt with definitions from
78438fd1498Szrj      operands whose definitions are not simulated again.  */
78538fd1498Szrj   if (has_constant_operand
78638fd1498Szrj       || has_nsa_operand
78738fd1498Szrj       || gimple_references_memory_p (stmt))
78838fd1498Szrj     return CONSTANT;
78938fd1498Szrj 
79038fd1498Szrj   return VARYING;
79138fd1498Szrj }
79238fd1498Szrj 
79338fd1498Szrj /* Returns true if STMT cannot be constant.  */
79438fd1498Szrj 
79538fd1498Szrj static bool
surely_varying_stmt_p(gimple * stmt)79638fd1498Szrj surely_varying_stmt_p (gimple *stmt)
79738fd1498Szrj {
79838fd1498Szrj   /* If the statement has operands that we cannot handle, it cannot be
79938fd1498Szrj      constant.  */
80038fd1498Szrj   if (gimple_has_volatile_ops (stmt))
80138fd1498Szrj     return true;
80238fd1498Szrj 
80338fd1498Szrj   /* If it is a call and does not return a value or is not a
80438fd1498Szrj      builtin and not an indirect call or a call to function with
80538fd1498Szrj      assume_aligned/alloc_align attribute, it is varying.  */
80638fd1498Szrj   if (is_gimple_call (stmt))
80738fd1498Szrj     {
      /* NOTE(review): the attribute lookups dereference FNTYPE without a
	 NULL check; presumably a call with a non-NULL FNDECL always has a
	 non-NULL fntype -- confirm.  */
80838fd1498Szrj       tree fndecl, fntype = gimple_call_fntype (stmt);
80938fd1498Szrj       if (!gimple_call_lhs (stmt)
81038fd1498Szrj 	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
81138fd1498Szrj 	      && !DECL_BUILT_IN (fndecl)
81238fd1498Szrj 	      && !lookup_attribute ("assume_aligned",
81338fd1498Szrj 				    TYPE_ATTRIBUTES (fntype))
81438fd1498Szrj 	      && !lookup_attribute ("alloc_align",
81538fd1498Szrj 				    TYPE_ATTRIBUTES (fntype))))
81638fd1498Szrj 	return true;
81738fd1498Szrj     }
81838fd1498Szrj 
81938fd1498Szrj   /* Any other store operation is not interesting.  */
82038fd1498Szrj   else if (gimple_vdef (stmt))
82138fd1498Szrj     return true;
82238fd1498Szrj 
82338fd1498Szrj   /* Anything other than assignments and conditional jumps are not
82438fd1498Szrj      interesting for CCP.  */
82538fd1498Szrj   if (gimple_code (stmt) != GIMPLE_ASSIGN
82638fd1498Szrj       && gimple_code (stmt) != GIMPLE_COND
82738fd1498Szrj       && gimple_code (stmt) != GIMPLE_SWITCH
82838fd1498Szrj       && gimple_code (stmt) != GIMPLE_CALL
82938fd1498Szrj     return true;
83038fd1498Szrj 
83138fd1498Szrj   return false;
83238fd1498Szrj }
83338fd1498Szrj 
83438fd1498Szrj /* Initialize local data structures for CCP.  */
83538fd1498Szrj 
83638fd1498Szrj static void
ccp_initialize(void)83738fd1498Szrj ccp_initialize (void)
83838fd1498Szrj {
83938fd1498Szrj   basic_block bb;
84038fd1498Szrj 
  /* Allocate one lattice cell per SSA name.  XCNEWVEC zero-fills the
     vector; presumably the all-zero cell corresponds to the
     UNINITIALIZED lattice state described at the top of the file --
     confirm against ccp_lattice_t.  */
84138fd1498Szrj   n_const_val = num_ssa_names;
84238fd1498Szrj   const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
84338fd1498Szrj 
84438fd1498Szrj   /* Initialize simulation flags for PHI nodes and statements.  */
84538fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
84638fd1498Szrj     {
84738fd1498Szrj       gimple_stmt_iterator i;
84838fd1498Szrj 
84938fd1498Szrj       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
85038fd1498Szrj         {
85138fd1498Szrj 	  gimple *stmt = gsi_stmt (i);
85238fd1498Szrj 	  bool is_varying;
85338fd1498Szrj 
85438fd1498Szrj 	  /* If the statement is a control insn, then we do not
85538fd1498Szrj 	     want to avoid simulating the statement once.  Failure
85638fd1498Szrj 	     to do so means that those edges will never get added.  */
85738fd1498Szrj 	  if (stmt_ends_bb_p (stmt))
85838fd1498Szrj 	    is_varying = false;
85938fd1498Szrj 	  else
86038fd1498Szrj 	    is_varying = surely_varying_stmt_p (stmt);
86138fd1498Szrj 
86238fd1498Szrj 	  if (is_varying)
86338fd1498Szrj 	    {
86438fd1498Szrj 	      tree def;
86538fd1498Szrj 	      ssa_op_iter iter;
86638fd1498Szrj 
86738fd1498Szrj 	      /* If the statement will not produce a constant, mark
86838fd1498Szrj 		 all its outputs VARYING.  */
86938fd1498Szrj 	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
87038fd1498Szrj 		set_value_varying (def);
87138fd1498Szrj 	    }
	  /* Surely-varying statements are never simulated again; their
	     outputs were pinned to VARYING above.  */
87238fd1498Szrj           prop_set_simulate_again (stmt, !is_varying);
87338fd1498Szrj 	}
87438fd1498Szrj     }
87538fd1498Szrj 
87638fd1498Szrj   /* Now process PHI nodes.  We never clear the simulate_again flag on
87738fd1498Szrj      phi nodes, since we do not know which edges are executable yet,
87838fd1498Szrj      except for phi nodes for virtual operands when we do not do store ccp.  */
87938fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
88038fd1498Szrj     {
88138fd1498Szrj       gphi_iterator i;
88238fd1498Szrj 
88338fd1498Szrj       for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
88438fd1498Szrj         {
88538fd1498Szrj           gphi *phi = i.phi ();
88638fd1498Szrj 
88738fd1498Szrj 	  if (virtual_operand_p (gimple_phi_result (phi)))
88838fd1498Szrj             prop_set_simulate_again (phi, false);
88938fd1498Szrj 	  else
89038fd1498Szrj             prop_set_simulate_again (phi, true);
89138fd1498Szrj 	}
89238fd1498Szrj     }
89338fd1498Szrj }
89438fd1498Szrj 
89538fd1498Szrj /* Debug count support. Reset the values of ssa names
89638fd1498Szrj    VARYING when the total number ssa names analyzed is
89738fd1498Szrj    beyond the debug count specified.  */
89838fd1498Szrj 
89938fd1498Szrj static void
do_dbg_cnt(void)90038fd1498Szrj do_dbg_cnt (void)
90138fd1498Szrj {
90238fd1498Szrj   unsigned i;
90338fd1498Szrj   for (i = 0; i < num_ssa_names; i++)
90438fd1498Szrj     {
      /* One dbg_cnt tick is consumed per SSA name; once the debug-count
	 limit is exceeded every remaining name's lattice value is forced
	 back to VARYING so no constant gets substituted for it.  */
90538fd1498Szrj       if (!dbg_cnt (ccp))
90638fd1498Szrj         {
90738fd1498Szrj           const_val[i].lattice_val = VARYING;
90838fd1498Szrj 	  const_val[i].mask = -1;
90938fd1498Szrj           const_val[i].value = NULL_TREE;
91038fd1498Szrj         }
91138fd1498Szrj     }
91238fd1498Szrj }
91338fd1498Szrj 
91438fd1498Szrj 
91538fd1498Szrj /* We want to provide our own GET_VALUE and FOLD_STMT virtual methods.  */
91638fd1498Szrj class ccp_folder : public substitute_and_fold_engine
91738fd1498Szrj {
91838fd1498Szrj  public:
  /* Return the lattice constant recorded for an SSA name; wraps
     get_constant_value.  */
91938fd1498Szrj   tree get_value (tree) FINAL OVERRIDE;
  /* CCP-specific folding of the statement at the iterator; defined
     elsewhere in this file.  */
92038fd1498Szrj   bool fold_stmt (gimple_stmt_iterator *) FINAL OVERRIDE;
92138fd1498Szrj };
92238fd1498Szrj 
92338fd1498Szrj /* This method just wraps GET_CONSTANT_VALUE for now.  Over time
92438fd1498Szrj    naked calls to GET_CONSTANT_VALUE should be eliminated in favor
92538fd1498Szrj    of calling member functions.  */
92638fd1498Szrj 
92738fd1498Szrj tree
get_value(tree op)92838fd1498Szrj ccp_folder::get_value (tree op)
92938fd1498Szrj {
93038fd1498Szrj   return get_constant_value (op);
93138fd1498Szrj }
93238fd1498Szrj 
93338fd1498Szrj /* Do final substitution of propagated values, cleanup the flowgraph and
93438fd1498Szrj    free allocated storage.  If NONZERO_P, record nonzero bits.
93538fd1498Szrj 
93638fd1498Szrj    Return TRUE when something was optimized.  */
93738fd1498Szrj 
93838fd1498Szrj static bool
ccp_finalize(bool nonzero_p)93938fd1498Szrj ccp_finalize (bool nonzero_p)
94038fd1498Szrj {
94138fd1498Szrj   bool something_changed;
94238fd1498Szrj   unsigned i;
94338fd1498Szrj   tree name;
94438fd1498Szrj 
94538fd1498Szrj   do_dbg_cnt ();
94638fd1498Szrj 
94738fd1498Szrj   /* Derive alignment and misalignment information from partially
94838fd1498Szrj      constant pointers in the lattice or nonzero bits from partially
94938fd1498Szrj      constant integers.  */
95038fd1498Szrj   FOR_EACH_SSA_NAME (i, name, cfun)
95138fd1498Szrj     {
95238fd1498Szrj       ccp_prop_value_t *val;
95338fd1498Szrj       unsigned int tem, align;
95438fd1498Szrj 
95538fd1498Szrj       if (!POINTER_TYPE_P (TREE_TYPE (name))
95638fd1498Szrj 	  && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
95738fd1498Szrj 	      /* Don't record nonzero bits before IPA to avoid
95838fd1498Szrj 		 using too much memory.  */
95938fd1498Szrj 	      || !nonzero_p))
96038fd1498Szrj 	continue;
96138fd1498Szrj 
      /* Only partially constant INTEGER_CST values (non-zero mask) carry
	 extra alignment / nonzero-bits information here; fully constant
	 values are handled by the substitution step below.  */
96238fd1498Szrj       val = get_value (name);
96338fd1498Szrj       if (val->lattice_val != CONSTANT
96438fd1498Szrj 	  || TREE_CODE (val->value) != INTEGER_CST
96538fd1498Szrj 	  || val->mask == 0)
96638fd1498Szrj 	continue;
96738fd1498Szrj 
96838fd1498Szrj       if (POINTER_TYPE_P (TREE_TYPE (name)))
96938fd1498Szrj 	{
97038fd1498Szrj 	  /* Trailing mask bits specify the alignment, trailing value
97138fd1498Szrj 	     bits the misalignment.  */
97238fd1498Szrj 	  tem = val->mask.to_uhwi ();
97338fd1498Szrj 	  align = least_bit_hwi (tem);
97438fd1498Szrj 	  if (align > 1)
97538fd1498Szrj 	    set_ptr_info_alignment (get_ptr_info (name), align,
97638fd1498Szrj 				    (TREE_INT_CST_LOW (val->value)
97738fd1498Szrj 				     & (align - 1)));
97838fd1498Szrj 	}
97938fd1498Szrj       else
98038fd1498Szrj 	{
	  /* A bit can be nonzero if it is a known-set value bit or an
	     unknown (mask) bit; intersect with any previously recorded
	     nonzero bits.  */
98138fd1498Szrj 	  unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
98238fd1498Szrj 	  wide_int nonzero_bits
98338fd1498Szrj 	    = (wide_int::from (val->mask, precision, UNSIGNED)
98438fd1498Szrj 	       | wi::to_wide (val->value));
98538fd1498Szrj 	  nonzero_bits &= get_nonzero_bits (name);
98638fd1498Szrj 	  set_nonzero_bits (name, nonzero_bits);
98738fd1498Szrj 	}
98838fd1498Szrj     }
98938fd1498Szrj 
99038fd1498Szrj   /* Perform substitutions based on the known constant values.  */
99138fd1498Szrj   class ccp_folder ccp_folder;
99238fd1498Szrj   something_changed = ccp_folder.substitute_and_fold ();
99338fd1498Szrj 
99438fd1498Szrj   free (const_val);
99538fd1498Szrj   const_val = NULL;
99638fd1498Szrj   return something_changed;
99738fd1498Szrj }
99838fd1498Szrj 
99938fd1498Szrj 
100038fd1498Szrj /* Compute the meet operator between *VAL1 and *VAL2.  Store the result
100138fd1498Szrj    in VAL1.
100238fd1498Szrj 
100338fd1498Szrj    		any  M UNDEFINED   = any
100438fd1498Szrj 		any  M VARYING     = VARYING
100538fd1498Szrj 		Ci   M Cj	   = Ci		if (i == j)
100638fd1498Szrj 		Ci   M Cj	   = VARYING	if (i != j)
100738fd1498Szrj    */
100838fd1498Szrj 
100938fd1498Szrj static void
ccp_lattice_meet(ccp_prop_value_t * val1,ccp_prop_value_t * val2)101038fd1498Szrj ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
101138fd1498Szrj {
101238fd1498Szrj   if (val1->lattice_val == UNDEFINED
101338fd1498Szrj       /* For UNDEFINED M SSA we can't always SSA because its definition
101438fd1498Szrj          may not dominate the PHI node.  Doing optimistic copy propagation
101538fd1498Szrj 	 also causes a lot of gcc.dg/uninit-pred*.c FAILs.  */
101638fd1498Szrj       && (val2->lattice_val != CONSTANT
101738fd1498Szrj 	  || TREE_CODE (val2->value) != SSA_NAME))
101838fd1498Szrj     {
101938fd1498Szrj       /* UNDEFINED M any = any   */
102038fd1498Szrj       *val1 = *val2;
102138fd1498Szrj     }
102238fd1498Szrj   else if (val2->lattice_val == UNDEFINED
102338fd1498Szrj 	   /* See above.  */
102438fd1498Szrj 	   && (val1->lattice_val != CONSTANT
102538fd1498Szrj 	       || TREE_CODE (val1->value) != SSA_NAME))
102638fd1498Szrj     {
102738fd1498Szrj       /* any M UNDEFINED = any
102838fd1498Szrj          Nothing to do.  VAL1 already contains the value we want.  */
102938fd1498Szrj       ;
103038fd1498Szrj     }
103138fd1498Szrj   else if (val1->lattice_val == VARYING
103238fd1498Szrj            || val2->lattice_val == VARYING)
103338fd1498Szrj     {
103438fd1498Szrj       /* any M VARYING = VARYING.  */
103538fd1498Szrj       val1->lattice_val = VARYING;
103638fd1498Szrj       val1->mask = -1;
103738fd1498Szrj       val1->value = NULL_TREE;
103838fd1498Szrj     }
103938fd1498Szrj   else if (val1->lattice_val == CONSTANT
104038fd1498Szrj 	   && val2->lattice_val == CONSTANT
104138fd1498Szrj 	   && TREE_CODE (val1->value) == INTEGER_CST
104238fd1498Szrj 	   && TREE_CODE (val2->value) == INTEGER_CST)
104338fd1498Szrj     {
104438fd1498Szrj       /* Ci M Cj = Ci		if (i == j)
104538fd1498Szrj 	 Ci M Cj = VARYING	if (i != j)
104638fd1498Szrj 
104738fd1498Szrj          For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
104838fd1498Szrj 	 drop to varying.  */
      /* A set mask bit marks that bit as unknown: combine both operands'
	 unknown bits and additionally mark every bit where the two
	 constants differ.  */
104938fd1498Szrj       val1->mask = (val1->mask | val2->mask
105038fd1498Szrj 		    | (wi::to_widest (val1->value)
105138fd1498Szrj 		       ^ wi::to_widest (val2->value)));
      /* All-ones mask within the type's precision means no bit is known
	 any more -> VARYING.  */
105238fd1498Szrj       if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
105338fd1498Szrj 	{
105438fd1498Szrj 	  val1->lattice_val = VARYING;
105538fd1498Szrj 	  val1->value = NULL_TREE;
105638fd1498Szrj 	}
105738fd1498Szrj     }
105838fd1498Szrj   else if (val1->lattice_val == CONSTANT
105938fd1498Szrj 	   && val2->lattice_val == CONSTANT
106038fd1498Szrj 	   && operand_equal_p (val1->value, val2->value, 0))
106138fd1498Szrj     {
106238fd1498Szrj       /* Ci M Cj = Ci		if (i == j)
106338fd1498Szrj 	 Ci M Cj = VARYING	if (i != j)
106438fd1498Szrj 
106538fd1498Szrj          VAL1 already contains the value we want for equivalent values.  */
106638fd1498Szrj     }
106738fd1498Szrj   else if (val1->lattice_val == CONSTANT
106838fd1498Szrj 	   && val2->lattice_val == CONSTANT
106938fd1498Szrj 	   && (TREE_CODE (val1->value) == ADDR_EXPR
107038fd1498Szrj 	       || TREE_CODE (val2->value) == ADDR_EXPR))
107138fd1498Szrj     {
107238fd1498Szrj       /* When not equal addresses are involved try meeting for
107338fd1498Szrj 	 alignment.  */
      /* Recurses at most once: both values are converted to their
	 alignment form (no ADDR_EXPR remains) before the inner meet.  */
107438fd1498Szrj       ccp_prop_value_t tem = *val2;
107538fd1498Szrj       if (TREE_CODE (val1->value) == ADDR_EXPR)
107638fd1498Szrj 	*val1 = get_value_for_expr (val1->value, true);
107738fd1498Szrj       if (TREE_CODE (val2->value) == ADDR_EXPR)
107838fd1498Szrj 	tem = get_value_for_expr (val2->value, true);
107938fd1498Szrj       ccp_lattice_meet (val1, &tem);
108038fd1498Szrj     }
108138fd1498Szrj   else
108238fd1498Szrj     {
108338fd1498Szrj       /* Any other combination is VARYING.  */
108438fd1498Szrj       val1->lattice_val = VARYING;
108538fd1498Szrj       val1->mask = -1;
108638fd1498Szrj       val1->value = NULL_TREE;
108738fd1498Szrj     }
108838fd1498Szrj }
108938fd1498Szrj 
109038fd1498Szrj 
109138fd1498Szrj /* Loop through the PHI_NODE's parameters for BLOCK and compare their
109238fd1498Szrj    lattice values to determine PHI_NODE's lattice value.  The value of a
109338fd1498Szrj    PHI node is determined calling ccp_lattice_meet with all the arguments
109438fd1498Szrj    of the PHI node that are incoming via executable edges.  */
109538fd1498Szrj 
109638fd1498Szrj enum ssa_prop_result
visit_phi(gphi * phi)109738fd1498Szrj ccp_propagate::visit_phi (gphi *phi)
109838fd1498Szrj {
109938fd1498Szrj   unsigned i;
110038fd1498Szrj   ccp_prop_value_t new_val;
110138fd1498Szrj 
110238fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
110338fd1498Szrj     {
110438fd1498Szrj       fprintf (dump_file, "\nVisiting PHI node: ");
110538fd1498Szrj       print_gimple_stmt (dump_file, phi, 0, dump_flags);
110638fd1498Szrj     }
110738fd1498Szrj 
110838fd1498Szrj   new_val.lattice_val = UNDEFINED;
110938fd1498Szrj   new_val.value = NULL_TREE;
111038fd1498Szrj   new_val.mask = 0;
111138fd1498Szrj 
  /* FIRST is true until the first executable incoming edge has been
     processed; NON_EXEC_EDGE records whether any incoming edge was not
     (yet) executable.  */
111238fd1498Szrj   bool first = true;
111338fd1498Szrj   bool non_exec_edge = false;
111438fd1498Szrj   for (i = 0; i < gimple_phi_num_args (phi); i++)
111538fd1498Szrj     {
111638fd1498Szrj       /* Compute the meet operator over all the PHI arguments flowing
111738fd1498Szrj 	 through executable edges.  */
111838fd1498Szrj       edge e = gimple_phi_arg_edge (phi, i);
111938fd1498Szrj 
112038fd1498Szrj       if (dump_file && (dump_flags & TDF_DETAILS))
112138fd1498Szrj 	{
112238fd1498Szrj 	  fprintf (dump_file,
1123*58e805e6Szrj 	      "\tArgument #%d (%d -> %d %sexecutable)\n",
112438fd1498Szrj 	      i, e->src->index, e->dest->index,
112538fd1498Szrj 	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
112638fd1498Szrj 	}
112738fd1498Szrj 
112838fd1498Szrj       /* If the incoming edge is executable, Compute the meet operator for
112938fd1498Szrj 	 the existing value of the PHI node and the current PHI argument.  */
113038fd1498Szrj       if (e->flags & EDGE_EXECUTABLE)
113138fd1498Szrj 	{
113238fd1498Szrj 	  tree arg = gimple_phi_arg (phi, i)->def;
113338fd1498Szrj 	  ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
113438fd1498Szrj 
113538fd1498Szrj 	  if (first)
113638fd1498Szrj 	    {
113738fd1498Szrj 	      new_val = arg_val;
113838fd1498Szrj 	      first = false;
113938fd1498Szrj 	    }
114038fd1498Szrj 	  else
114138fd1498Szrj 	    ccp_lattice_meet (&new_val, &arg_val);
114238fd1498Szrj 
114338fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
114438fd1498Szrj 	    {
114538fd1498Szrj 	      fprintf (dump_file, "\t");
114638fd1498Szrj 	      print_generic_expr (dump_file, arg, dump_flags);
114738fd1498Szrj 	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
114838fd1498Szrj 	      fprintf (dump_file, "\n");
114938fd1498Szrj 	    }
115038fd1498Szrj 
	  /* VARYING is the bottom of the lattice; no further argument can
	     change the result, so stop early.  */
115138fd1498Szrj 	  if (new_val.lattice_val == VARYING)
115238fd1498Szrj 	    break;
115338fd1498Szrj 	}
115438fd1498Szrj       else
115538fd1498Szrj 	non_exec_edge = true;
115638fd1498Szrj     }
115738fd1498Szrj 
115838fd1498Szrj   /* In case there were non-executable edges and the value is a copy
115938fd1498Szrj      make sure its definition dominates the PHI node.  */
116038fd1498Szrj   if (non_exec_edge
116138fd1498Szrj       && new_val.lattice_val == CONSTANT
116238fd1498Szrj       && TREE_CODE (new_val.value) == SSA_NAME
116338fd1498Szrj       && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
116438fd1498Szrj       && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
116538fd1498Szrj 			   gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
116638fd1498Szrj     {
116738fd1498Szrj       new_val.lattice_val = VARYING;
116838fd1498Szrj       new_val.value = NULL_TREE;
116938fd1498Szrj       new_val.mask = -1;
117038fd1498Szrj     }
117138fd1498Szrj 
117238fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
117338fd1498Szrj     {
117438fd1498Szrj       dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
117538fd1498Szrj       fprintf (dump_file, "\n\n");
117638fd1498Szrj     }
117738fd1498Szrj 
117838fd1498Szrj   /* Make the transition to the new value.  */
117938fd1498Szrj   if (set_lattice_value (gimple_phi_result (phi), &new_val))
118038fd1498Szrj     {
118138fd1498Szrj       if (new_val.lattice_val == VARYING)
118238fd1498Szrj 	return SSA_PROP_VARYING;
118338fd1498Szrj       else
118438fd1498Szrj 	return SSA_PROP_INTERESTING;
118538fd1498Szrj     }
118638fd1498Szrj   else
118738fd1498Szrj     return SSA_PROP_NOT_INTERESTING;
118838fd1498Szrj }
118938fd1498Szrj 
119038fd1498Szrj /* Return the constant value for OP or OP otherwise.  */
119138fd1498Szrj 
119238fd1498Szrj static tree
valueize_op(tree op)119338fd1498Szrj valueize_op (tree op)
119438fd1498Szrj {
119538fd1498Szrj   if (TREE_CODE (op) == SSA_NAME)
119638fd1498Szrj     {
119738fd1498Szrj       tree tem = get_constant_value (op);
119838fd1498Szrj       if (tem)
119938fd1498Szrj 	return tem;
120038fd1498Szrj     }
120138fd1498Szrj   return op;
120238fd1498Szrj }
120338fd1498Szrj 
120438fd1498Szrj /* Return the constant value for OP, but signal to not follow SSA
120538fd1498Szrj    edges if the definition may be simulated again.  */
120638fd1498Szrj 
120738fd1498Szrj static tree
valueize_op_1(tree op)120838fd1498Szrj valueize_op_1 (tree op)
120938fd1498Szrj {
121038fd1498Szrj   if (TREE_CODE (op) == SSA_NAME)
121138fd1498Szrj     {
121238fd1498Szrj       /* If the definition may be simulated again we cannot follow
121338fd1498Szrj          this SSA edge as the SSA propagator does not necessarily
121438fd1498Szrj 	 re-visit the use.  */
121538fd1498Szrj       gimple *def_stmt = SSA_NAME_DEF_STMT (op);
121638fd1498Szrj       if (!gimple_nop_p (def_stmt)
121738fd1498Szrj 	  && prop_simulate_again_p (def_stmt))
121838fd1498Szrj 	return NULL_TREE;
      /* The definition is final, so its recorded lattice constant (if
	 any) is safe to use.  */
121938fd1498Szrj       tree tem = get_constant_value (op);
122038fd1498Szrj       if (tem)
122138fd1498Szrj 	return tem;
122238fd1498Szrj     }
122338fd1498Szrj   return op;
122438fd1498Szrj }
122538fd1498Szrj 
122638fd1498Szrj /* CCP specific front-end to the non-destructive constant folding
122738fd1498Szrj    routines.
122838fd1498Szrj 
122938fd1498Szrj    Attempt to simplify the RHS of STMT knowing that one or more
123038fd1498Szrj    operands are constants.
123138fd1498Szrj 
123238fd1498Szrj    If simplification is possible, return the simplified RHS,
123338fd1498Szrj    otherwise return the original RHS or NULL_TREE.  */
123438fd1498Szrj 
123538fd1498Szrj static tree
ccp_fold(gimple * stmt)123638fd1498Szrj ccp_fold (gimple *stmt)
123738fd1498Szrj {
123838fd1498Szrj   location_t loc = gimple_location (stmt);
123938fd1498Szrj   switch (gimple_code (stmt))
124038fd1498Szrj     {
124138fd1498Szrj     case GIMPLE_COND:
124238fd1498Szrj       {
124338fd1498Szrj         /* Handle comparison operators that can appear in GIMPLE form.  */
124438fd1498Szrj         tree op0 = valueize_op (gimple_cond_lhs (stmt));
124538fd1498Szrj         tree op1 = valueize_op (gimple_cond_rhs (stmt));
124638fd1498Szrj         enum tree_code code = gimple_cond_code (stmt);
124738fd1498Szrj         return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
124838fd1498Szrj       }
124938fd1498Szrj 
125038fd1498Szrj     case GIMPLE_SWITCH:
125138fd1498Szrj       {
125238fd1498Szrj 	/* Return the constant switch index.  */
125338fd1498Szrj         return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
125438fd1498Szrj       }
125538fd1498Szrj 
125638fd1498Szrj     case GIMPLE_ASSIGN:
125738fd1498Szrj     case GIMPLE_CALL:
      /* valueize_op_1 is passed as the inner valueization hook so that
	 SSA edges to definitions that may be simulated again are not
	 followed.  */
125838fd1498Szrj       return gimple_fold_stmt_to_constant_1 (stmt,
125938fd1498Szrj 					     valueize_op, valueize_op_1);
126038fd1498Szrj 
126138fd1498Szrj     default:
126238fd1498Szrj       gcc_unreachable ();
126338fd1498Szrj     }
126438fd1498Szrj }
126538fd1498Szrj 
126638fd1498Szrj /* Apply the operation CODE in type TYPE to the value, mask pair
126738fd1498Szrj    RVAL and RMASK representing a value of type RTYPE and set
126838fd1498Szrj    the value, mask pair *VAL and *MASK to the result.  */
126938fd1498Szrj 
127038fd1498Szrj void
bit_value_unop(enum tree_code code,signop type_sgn,int type_precision,widest_int * val,widest_int * mask,signop rtype_sgn,int rtype_precision,const widest_int & rval,const widest_int & rmask)127138fd1498Szrj bit_value_unop (enum tree_code code, signop type_sgn, int type_precision,
127238fd1498Szrj 		widest_int *val, widest_int *mask,
127338fd1498Szrj 		signop rtype_sgn, int rtype_precision,
127438fd1498Szrj 		const widest_int &rval, const widest_int &rmask)
127538fd1498Szrj {
127638fd1498Szrj   switch (code)
127738fd1498Szrj     {
127838fd1498Szrj     case BIT_NOT_EXPR:
      /* Complementing flips only known bits; the set of unknown (mask)
	 bits is unchanged.  */
127938fd1498Szrj       *mask = rmask;
128038fd1498Szrj       *val = ~rval;
128138fd1498Szrj       break;
128238fd1498Szrj 
128338fd1498Szrj     case NEGATE_EXPR:
128438fd1498Szrj       {
128538fd1498Szrj 	widest_int temv, temm;
128638fd1498Szrj 	/* Return ~rval + 1.  */
128738fd1498Szrj 	bit_value_unop (BIT_NOT_EXPR, type_sgn, type_precision, &temv, &temm,
128838fd1498Szrj 			type_sgn, type_precision, rval, rmask);
128938fd1498Szrj 	bit_value_binop (PLUS_EXPR, type_sgn, type_precision, val, mask,
129038fd1498Szrj 			 type_sgn, type_precision, temv, temm,
129138fd1498Szrj 			 type_sgn, type_precision, 1, 0);
129238fd1498Szrj 	break;
129338fd1498Szrj       }
129438fd1498Szrj 
129538fd1498Szrj     CASE_CONVERT:
129638fd1498Szrj       {
129738fd1498Szrj 	/* First extend mask and value according to the original type.  */
129838fd1498Szrj 	*mask = wi::ext (rmask, rtype_precision, rtype_sgn);
129938fd1498Szrj 	*val = wi::ext (rval, rtype_precision, rtype_sgn);
130038fd1498Szrj 
130138fd1498Szrj 	/* Then extend mask and value according to the target type.  */
130238fd1498Szrj 	*mask = wi::ext (*mask, type_precision, type_sgn);
130338fd1498Szrj 	*val = wi::ext (*val, type_precision, type_sgn);
130438fd1498Szrj 	break;
130538fd1498Szrj       }
130638fd1498Szrj 
130738fd1498Szrj     default:
      /* Unhandled operator: an all-ones mask marks every bit unknown,
	 i.e. the result is VARYING.  */
130838fd1498Szrj       *mask = -1;
130938fd1498Szrj       break;
131038fd1498Szrj     }
131138fd1498Szrj }
131238fd1498Szrj 
131338fd1498Szrj /* Apply the operation CODE in type TYPE to the value, mask pairs
131438fd1498Szrj    R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
131538fd1498Szrj    and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */
131638fd1498Szrj 
131738fd1498Szrj void
bit_value_binop(enum tree_code code,signop sgn,int width,widest_int * val,widest_int * mask,signop r1type_sgn,int r1type_precision,const widest_int & r1val,const widest_int & r1mask,signop r2type_sgn,int r2type_precision,const widest_int & r2val,const widest_int & r2mask)131838fd1498Szrj bit_value_binop (enum tree_code code, signop sgn, int width,
131938fd1498Szrj 		 widest_int *val, widest_int *mask,
132038fd1498Szrj 		 signop r1type_sgn, int r1type_precision,
132138fd1498Szrj 		 const widest_int &r1val, const widest_int &r1mask,
132238fd1498Szrj 		 signop r2type_sgn, int r2type_precision,
132338fd1498Szrj 		 const widest_int &r2val, const widest_int &r2mask)
132438fd1498Szrj {
132538fd1498Szrj   bool swap_p = false;
132638fd1498Szrj 
132738fd1498Szrj   /* Assume we'll get a constant result.  Use an initial non varying
132838fd1498Szrj      value, we fall back to varying in the end if necessary.  */
132938fd1498Szrj   *mask = -1;
133038fd1498Szrj 
133138fd1498Szrj   switch (code)
133238fd1498Szrj     {
133338fd1498Szrj     case BIT_AND_EXPR:
133438fd1498Szrj       /* The mask is constant where there is a known not
133538fd1498Szrj 	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
133638fd1498Szrj       *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
133738fd1498Szrj       *val = r1val & r2val;
133838fd1498Szrj       break;
133938fd1498Szrj 
134038fd1498Szrj     case BIT_IOR_EXPR:
134138fd1498Szrj       /* The mask is constant where there is a known
134238fd1498Szrj 	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
134338fd1498Szrj       *mask = wi::bit_and_not (r1mask | r2mask,
134438fd1498Szrj 			       wi::bit_and_not (r1val, r1mask)
134538fd1498Szrj 			       | wi::bit_and_not (r2val, r2mask));
134638fd1498Szrj       *val = r1val | r2val;
134738fd1498Szrj       break;
134838fd1498Szrj 
134938fd1498Szrj     case BIT_XOR_EXPR:
135038fd1498Szrj       /* m1 | m2  */
135138fd1498Szrj       *mask = r1mask | r2mask;
135238fd1498Szrj       *val = r1val ^ r2val;
135338fd1498Szrj       break;
135438fd1498Szrj 
135538fd1498Szrj     case LROTATE_EXPR:
135638fd1498Szrj     case RROTATE_EXPR:
135738fd1498Szrj       if (r2mask == 0)
135838fd1498Szrj 	{
135938fd1498Szrj 	  widest_int shift = r2val;
136038fd1498Szrj 	  if (shift == 0)
136138fd1498Szrj 	    {
136238fd1498Szrj 	      *mask = r1mask;
136338fd1498Szrj 	      *val = r1val;
136438fd1498Szrj 	    }
136538fd1498Szrj 	  else
136638fd1498Szrj 	    {
136738fd1498Szrj 	      if (wi::neg_p (shift))
136838fd1498Szrj 		{
136938fd1498Szrj 		  shift = -shift;
137038fd1498Szrj 		  if (code == RROTATE_EXPR)
137138fd1498Szrj 		    code = LROTATE_EXPR;
137238fd1498Szrj 		  else
137338fd1498Szrj 		    code = RROTATE_EXPR;
137438fd1498Szrj 		}
137538fd1498Szrj 	      if (code == RROTATE_EXPR)
137638fd1498Szrj 		{
137738fd1498Szrj 		  *mask = wi::rrotate (r1mask, shift, width);
137838fd1498Szrj 		  *val = wi::rrotate (r1val, shift, width);
137938fd1498Szrj 		}
138038fd1498Szrj 	      else
138138fd1498Szrj 		{
138238fd1498Szrj 		  *mask = wi::lrotate (r1mask, shift, width);
138338fd1498Szrj 		  *val = wi::lrotate (r1val, shift, width);
138438fd1498Szrj 		}
138538fd1498Szrj 	    }
138638fd1498Szrj 	}
138738fd1498Szrj       break;
138838fd1498Szrj 
138938fd1498Szrj     case LSHIFT_EXPR:
139038fd1498Szrj     case RSHIFT_EXPR:
139138fd1498Szrj       /* ???  We can handle partially known shift counts if we know
139238fd1498Szrj 	 its sign.  That way we can tell that (x << (y | 8)) & 255
139338fd1498Szrj 	 is zero.  */
139438fd1498Szrj       if (r2mask == 0)
139538fd1498Szrj 	{
139638fd1498Szrj 	  widest_int shift = r2val;
139738fd1498Szrj 	  if (shift == 0)
139838fd1498Szrj 	    {
139938fd1498Szrj 	      *mask = r1mask;
140038fd1498Szrj 	      *val = r1val;
140138fd1498Szrj 	    }
140238fd1498Szrj 	  else
140338fd1498Szrj 	    {
140438fd1498Szrj 	      if (wi::neg_p (shift))
140538fd1498Szrj 		{
140638fd1498Szrj 		  shift = -shift;
140738fd1498Szrj 		  if (code == RSHIFT_EXPR)
140838fd1498Szrj 		    code = LSHIFT_EXPR;
140938fd1498Szrj 		  else
141038fd1498Szrj 		    code = RSHIFT_EXPR;
141138fd1498Szrj 		}
141238fd1498Szrj 	      if (code == RSHIFT_EXPR)
141338fd1498Szrj 		{
141438fd1498Szrj 		  *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
141538fd1498Szrj 		  *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
141638fd1498Szrj 		}
141738fd1498Szrj 	      else
141838fd1498Szrj 		{
141938fd1498Szrj 		  *mask = wi::ext (r1mask << shift, width, sgn);
142038fd1498Szrj 		  *val = wi::ext (r1val << shift, width, sgn);
142138fd1498Szrj 		}
142238fd1498Szrj 	    }
142338fd1498Szrj 	}
142438fd1498Szrj       break;
142538fd1498Szrj 
142638fd1498Szrj     case PLUS_EXPR:
142738fd1498Szrj     case POINTER_PLUS_EXPR:
142838fd1498Szrj       {
142938fd1498Szrj 	/* Do the addition with unknown bits set to zero, to give carry-ins of
143038fd1498Szrj 	   zero wherever possible.  */
143138fd1498Szrj 	widest_int lo = (wi::bit_and_not (r1val, r1mask)
143238fd1498Szrj 			 + wi::bit_and_not (r2val, r2mask));
143338fd1498Szrj 	lo = wi::ext (lo, width, sgn);
143438fd1498Szrj 	/* Do the addition with unknown bits set to one, to give carry-ins of
143538fd1498Szrj 	   one wherever possible.  */
143638fd1498Szrj 	widest_int hi = (r1val | r1mask) + (r2val | r2mask);
143738fd1498Szrj 	hi = wi::ext (hi, width, sgn);
143838fd1498Szrj 	/* Each bit in the result is known if (a) the corresponding bits in
143938fd1498Szrj 	   both inputs are known, and (b) the carry-in to that bit position
144038fd1498Szrj 	   is known.  We can check condition (b) by seeing if we got the same
144138fd1498Szrj 	   result with minimised carries as with maximised carries.  */
144238fd1498Szrj 	*mask = r1mask | r2mask | (lo ^ hi);
144338fd1498Szrj 	*mask = wi::ext (*mask, width, sgn);
144438fd1498Szrj 	/* It shouldn't matter whether we choose lo or hi here.  */
144538fd1498Szrj 	*val = lo;
144638fd1498Szrj 	break;
144738fd1498Szrj       }
144838fd1498Szrj 
144938fd1498Szrj     case MINUS_EXPR:
145038fd1498Szrj       {
145138fd1498Szrj 	widest_int temv, temm;
145238fd1498Szrj 	bit_value_unop (NEGATE_EXPR, r2type_sgn, r2type_precision, &temv, &temm,
145338fd1498Szrj 			  r2type_sgn, r2type_precision, r2val, r2mask);
145438fd1498Szrj 	bit_value_binop (PLUS_EXPR, sgn, width, val, mask,
145538fd1498Szrj 			 r1type_sgn, r1type_precision, r1val, r1mask,
145638fd1498Szrj 			 r2type_sgn, r2type_precision, temv, temm);
145738fd1498Szrj 	break;
145838fd1498Szrj       }
145938fd1498Szrj 
146038fd1498Szrj     case MULT_EXPR:
146138fd1498Szrj       {
146238fd1498Szrj 	/* Just track trailing zeros in both operands and transfer
146338fd1498Szrj 	   them to the other.  */
146438fd1498Szrj 	int r1tz = wi::ctz (r1val | r1mask);
146538fd1498Szrj 	int r2tz = wi::ctz (r2val | r2mask);
146638fd1498Szrj 	if (r1tz + r2tz >= width)
146738fd1498Szrj 	  {
146838fd1498Szrj 	    *mask = 0;
146938fd1498Szrj 	    *val = 0;
147038fd1498Szrj 	  }
147138fd1498Szrj 	else if (r1tz + r2tz > 0)
147238fd1498Szrj 	  {
147338fd1498Szrj 	    *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
147438fd1498Szrj 			     width, sgn);
147538fd1498Szrj 	    *val = 0;
147638fd1498Szrj 	  }
147738fd1498Szrj 	break;
147838fd1498Szrj       }
147938fd1498Szrj 
148038fd1498Szrj     case EQ_EXPR:
148138fd1498Szrj     case NE_EXPR:
148238fd1498Szrj       {
148338fd1498Szrj 	widest_int m = r1mask | r2mask;
148438fd1498Szrj 	if (wi::bit_and_not (r1val, m) != wi::bit_and_not (r2val, m))
148538fd1498Szrj 	  {
148638fd1498Szrj 	    *mask = 0;
148738fd1498Szrj 	    *val = ((code == EQ_EXPR) ? 0 : 1);
148838fd1498Szrj 	  }
148938fd1498Szrj 	else
149038fd1498Szrj 	  {
149138fd1498Szrj 	    /* We know the result of a comparison is always one or zero.  */
149238fd1498Szrj 	    *mask = 1;
149338fd1498Szrj 	    *val = 0;
149438fd1498Szrj 	  }
149538fd1498Szrj 	break;
149638fd1498Szrj       }
149738fd1498Szrj 
149838fd1498Szrj     case GE_EXPR:
149938fd1498Szrj     case GT_EXPR:
150038fd1498Szrj       swap_p = true;
150138fd1498Szrj       code = swap_tree_comparison (code);
150238fd1498Szrj       /* Fall through.  */
150338fd1498Szrj     case LT_EXPR:
150438fd1498Szrj     case LE_EXPR:
150538fd1498Szrj       {
150638fd1498Szrj 	int minmax, maxmin;
150738fd1498Szrj 
150838fd1498Szrj 	const widest_int &o1val = swap_p ? r2val : r1val;
150938fd1498Szrj 	const widest_int &o1mask = swap_p ? r2mask : r1mask;
151038fd1498Szrj 	const widest_int &o2val = swap_p ? r1val : r2val;
151138fd1498Szrj 	const widest_int &o2mask = swap_p ? r1mask : r2mask;
151238fd1498Szrj 
151338fd1498Szrj 	/* If the most significant bits are not known we know nothing.  */
151438fd1498Szrj 	if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
151538fd1498Szrj 	  break;
151638fd1498Szrj 
151738fd1498Szrj 	/* For comparisons the signedness is in the comparison operands.  */
151838fd1498Szrj 	sgn = r1type_sgn;
151938fd1498Szrj 
152038fd1498Szrj 	/* If we know the most significant bits we know the values
152138fd1498Szrj 	   value ranges by means of treating varying bits as zero
152238fd1498Szrj 	   or one.  Do a cross comparison of the max/min pairs.  */
152338fd1498Szrj 	maxmin = wi::cmp (o1val | o1mask,
152438fd1498Szrj 			  wi::bit_and_not (o2val, o2mask), sgn);
152538fd1498Szrj 	minmax = wi::cmp (wi::bit_and_not (o1val, o1mask),
152638fd1498Szrj 			  o2val | o2mask, sgn);
152738fd1498Szrj 	if (maxmin < 0)  /* o1 is less than o2.  */
152838fd1498Szrj 	  {
152938fd1498Szrj 	    *mask = 0;
153038fd1498Szrj 	    *val = 1;
153138fd1498Szrj 	  }
153238fd1498Szrj 	else if (minmax > 0)  /* o1 is not less or equal to o2.  */
153338fd1498Szrj 	  {
153438fd1498Szrj 	    *mask = 0;
153538fd1498Szrj 	    *val = 0;
153638fd1498Szrj 	  }
153738fd1498Szrj 	else if (maxmin == minmax)  /* o1 and o2 are equal.  */
153838fd1498Szrj 	  {
153938fd1498Szrj 	    /* This probably should never happen as we'd have
154038fd1498Szrj 	       folded the thing during fully constant value folding.  */
154138fd1498Szrj 	    *mask = 0;
154238fd1498Szrj 	    *val = (code == LE_EXPR ? 1 : 0);
154338fd1498Szrj 	  }
154438fd1498Szrj 	else
154538fd1498Szrj 	  {
154638fd1498Szrj 	    /* We know the result of a comparison is always one or zero.  */
154738fd1498Szrj 	    *mask = 1;
154838fd1498Szrj 	    *val = 0;
154938fd1498Szrj 	  }
155038fd1498Szrj 	break;
155138fd1498Szrj       }
155238fd1498Szrj 
155338fd1498Szrj     default:;
155438fd1498Szrj     }
155538fd1498Szrj }
155638fd1498Szrj 
155738fd1498Szrj /* Return the propagation value when applying the operation CODE to
155838fd1498Szrj    the value RHS yielding type TYPE.  */
155938fd1498Szrj 
156038fd1498Szrj static ccp_prop_value_t
bit_value_unop(enum tree_code code,tree type,tree rhs)156138fd1498Szrj bit_value_unop (enum tree_code code, tree type, tree rhs)
156238fd1498Szrj {
156338fd1498Szrj   ccp_prop_value_t rval = get_value_for_expr (rhs, true);
156438fd1498Szrj   widest_int value, mask;
156538fd1498Szrj   ccp_prop_value_t val;
156638fd1498Szrj 
156738fd1498Szrj   if (rval.lattice_val == UNDEFINED)
156838fd1498Szrj     return rval;
156938fd1498Szrj 
157038fd1498Szrj   gcc_assert ((rval.lattice_val == CONSTANT
157138fd1498Szrj 	       && TREE_CODE (rval.value) == INTEGER_CST)
157238fd1498Szrj 	      || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
157338fd1498Szrj   bit_value_unop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
157438fd1498Szrj 		  TYPE_SIGN (TREE_TYPE (rhs)), TYPE_PRECISION (TREE_TYPE (rhs)),
157538fd1498Szrj 		  value_to_wide_int (rval), rval.mask);
157638fd1498Szrj   if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
157738fd1498Szrj     {
157838fd1498Szrj       val.lattice_val = CONSTANT;
157938fd1498Szrj       val.mask = mask;
158038fd1498Szrj       /* ???  Delay building trees here.  */
158138fd1498Szrj       val.value = wide_int_to_tree (type, value);
158238fd1498Szrj     }
158338fd1498Szrj   else
158438fd1498Szrj     {
158538fd1498Szrj       val.lattice_val = VARYING;
158638fd1498Szrj       val.value = NULL_TREE;
158738fd1498Szrj       val.mask = -1;
158838fd1498Szrj     }
158938fd1498Szrj   return val;
159038fd1498Szrj }
159138fd1498Szrj 
159238fd1498Szrj /* Return the propagation value when applying the operation CODE to
159338fd1498Szrj    the values RHS1 and RHS2 yielding type TYPE.  */
159438fd1498Szrj 
159538fd1498Szrj static ccp_prop_value_t
bit_value_binop(enum tree_code code,tree type,tree rhs1,tree rhs2)159638fd1498Szrj bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
159738fd1498Szrj {
159838fd1498Szrj   ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
159938fd1498Szrj   ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
160038fd1498Szrj   widest_int value, mask;
160138fd1498Szrj   ccp_prop_value_t val;
160238fd1498Szrj 
160338fd1498Szrj   if (r1val.lattice_val == UNDEFINED
160438fd1498Szrj       || r2val.lattice_val == UNDEFINED)
160538fd1498Szrj     {
160638fd1498Szrj       val.lattice_val = VARYING;
160738fd1498Szrj       val.value = NULL_TREE;
160838fd1498Szrj       val.mask = -1;
160938fd1498Szrj       return val;
161038fd1498Szrj     }
161138fd1498Szrj 
161238fd1498Szrj   gcc_assert ((r1val.lattice_val == CONSTANT
161338fd1498Szrj 	       && TREE_CODE (r1val.value) == INTEGER_CST)
161438fd1498Szrj 	      || wi::sext (r1val.mask,
161538fd1498Szrj 			   TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
161638fd1498Szrj   gcc_assert ((r2val.lattice_val == CONSTANT
161738fd1498Szrj 	       && TREE_CODE (r2val.value) == INTEGER_CST)
161838fd1498Szrj 	      || wi::sext (r2val.mask,
161938fd1498Szrj 			   TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
162038fd1498Szrj   bit_value_binop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
162138fd1498Szrj 		   TYPE_SIGN (TREE_TYPE (rhs1)), TYPE_PRECISION (TREE_TYPE (rhs1)),
162238fd1498Szrj 		   value_to_wide_int (r1val), r1val.mask,
162338fd1498Szrj 		   TYPE_SIGN (TREE_TYPE (rhs2)), TYPE_PRECISION (TREE_TYPE (rhs2)),
162438fd1498Szrj 		   value_to_wide_int (r2val), r2val.mask);
162538fd1498Szrj 
162638fd1498Szrj   if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
162738fd1498Szrj     {
162838fd1498Szrj       val.lattice_val = CONSTANT;
162938fd1498Szrj       val.mask = mask;
163038fd1498Szrj       /* ???  Delay building trees here.  */
163138fd1498Szrj       val.value = wide_int_to_tree (type, value);
163238fd1498Szrj     }
163338fd1498Szrj   else
163438fd1498Szrj     {
163538fd1498Szrj       val.lattice_val = VARYING;
163638fd1498Szrj       val.value = NULL_TREE;
163738fd1498Szrj       val.mask = -1;
163838fd1498Szrj     }
163938fd1498Szrj   return val;
164038fd1498Szrj }
164138fd1498Szrj 
164238fd1498Szrj /* Return the propagation value for __builtin_assume_aligned
164338fd1498Szrj    and functions with assume_aligned or alloc_aligned attribute.
164438fd1498Szrj    For __builtin_assume_aligned, ATTR is NULL_TREE,
164538fd1498Szrj    for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
164638fd1498Szrj    is false, for alloc_aligned attribute ATTR is non-NULL and
164738fd1498Szrj    ALLOC_ALIGNED is true.  */
164838fd1498Szrj 
static ccp_prop_value_t
bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
			  bool alloc_aligned)
{
  tree align, misalign = NULL_TREE, type;
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  ccp_prop_value_t alignval;
  widest_int value, mask;
  ccp_prop_value_t val;

  if (attr == NULL_TREE)
    {
      /* __builtin_assume_aligned case: the pointer being aligned is the
	 first call argument; recompute PTRVAL from it.  */
      tree ptr = gimple_call_arg (stmt, 0);
      type = TREE_TYPE (ptr);
      ptrval = get_value_for_expr (ptr, true);
    }
  else
    {
      /* Attribute case: the aligned pointer is the call's result,
	 so its type comes from the LHS.  */
      tree lhs = gimple_call_lhs (stmt);
      type = TREE_TYPE (lhs);
    }

  /* Nothing to refine for an undefined pointer value.  */
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  /* PTRVAL is either an INTEGER_CST constant or fully varying
     (all mask bits set within TYPE's precision).  */
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
  if (attr == NULL_TREE)
    {
      /* Get aligni and misaligni from __builtin_assume_aligned.  */
      align = gimple_call_arg (stmt, 1);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      /* The third argument, if present, is the misalignment offset.  */
      if (gimple_call_num_args (stmt) > 2)
	{
	  misalign = gimple_call_arg (stmt, 2);
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  else
    {
      /* Get aligni and misaligni from assume_aligned or
	 alloc_align attributes.  */
      if (TREE_VALUE (attr) == NULL_TREE)
	return ptrval;
      attr = TREE_VALUE (attr);
      align = TREE_VALUE (attr);
      if (!tree_fits_uhwi_p (align))
	return ptrval;
      aligni = tree_to_uhwi (align);
      if (alloc_aligned)
	{
	  /* For alloc_align the attribute value is the 1-based index of
	     the call argument that holds the actual alignment.  */
	  if (aligni == 0 || aligni > gimple_call_num_args (stmt))
	    return ptrval;
	  align = gimple_call_arg (stmt, aligni - 1);
	  if (!tree_fits_uhwi_p (align))
	    return ptrval;
	  aligni = tree_to_uhwi (align);
	}
      else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
	{
	  /* assume_aligned may carry a second value: the misalignment.  */
	  misalign = TREE_VALUE (TREE_CHAIN (attr));
	  if (!tree_fits_uhwi_p (misalign))
	    return ptrval;
	  misaligni = tree_to_uhwi (misalign);
	}
    }
  /* Give up on useless alignments: align must be a power of two > 1
     and the misalignment must be smaller than the alignment.  */
  if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
    return ptrval;

  /* AND the pointer's bit lattice with -aligni, i.e. force the low
     log2(aligni) bits to known-zero.  */
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop (BIT_AND_EXPR, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
		   TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (ptrval), ptrval.mask,
		   TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (alignval), alignval.mask,
		   TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (alignval), alignval.mask);

  if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* The low bits must now be known zero; re-add the misalignment.  */
      gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
      gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
      value |= misaligni;
      /* ???  Delay building trees here.  */
      val.value = wide_int_to_tree (type, value);
    }
  else
    {
      /* No bit is known; fall back to VARYING.  */
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
    }
  return val;
}
174638fd1498Szrj 
174738fd1498Szrj /* Evaluate statement STMT.
174838fd1498Szrj    Valid only for assignments, calls, conditionals, and switches. */
174938fd1498Szrj 
static ccp_prop_value_t
evaluate_stmt (gimple *stmt)
{
  ccp_prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  /* Dump the predicted lattice value for debugging.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      /* Defer overflow warnings until we know whether folding
	 actually produced a constant (see fold_undefer below).  */
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      if (simplified
	  && TREE_CODE (simplified) == SSA_NAME)
	{
	  /* We may not use values of something that may be simulated again,
	     see valueize_op_1.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (simplified)
	      || ! prop_simulate_again_p (SSA_NAME_DEF_STMT (simplified)))
	    {
	      ccp_prop_value_t *val = get_value (simplified);
	      if (val && val->lattice_val != VARYING)
		{
		  fold_undefer_overflow_warnings (true, stmt, 0);
		  return *val;
		}
	    }
	  else
	    /* We may also not place a non-valueized copy in the lattice
	       as that might become stale if we never re-visit this stmt.  */
	    simplified = NULL_TREE;
	}
      is_constant = simplified && is_gimple_min_invariant (simplified);
      /* Emit (or discard) deferred overflow warnings now that we know
	 whether the result is a constant.  */
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	  return val;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (as_a <gswitch *> (stmt));
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = 0;
	}
    }
  /* If the statement result is likely UNDEFINED, make it so.  */
  else if (likelyvalue == UNDEFINED)
    {
      val.lattice_val = UNDEFINED;
      val.value = NULL_TREE;
      val.mask = 0;
      return val;
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt)
	  || (gimple_assign_single_p (stmt)
	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      /* Default to VARYING; the cases below may refine this.  */
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = -1;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  /* Bit tracking only applies to integral and pointer types.  */
	  if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || POINTER_TYPE_P (TREE_TYPE (lhs)))
	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1))))
	    switch (get_gimple_rhs_class (subcode))
	      {
	      case GIMPLE_SINGLE_RHS:
	        val = get_value_for_expr (rhs1, true);
		break;

	      case GIMPLE_UNARY_RHS:
		val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
		break;

	      case GIMPLE_BINARY_RHS:
		val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
				       gimple_assign_rhs2 (stmt));
		break;

	      default:;
	      }
	}
      else if (code == GIMPLE_COND)
	{
	  /* Conditions are evaluated as a comparison in the operand type.  */
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	{
	  /* Known builtins: derive alignment/value information from the
	     builtin's documented behavior.  */
	  tree fndecl = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      /* Allocators return pointers aligned to at least
		 MALLOC_ABI_ALIGNMENT: the low bits are known zero.  */
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
			   / BITS_PER_UNIT - 1);
	      break;

	    CASE_BUILT_IN_ALLOCA:
	      /* alloca is BIGGEST_ALIGNMENT-aligned; alloca_with_align
		 carries the alignment as its second argument.  */
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
		       ? BIGGEST_ALIGNMENT
		       : TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
	      break;

	    case BUILT_IN_ALIGNED_ALLOC:
	      {
		tree align = get_constant_value (gimple_call_arg (stmt, 0));
		if (align
		    && tree_fits_uhwi_p (align))
		  {
		    unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
		    if (aligni > 1
			/* align must be power-of-two */
			&& (aligni & (aligni - 1)) == 0)
		      {
			val.lattice_val = CONSTANT;
			val.value = build_int_cst (ptr_type_node, 0);
			val.mask = -aligni;
		      }
		  }
		break;
	      }

	    default:;
	    }
	}
      /* Refine call results further using assume_aligned/alloc_align
	 attributes on the called function type, if present.  */
      if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
	{
	  tree fntype = gimple_call_fntype (stmt);
	  if (fntype)
	    {
	      tree attrs = lookup_attribute ("assume_aligned",
					     TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, false);
	      attrs = lookup_attribute ("alloc_align",
					TYPE_ATTRIBUTES (fntype));
	      if (attrs)
		val = bit_value_assume_aligned (stmt, attrs, val, true);
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  /* Intersect the result with the SSA name's recorded nonzero-bits
     information, when the LHS is an SSA name of integral type.  */
  if (flag_tree_bit_ccp
      && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
	  || !is_constant)
      && gimple_get_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
    {
      tree lhs = gimple_get_lhs (stmt);
      wide_int nonzero_bits = get_nonzero_bits (lhs);
      if (nonzero_bits != -1)
	{
	  if (!is_constant)
	    {
	      /* No lattice information yet; the known-zero bits from
		 nonzero_bits alone give a partial constant.  */
	      val.lattice_val = CONSTANT;
	      val.value = build_zero_cst (TREE_TYPE (lhs));
	      val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (lhs)));
	      is_constant = true;
	    }
	  else
	    {
	      /* Clear value bits that nonzero_bits proves are zero and
		 narrow the unknown-bit mask accordingly.  */
	      if (wi::bit_and_not (wi::to_wide (val.value), nonzero_bits) != 0)
		val.value = wide_int_to_tree (TREE_TYPE (lhs),
					      nonzero_bits
					      & wi::to_wide (val.value));
	      if (nonzero_bits == 0)
		val.mask = 0;
	      else
		val.mask = val.mask & extend_mask (nonzero_bits,
						   TYPE_SIGN (TREE_TYPE (lhs)));
	    }
	}
    }

  /* The statement produced a nonconstant value.  */
  if (!is_constant)
    {
      /* The statement produced a copy.  */
      if (simplified && TREE_CODE (simplified) == SSA_NAME
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
	{
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = -1;
	}
      /* The statement is VARYING.  */
      else
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	  val.mask = -1;
	}
    }

  return val;
}
203938fd1498Szrj 
204038fd1498Szrj typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
204138fd1498Szrj 
204238fd1498Szrj /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
204338fd1498Szrj    each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */
204438fd1498Szrj 
static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
				     gimple_htab **visited)
{
  gimple *stmt;
  gassign *clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple **slot;

  /* Walk all immediate uses of SAVED_VAL, following copies and PHIs
     transitively until the matching stack_restore calls are found.  */
  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	/* An empty volatile CONSTRUCTOR assignment is GIMPLE's
	   representation of a variable clobber.  */
	clobber = build_constructor (TREE_TYPE (var),
				     NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	/* Lazily allocate the visited set; it guards against cycles in
	   the PHI web.  */
	if (!*visited)
	  *visited = new gimple_htab (10);

	slot = (*visited)->find_slot (stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	/* Recurse through the PHI result to reach restores that see the
	   saved value only via the PHI.  */
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else if (gimple_assign_ssa_name_copy_p (stmt))
      /* Likewise follow plain SSA-name copies of the saved value.  */
      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
					   visited);
    else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
      /* Pointer-bounds-checker bookkeeping calls are ignored.  */
      continue;
    else
      /* Any other use of the stack_save value would be a bug.  */
      gcc_assert (is_gimple_debug (stmt));
}
208838fd1498Szrj 
208938fd1498Szrj /* Advance the iterator to the previous non-debug gimple statement in the same
209038fd1498Szrj    or dominating basic block.  */
209138fd1498Szrj 
209238fd1498Szrj static inline void
gsi_prev_dom_bb_nondebug(gimple_stmt_iterator * i)209338fd1498Szrj gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
209438fd1498Szrj {
209538fd1498Szrj   basic_block dom;
209638fd1498Szrj 
209738fd1498Szrj   gsi_prev_nondebug (i);
209838fd1498Szrj   while (gsi_end_p (*i))
209938fd1498Szrj     {
210038fd1498Szrj       dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
210138fd1498Szrj       if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
210238fd1498Szrj 	return;
210338fd1498Szrj 
210438fd1498Szrj       *i = gsi_last_bb (dom);
210538fd1498Szrj     }
210638fd1498Szrj }
210738fd1498Szrj 
210838fd1498Szrj /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
210938fd1498Szrj    a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
211038fd1498Szrj 
   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
211238fd1498Szrj    previous pass (such as DOM) duplicated it along multiple paths to a BB.  In
211338fd1498Szrj    that case the function gives up without inserting the clobbers.  */
211438fd1498Szrj 
211538fd1498Szrj static void
insert_clobbers_for_var(gimple_stmt_iterator i,tree var)211638fd1498Szrj insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
211738fd1498Szrj {
211838fd1498Szrj   gimple *stmt;
211938fd1498Szrj   tree saved_val;
212038fd1498Szrj   gimple_htab *visited = NULL;
212138fd1498Szrj 
212238fd1498Szrj   for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
212338fd1498Szrj     {
212438fd1498Szrj       stmt = gsi_stmt (i);
212538fd1498Szrj 
212638fd1498Szrj       if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
212738fd1498Szrj 	continue;
212838fd1498Szrj 
212938fd1498Szrj       saved_val = gimple_call_lhs (stmt);
213038fd1498Szrj       if (saved_val == NULL_TREE)
213138fd1498Szrj 	continue;
213238fd1498Szrj 
213338fd1498Szrj       insert_clobber_before_stack_restore (saved_val, var, &visited);
213438fd1498Szrj       break;
213538fd1498Szrj     }
213638fd1498Szrj 
213738fd1498Szrj   delete visited;
213838fd1498Szrj }
213938fd1498Szrj 
214038fd1498Szrj /* Detects a __builtin_alloca_with_align with constant size argument.  Declares
214138fd1498Szrj    fixed-size array and returns the address, if found, otherwise returns
214238fd1498Szrj    NULL_TREE.  */
214338fd1498Szrj 
static tree
fold_builtin_alloca_with_align (gimple *stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  Without a result the alloca is dead and there is
     nothing to rewrite.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  The size must be a known, nonnegative
     host-representable integer.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !tree_fits_uhwi_p (arg))
    return NULL_TREE;

  size = tree_to_uhwi (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& block
        && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  The element is a one-unit unsigned integer, so
     N_ELEM works out to the byte size on common targets.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type);
  /* The second call argument carries the requested alignment in bits.  */
  SET_DECL_ALIGN (var, TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
  {
    /* Carry the points-to information of LHS over to the replacement
       variable so alias information stays precise.  */
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_or_null_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
219738fd1498Szrj 
219838fd1498Szrj /* Fold the stmt at *GSI with CCP specific information that propagating
219938fd1498Szrj    and regular folding does not catch.  */
220038fd1498Szrj 
bool
ccp_folder::fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	ccp_prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	/* A zero mask means all bits of the constant are known.  */
	if (val.lattice_val != CONSTANT
	    || val.mask != 0)
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (cond_stmt);
	else
	  gimple_cond_make_true (cond_stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

        /* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)
	    || gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX))
          {
            tree new_rhs = fold_builtin_alloca_with_align (stmt);
            if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		/* NEW_RHS has the shape (T) &VAR; dig out VAR so its
		   lifetime can be clobbered at matching stack restores.  */
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
          }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    /* VIEW_CONVERT_EXPR bridges type mismatches a plain
	       conversion could not express.  */
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
233638fd1498Szrj 
233738fd1498Szrj /* Visit the assignment statement STMT.  Set the value of its LHS to the
233838fd1498Szrj    value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
233938fd1498Szrj    creates virtual definitions, set the value of each new name to that
234038fd1498Szrj    of the RHS (if we can derive a constant out of the RHS).
234138fd1498Szrj    Value-returning call statements also perform an assignment, and
234238fd1498Szrj    are handled here.  */
234338fd1498Szrj 
234438fd1498Szrj static enum ssa_prop_result
visit_assignment(gimple * stmt,tree * output_p)234538fd1498Szrj visit_assignment (gimple *stmt, tree *output_p)
234638fd1498Szrj {
234738fd1498Szrj   ccp_prop_value_t val;
234838fd1498Szrj   enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
234938fd1498Szrj 
235038fd1498Szrj   tree lhs = gimple_get_lhs (stmt);
235138fd1498Szrj   if (TREE_CODE (lhs) == SSA_NAME)
235238fd1498Szrj     {
235338fd1498Szrj       /* Evaluate the statement, which could be
235438fd1498Szrj 	 either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
235538fd1498Szrj       val = evaluate_stmt (stmt);
235638fd1498Szrj 
235738fd1498Szrj       /* If STMT is an assignment to an SSA_NAME, we only have one
235838fd1498Szrj 	 value to set.  */
235938fd1498Szrj       if (set_lattice_value (lhs, &val))
236038fd1498Szrj 	{
236138fd1498Szrj 	  *output_p = lhs;
236238fd1498Szrj 	  if (val.lattice_val == VARYING)
236338fd1498Szrj 	    retval = SSA_PROP_VARYING;
236438fd1498Szrj 	  else
236538fd1498Szrj 	    retval = SSA_PROP_INTERESTING;
236638fd1498Szrj 	}
236738fd1498Szrj     }
236838fd1498Szrj 
236938fd1498Szrj   return retval;
237038fd1498Szrj }
237138fd1498Szrj 
237238fd1498Szrj 
237338fd1498Szrj /* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
237438fd1498Szrj    if it can determine which edge will be taken.  Otherwise, return
237538fd1498Szrj    SSA_PROP_VARYING.  */
237638fd1498Szrj 
237738fd1498Szrj static enum ssa_prop_result
visit_cond_stmt(gimple * stmt,edge * taken_edge_p)237838fd1498Szrj visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
237938fd1498Szrj {
238038fd1498Szrj   ccp_prop_value_t val;
238138fd1498Szrj   basic_block block;
238238fd1498Szrj 
238338fd1498Szrj   block = gimple_bb (stmt);
238438fd1498Szrj   val = evaluate_stmt (stmt);
238538fd1498Szrj   if (val.lattice_val != CONSTANT
238638fd1498Szrj       || val.mask != 0)
238738fd1498Szrj     return SSA_PROP_VARYING;
238838fd1498Szrj 
238938fd1498Szrj   /* Find which edge out of the conditional block will be taken and add it
239038fd1498Szrj      to the worklist.  If no single edge can be determined statically,
239138fd1498Szrj      return SSA_PROP_VARYING to feed all the outgoing edges to the
239238fd1498Szrj      propagation engine.  */
239338fd1498Szrj   *taken_edge_p = find_taken_edge (block, val.value);
239438fd1498Szrj   if (*taken_edge_p)
239538fd1498Szrj     return SSA_PROP_INTERESTING;
239638fd1498Szrj   else
239738fd1498Szrj     return SSA_PROP_VARYING;
239838fd1498Szrj }
239938fd1498Szrj 
240038fd1498Szrj 
240138fd1498Szrj /* Evaluate statement STMT.  If the statement produces an output value and
240238fd1498Szrj    its evaluation changes the lattice value of its output, return
240338fd1498Szrj    SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
240438fd1498Szrj    output value.
240538fd1498Szrj 
240638fd1498Szrj    If STMT is a conditional branch and we can determine its truth
240738fd1498Szrj    value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
240838fd1498Szrj    value, return SSA_PROP_VARYING.  */
240938fd1498Szrj 
241038fd1498Szrj enum ssa_prop_result
visit_stmt(gimple * stmt,edge * taken_edge_p,tree * output_p)241138fd1498Szrj ccp_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
241238fd1498Szrj {
241338fd1498Szrj   tree def;
241438fd1498Szrj   ssa_op_iter iter;
241538fd1498Szrj 
241638fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
241738fd1498Szrj     {
241838fd1498Szrj       fprintf (dump_file, "\nVisiting statement:\n");
241938fd1498Szrj       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
242038fd1498Szrj     }
242138fd1498Szrj 
242238fd1498Szrj   switch (gimple_code (stmt))
242338fd1498Szrj     {
242438fd1498Szrj       case GIMPLE_ASSIGN:
242538fd1498Szrj         /* If the statement is an assignment that produces a single
242638fd1498Szrj            output value, evaluate its RHS to see if the lattice value of
242738fd1498Szrj            its output has changed.  */
242838fd1498Szrj         return visit_assignment (stmt, output_p);
242938fd1498Szrj 
243038fd1498Szrj       case GIMPLE_CALL:
243138fd1498Szrj         /* A value-returning call also performs an assignment.  */
243238fd1498Szrj         if (gimple_call_lhs (stmt) != NULL_TREE)
243338fd1498Szrj           return visit_assignment (stmt, output_p);
243438fd1498Szrj         break;
243538fd1498Szrj 
243638fd1498Szrj       case GIMPLE_COND:
243738fd1498Szrj       case GIMPLE_SWITCH:
243838fd1498Szrj         /* If STMT is a conditional branch, see if we can determine
243938fd1498Szrj            which branch will be taken.   */
244038fd1498Szrj         /* FIXME.  It appears that we should be able to optimize
244138fd1498Szrj            computed GOTOs here as well.  */
244238fd1498Szrj         return visit_cond_stmt (stmt, taken_edge_p);
244338fd1498Szrj 
244438fd1498Szrj       default:
244538fd1498Szrj         break;
244638fd1498Szrj     }
244738fd1498Szrj 
244838fd1498Szrj   /* Any other kind of statement is not interesting for constant
244938fd1498Szrj      propagation and, therefore, not worth simulating.  */
245038fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
245138fd1498Szrj     fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");
245238fd1498Szrj 
245338fd1498Szrj   /* Definitions made by statements other than assignments to
245438fd1498Szrj      SSA_NAMEs represent unknown modifications to their outputs.
245538fd1498Szrj      Mark them VARYING.  */
245638fd1498Szrj   FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
245738fd1498Szrj     set_value_varying (def);
245838fd1498Szrj 
245938fd1498Szrj   return SSA_PROP_VARYING;
246038fd1498Szrj }
246138fd1498Szrj 
246238fd1498Szrj 
246338fd1498Szrj /* Main entry point for SSA Conditional Constant Propagation.  If NONZERO_P,
246438fd1498Szrj    record nonzero bits.  */
246538fd1498Szrj 
246638fd1498Szrj static unsigned int
do_ssa_ccp(bool nonzero_p)246738fd1498Szrj do_ssa_ccp (bool nonzero_p)
246838fd1498Szrj {
246938fd1498Szrj   unsigned int todo = 0;
247038fd1498Szrj   calculate_dominance_info (CDI_DOMINATORS);
247138fd1498Szrj 
247238fd1498Szrj   ccp_initialize ();
247338fd1498Szrj   class ccp_propagate ccp_propagate;
247438fd1498Szrj   ccp_propagate.ssa_propagate ();
247538fd1498Szrj   if (ccp_finalize (nonzero_p || flag_ipa_bit_cp))
247638fd1498Szrj     {
247738fd1498Szrj       todo = (TODO_cleanup_cfg | TODO_update_ssa);
247838fd1498Szrj 
247938fd1498Szrj       /* ccp_finalize does not preserve loop-closed ssa.  */
248038fd1498Szrj       loops_state_clear (LOOP_CLOSED_SSA);
248138fd1498Szrj     }
248238fd1498Szrj 
248338fd1498Szrj   free_dominance_info (CDI_DOMINATORS);
248438fd1498Szrj   return todo;
248538fd1498Szrj }
248638fd1498Szrj 
248738fd1498Szrj 
namespace {

const pass_data pass_data_ccp =
{
  GIMPLE_PASS, /* type */
  "ccp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CCP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Pass wrapper around do_ssa_ccp.  Pass parameter 0 selects whether
   this instance also records nonzero-bits information.  */

class pass_ccp : public gimple_opt_pass
{
public:
  pass_ccp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_ccp (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      /* Only parameter 0 (NONZERO_P) exists.  */
      gcc_assert (n == 0);
      nonzero_p = param;
    }
  virtual bool gate (function *) { return flag_tree_ccp != 0; }
  virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); }

 private:
  /* Determines whether the pass instance records nonzero bits.  */
  bool nonzero_p;
}; // class pass_ccp

} // anon namespace
252638fd1498Szrj 
/* Factory used by the pass manager to instantiate the CCP pass.  */

gimple_opt_pass *
make_pass_ccp (gcc::context *ctxt)
{
  return new pass_ccp (ctxt);
}
253238fd1498Szrj 
253338fd1498Szrj 
253438fd1498Szrj 
253538fd1498Szrj /* Try to optimize out __builtin_stack_restore.  Optimize it out
253638fd1498Szrj    if there is another __builtin_stack_restore in the same basic
253738fd1498Szrj    block and no calls or ASM_EXPRs are in between, or if this block's
253838fd1498Szrj    only outgoing edge is to EXIT_BLOCK and there are no calls or
253938fd1498Szrj    ASM_EXPRs after this __builtin_stack_restore.  */
254038fd1498Szrj 
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple *stmt;

  basic_block bb = gsi_bb (i);
  gimple *call = gsi_stmt (i);

  /* Verify this looks like __builtin_stack_restore (ptr).  */
  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  /* Scan forward in the block; give up on asms and on any call that
     might touch the stack pointer.  */
  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee)))
	return NULL_TREE;

      /* A later restore in the same block makes this one redundant.  */
      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      /* Replace the stack save by assigning zero to its LHS;
		 the now-dead statement is left for later cleanup.  */
	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
261938fd1498Szrj 
262038fd1498Szrj /* If va_list type is a simple pointer and nothing special is needed,
262138fd1498Szrj    optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
262238fd1498Szrj    __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
262338fd1498Szrj    pointer assignment.  */
262438fd1498Szrj 
static tree
optimize_stdarg_builtin (gimple *call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  /* The optimization only applies when the target's va_list is a bare
     char*/void* pointer with no bookkeeping structure.  */
  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      /* A target-specific va_start expander or a missing next_arg
	 builtin rules out the simple rewrite.  */
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      /* The first argument must be a pointer to the ABI va_list type.  */
      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      /* Rewrite to *ap = __builtin_next_arg (0).  */
      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                             1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      /* Rewrite to the plain pointer copy *dst = src.  */
      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
269538fd1498Szrj 
/* Attempt to make the block of __builtin_unreachable I unreachable by changing
269738fd1498Szrj    the incoming jumps.  Return true if at least one jump was changed.  */
269838fd1498Szrj 
static bool
optimize_unreachable (gimple_stmt_iterator i)
{
  basic_block bb = gsi_bb (i);
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge_iterator ei;
  edge e;
  bool ret;

  /* Keep __builtin_unreachable reachable so the sanitizer can report
     when it is actually hit.  */
  if (flag_sanitize & SANITIZE_UNREACHABLE)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
       continue;

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Verify we do not need to preserve the label.  */
	  if (FORCED_LABEL (gimple_label_label (label_stmt)))
	    return false;

	  continue;
	}

      /* Only handle the case that __builtin_unreachable is the first statement
	 in the block.  We rely on DCE to remove stmts without side-effects
	 before __builtin_unreachable.  */
      if (gsi_stmt (gsi) != gsi_stmt (i))
        return false;
    }

  /* Redirect each conditional predecessor so the edge into this block
     is never taken.  */
  ret = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);
      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
	{
	  /* Force the condition to the value that avoids this edge.  */
	  if (e->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (cond_stmt);
	  else if (e->flags & EDGE_FALSE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gcc_unreachable ();
	  update_stmt (cond_stmt);
	}
      else
	{
	  /* Todo: handle other cases.  Note that unreachable switch case
	     statements have already been removed.  */
	  continue;
	}

      ret = true;
    }

  return ret;
}
276538fd1498Szrj 
276638fd1498Szrj /* Optimize
276738fd1498Szrj      mask_2 = 1 << cnt_1;
276838fd1498Szrj      _4 = __atomic_fetch_or_* (ptr_6, mask_2, _3);
276938fd1498Szrj      _5 = _4 & mask_2;
277038fd1498Szrj    to
277138fd1498Szrj      _4 = ATOMIC_BIT_TEST_AND_SET (ptr_6, cnt_1, 0, _3);
277238fd1498Szrj      _5 = _4;
277338fd1498Szrj    If _5 is only used in _5 != 0 or _5 == 0 comparisons, 1
277438fd1498Szrj    is passed instead of 0, and the builtin just returns a zero
277538fd1498Szrj    or 1 value instead of the actual bit.
277638fd1498Szrj    Similarly for __sync_fetch_and_or_* (without the ", _3" part
277738fd1498Szrj    in there), and/or if mask_2 is a power of 2 constant.
277838fd1498Szrj    Similarly for xor instead of or, use ATOMIC_BIT_TEST_AND_COMPLEMENT
277938fd1498Szrj    in that case.  And similarly for and instead of or, except that
278038fd1498Szrj    the second argument to the builtin needs to be one's complement
278138fd1498Szrj    of the mask instead of mask.  */
278238fd1498Szrj 
278338fd1498Szrj static void
optimize_atomic_bit_test_and(gimple_stmt_iterator * gsip,enum internal_fn fn,bool has_model_arg,bool after)278438fd1498Szrj optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
278538fd1498Szrj 			      enum internal_fn fn, bool has_model_arg,
278638fd1498Szrj 			      bool after)
278738fd1498Szrj {
278838fd1498Szrj   gimple *call = gsi_stmt (*gsip);
278938fd1498Szrj   tree lhs = gimple_call_lhs (call);
279038fd1498Szrj   use_operand_p use_p;
279138fd1498Szrj   gimple *use_stmt;
279238fd1498Szrj   tree mask, bit;
279338fd1498Szrj   optab optab;
279438fd1498Szrj 
  /* Give up unless the call result feeds exactly one BIT_AND_EXPR and the
     call has a virtual definition we can transfer to the replacement.  */
279538fd1498Szrj   if (!flag_inline_atomics
279638fd1498Szrj       || optimize_debug
279738fd1498Szrj       || !gimple_call_builtin_p (call, BUILT_IN_NORMAL)
279838fd1498Szrj       || !lhs
279938fd1498Szrj       || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
280038fd1498Szrj       || !single_imm_use (lhs, &use_p, &use_stmt)
280138fd1498Szrj       || !is_gimple_assign (use_stmt)
280238fd1498Szrj       || gimple_assign_rhs_code (use_stmt) != BIT_AND_EXPR
280338fd1498Szrj       || !gimple_vdef (call))
280438fd1498Szrj     return;
280538fd1498Szrj 
280638fd1498Szrj   switch (fn)
280738fd1498Szrj     {
280838fd1498Szrj     case IFN_ATOMIC_BIT_TEST_AND_SET:
280938fd1498Szrj       optab = atomic_bit_test_and_set_optab;
281038fd1498Szrj       break;
281138fd1498Szrj     case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
281238fd1498Szrj       optab = atomic_bit_test_and_complement_optab;
281338fd1498Szrj       break;
281438fd1498Szrj     case IFN_ATOMIC_BIT_TEST_AND_RESET:
281538fd1498Szrj       optab = atomic_bit_test_and_reset_optab;
281638fd1498Szrj       break;
281738fd1498Szrj     default:
281838fd1498Szrj       return;
281938fd1498Szrj     }
282038fd1498Szrj 
  /* The target must provide the corresponding bit-test-and-* pattern
     for this mode.  */
282138fd1498Szrj   if (optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs))) == CODE_FOR_nothing)
282238fd1498Szrj     return;
282338fd1498Szrj 
282438fd1498Szrj   mask = gimple_call_arg (call, 1);
282538fd1498Szrj   tree use_lhs = gimple_assign_lhs (use_stmt);
282638fd1498Szrj   if (!use_lhs)
282738fd1498Szrj     return;
282838fd1498Szrj 
  /* Derive the bit position BIT from the mask, which must be either a
     power-of-two constant or a (possibly BIT_NOT-wrapped, for the RESET
     case) 1 << N shift.  */
282938fd1498Szrj   if (TREE_CODE (mask) == INTEGER_CST)
283038fd1498Szrj     {
283138fd1498Szrj       if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
283238fd1498Szrj 	mask = const_unop (BIT_NOT_EXPR, TREE_TYPE (mask), mask);
283338fd1498Szrj       mask = fold_convert (TREE_TYPE (lhs), mask);
283438fd1498Szrj       int ibit = tree_log2 (mask);
283538fd1498Szrj       if (ibit < 0)
283638fd1498Szrj 	return;
283738fd1498Szrj       bit = build_int_cst (TREE_TYPE (lhs), ibit);
283838fd1498Szrj     }
283938fd1498Szrj   else if (TREE_CODE (mask) == SSA_NAME)
284038fd1498Szrj     {
284138fd1498Szrj       gimple *g = SSA_NAME_DEF_STMT (mask);
284238fd1498Szrj       if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
284338fd1498Szrj 	{
284438fd1498Szrj 	  if (!is_gimple_assign (g)
284538fd1498Szrj 	      || gimple_assign_rhs_code (g) != BIT_NOT_EXPR)
284638fd1498Szrj 	    return;
284738fd1498Szrj 	  mask = gimple_assign_rhs1 (g);
284838fd1498Szrj 	  if (TREE_CODE (mask) != SSA_NAME)
284938fd1498Szrj 	    return;
285038fd1498Szrj 	  g = SSA_NAME_DEF_STMT (mask);
285138fd1498Szrj 	}
285238fd1498Szrj       if (!is_gimple_assign (g)
285338fd1498Szrj 	  || gimple_assign_rhs_code (g) != LSHIFT_EXPR
285438fd1498Szrj 	  || !integer_onep (gimple_assign_rhs1 (g)))
285538fd1498Szrj 	return;
285638fd1498Szrj       bit = gimple_assign_rhs2 (g);
285738fd1498Szrj     }
285838fd1498Szrj   else
285938fd1498Szrj     return;
286038fd1498Szrj 
  /* Verify the other operand of the BIT_AND_EXPR is the same mask.  */
286138fd1498Szrj   if (gimple_assign_rhs1 (use_stmt) == lhs)
286238fd1498Szrj     {
286338fd1498Szrj       if (!operand_equal_p (gimple_assign_rhs2 (use_stmt), mask, 0))
286438fd1498Szrj 	return;
286538fd1498Szrj     }
286638fd1498Szrj   else if (gimple_assign_rhs2 (use_stmt) != lhs
286738fd1498Szrj 	   || !operand_equal_p (gimple_assign_rhs1 (use_stmt), mask, 0))
286838fd1498Szrj     return;
286938fd1498Szrj 
  /* USE_BOOL stays true when every use of the AND result merely compares it
     against zero; then the internal function can be told (flag 1) to return
     a 0/1 value instead of the actual bit (see the header comment).  */
287038fd1498Szrj   bool use_bool = true;
287138fd1498Szrj   bool has_debug_uses = false;
287238fd1498Szrj   imm_use_iterator iter;
287338fd1498Szrj   gimple *g;
287438fd1498Szrj 
287538fd1498Szrj   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs))
287638fd1498Szrj     use_bool = false;
287738fd1498Szrj   FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
287838fd1498Szrj     {
287938fd1498Szrj       enum tree_code code = ERROR_MARK;
288038fd1498Szrj       tree op0 = NULL_TREE, op1 = NULL_TREE;
288138fd1498Szrj       if (is_gimple_debug (g))
288238fd1498Szrj 	{
288338fd1498Szrj 	  has_debug_uses = true;
288438fd1498Szrj 	  continue;
288538fd1498Szrj 	}
288638fd1498Szrj       else if (is_gimple_assign (g))
288738fd1498Szrj 	switch (gimple_assign_rhs_code (g))
288838fd1498Szrj 	  {
288938fd1498Szrj 	  case COND_EXPR:
289038fd1498Szrj 	    op1 = gimple_assign_rhs1 (g);
289138fd1498Szrj 	    code = TREE_CODE (op1);
289238fd1498Szrj 	    op0 = TREE_OPERAND (op1, 0);
289338fd1498Szrj 	    op1 = TREE_OPERAND (op1, 1);
289438fd1498Szrj 	    break;
289538fd1498Szrj 	  case EQ_EXPR:
289638fd1498Szrj 	  case NE_EXPR:
289738fd1498Szrj 	    code = gimple_assign_rhs_code (g);
289838fd1498Szrj 	    op0 = gimple_assign_rhs1 (g);
289938fd1498Szrj 	    op1 = gimple_assign_rhs2 (g);
290038fd1498Szrj 	    break;
290138fd1498Szrj 	  default:
290238fd1498Szrj 	    break;
290338fd1498Szrj 	  }
290438fd1498Szrj       else if (gimple_code (g) == GIMPLE_COND)
290538fd1498Szrj 	{
290638fd1498Szrj 	  code = gimple_cond_code (g);
290738fd1498Szrj 	  op0 = gimple_cond_lhs (g);
290838fd1498Szrj 	  op1 = gimple_cond_rhs (g);
290938fd1498Szrj 	}
291038fd1498Szrj 
      /* Accept only use_lhs ==/!= 0 where use_lhs appears exactly once
	 in the comparison.  */
291138fd1498Szrj       if ((code == EQ_EXPR || code == NE_EXPR)
291238fd1498Szrj 	  && op0 == use_lhs
291338fd1498Szrj 	  && integer_zerop (op1))
291438fd1498Szrj 	{
291538fd1498Szrj 	  use_operand_p use_p;
291638fd1498Szrj 	  int n = 0;
291738fd1498Szrj 	  FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
291838fd1498Szrj 	    n++;
291938fd1498Szrj 	  if (n == 1)
292038fd1498Szrj 	    continue;
292138fd1498Szrj 	}
292238fd1498Szrj 
292338fd1498Szrj       use_bool = false;
292438fd1498Szrj       BREAK_FROM_IMM_USE_STMT (iter);
292538fd1498Szrj     }
292638fd1498Szrj 
  /* Build the replacement internal call and insert it right after the
     original call, transferring virtual operands, location and EH
     properties to it.  */
292738fd1498Szrj   tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
292838fd1498Szrj   tree flag = build_int_cst (TREE_TYPE (lhs), use_bool);
292938fd1498Szrj   if (has_model_arg)
293038fd1498Szrj     g = gimple_build_call_internal (fn, 4, gimple_call_arg (call, 0),
293138fd1498Szrj 				    bit, flag, gimple_call_arg (call, 2));
293238fd1498Szrj   else
293338fd1498Szrj     g = gimple_build_call_internal (fn, 3, gimple_call_arg (call, 0),
293438fd1498Szrj 				    bit, flag);
293538fd1498Szrj   gimple_call_set_lhs (g, new_lhs);
293638fd1498Szrj   gimple_set_location (g, gimple_location (call));
293738fd1498Szrj   gimple_set_vuse (g, gimple_vuse (call));
293838fd1498Szrj   gimple_set_vdef (g, gimple_vdef (call));
293938fd1498Szrj   bool throws = stmt_can_throw_internal (call);
294038fd1498Szrj   gimple_call_set_nothrow (as_a <gcall *> (g),
294138fd1498Szrj 			   gimple_call_nothrow_p (as_a <gcall *> (call)));
294238fd1498Szrj   SSA_NAME_DEF_STMT (gimple_vdef (call)) = g;
294338fd1498Szrj   gimple_stmt_iterator gsi = *gsip;
294438fd1498Szrj   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
294538fd1498Szrj   edge e = NULL;
294638fd1498Szrj   if (throws)
294738fd1498Szrj     {
294838fd1498Szrj       maybe_clean_or_replace_eh_stmt (call, g);
      /* If the call can throw, later insertions have to go on the
	 fallthru edge rather than after the block-terminating call.  */
294938fd1498Szrj       if (after || (use_bool && has_debug_uses))
295038fd1498Szrj 	e = find_fallthru_edge (gsi_bb (gsi)->succs);
295138fd1498Szrj     }
295238fd1498Szrj   if (after)
295338fd1498Szrj     {
295438fd1498Szrj       /* The internal function returns the value of the specified bit
295538fd1498Szrj 	 before the atomic operation.  If we are interested in the value
295638fd1498Szrj 	 of the specified bit after the atomic operation (makes only sense
295738fd1498Szrj 	 for xor, otherwise the bit content is compile time known),
295838fd1498Szrj 	 we need to invert the bit.  */
295938fd1498Szrj       g = gimple_build_assign (make_ssa_name (TREE_TYPE (lhs)),
296038fd1498Szrj 			       BIT_XOR_EXPR, new_lhs,
296138fd1498Szrj 			       use_bool ? build_int_cst (TREE_TYPE (lhs), 1)
296238fd1498Szrj 					: mask);
296338fd1498Szrj       new_lhs = gimple_assign_lhs (g);
296438fd1498Szrj       if (throws)
296538fd1498Szrj 	{
296638fd1498Szrj 	  gsi_insert_on_edge_immediate (e, g);
296738fd1498Szrj 	  gsi = gsi_for_stmt (g);
296838fd1498Szrj 	}
296938fd1498Szrj       else
297038fd1498Szrj 	gsi_insert_after (&gsi, g, GSI_NEW_STMT);
297138fd1498Szrj     }
  /* When the result was narrowed to a 0/1 value, debug stmts still refer
     to the full masked value; bind them to (new_lhs << bit) via a debug
     temporary instead.  */
297238fd1498Szrj   if (use_bool && has_debug_uses)
297338fd1498Szrj     {
297438fd1498Szrj       tree temp = NULL_TREE;
297538fd1498Szrj       if (!throws || after || single_pred_p (e->dest))
297638fd1498Szrj 	{
297738fd1498Szrj 	  temp = make_node (DEBUG_EXPR_DECL);
297838fd1498Szrj 	  DECL_ARTIFICIAL (temp) = 1;
297938fd1498Szrj 	  TREE_TYPE (temp) = TREE_TYPE (lhs);
298038fd1498Szrj 	  SET_DECL_MODE (temp, TYPE_MODE (TREE_TYPE (lhs)));
298138fd1498Szrj 	  tree t = build2 (LSHIFT_EXPR, TREE_TYPE (lhs), new_lhs, bit);
298238fd1498Szrj 	  g = gimple_build_debug_bind (temp, t, g);
298338fd1498Szrj 	  if (throws && !after)
298438fd1498Szrj 	    {
298538fd1498Szrj 	      gsi = gsi_after_labels (e->dest);
298638fd1498Szrj 	      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
298738fd1498Szrj 	    }
298838fd1498Szrj 	  else
298938fd1498Szrj 	    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
299038fd1498Szrj 	}
299138fd1498Szrj       FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
299238fd1498Szrj 	if (is_gimple_debug (g))
299338fd1498Szrj 	  {
299438fd1498Szrj 	    use_operand_p use_p;
299538fd1498Szrj 	    if (temp == NULL_TREE)
299638fd1498Szrj 	      gimple_debug_bind_reset_value (g);
299738fd1498Szrj 	    else
299838fd1498Szrj 	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
299938fd1498Szrj 		SET_USE (use_p, temp);
300038fd1498Szrj 	    update_stmt (g);
300138fd1498Szrj 	  }
300238fd1498Szrj     }
300338fd1498Szrj   SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_lhs)
300438fd1498Szrj     = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs)
  /* Redirect all uses of the AND result to the new call's (possibly
     inverted) result, then delete the old AND statement and the original
     builtin call.  */;
300538fd1498Szrj   replace_uses_by (use_lhs, new_lhs);
300638fd1498Szrj   gsi = gsi_for_stmt (use_stmt);
300738fd1498Szrj   gsi_remove (&gsi, true);
300838fd1498Szrj   release_defs (use_stmt);
300938fd1498Szrj   gsi_remove (gsip, true);
301038fd1498Szrj   release_ssa_name (lhs);
301138fd1498Szrj }
301238fd1498Szrj 
301338fd1498Szrj /* Optimize
301438fd1498Szrj    a = {};
301538fd1498Szrj    b = a;
301638fd1498Szrj    into
301738fd1498Szrj    a = {};
301838fd1498Szrj    b = {};
301938fd1498Szrj    Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
302038fd1498Szrj    and/or memcpy (&b, &a, sizeof (a)); instead of b = a;  */
302138fd1498Szrj 
302238fd1498Szrj static void
optimize_memcpy(gimple_stmt_iterator * gsip,tree dest,tree src,tree len)302338fd1498Szrj optimize_memcpy (gimple_stmt_iterator *gsip, tree dest, tree src, tree len)
302438fd1498Szrj {
302538fd1498Szrj   gimple *stmt = gsi_stmt (*gsip);
302638fd1498Szrj   if (gimple_has_volatile_ops (stmt))
302738fd1498Szrj     return;
302838fd1498Szrj 
302938fd1498Szrj   tree vuse = gimple_vuse (stmt);
303038fd1498Szrj   if (vuse == NULL)
303138fd1498Szrj     return;
303238fd1498Szrj 
  /* Walk to the previous store through the virtual use chain and recognize
     it either as an aggregate assignment from an empty CONSTRUCTOR
     (a = {}) or as a memset of a constant value VAL with length LEN2.  */
303338fd1498Szrj   gimple *defstmt = SSA_NAME_DEF_STMT (vuse);
303438fd1498Szrj   tree src2 = NULL_TREE, len2 = NULL_TREE;
303538fd1498Szrj   poly_int64 offset, offset2;
303638fd1498Szrj   tree val = integer_zero_node;
303738fd1498Szrj   if (gimple_store_p (defstmt)
303838fd1498Szrj       && gimple_assign_single_p (defstmt)
303938fd1498Szrj       && TREE_CODE (gimple_assign_rhs1 (defstmt)) == CONSTRUCTOR
304038fd1498Szrj       && !gimple_clobber_p (defstmt))
304138fd1498Szrj     src2 = gimple_assign_lhs (defstmt);
304238fd1498Szrj   else if (gimple_call_builtin_p (defstmt, BUILT_IN_MEMSET)
304338fd1498Szrj 	   && TREE_CODE (gimple_call_arg (defstmt, 0)) == ADDR_EXPR
304438fd1498Szrj 	   && TREE_CODE (gimple_call_arg (defstmt, 1)) == INTEGER_CST)
304538fd1498Szrj     {
304638fd1498Szrj       src2 = TREE_OPERAND (gimple_call_arg (defstmt, 0), 0);
304738fd1498Szrj       len2 = gimple_call_arg (defstmt, 2);
304838fd1498Szrj       val = gimple_call_arg (defstmt, 1);
304938fd1498Szrj       /* For non-0 val, we'd have to transform stmt from assignment
305038fd1498Szrj 	 into memset (only if dest is addressable).  */
305138fd1498Szrj       if (!integer_zerop (val) && is_gimple_assign (stmt))
305238fd1498Szrj 	src2 = NULL_TREE;
305338fd1498Szrj     }
305438fd1498Szrj 
305538fd1498Szrj   if (src2 == NULL_TREE)
305638fd1498Szrj     return;
305738fd1498Szrj 
  /* When no explicit length was passed (the aggregate copy case), use the
     size of the accessed field or object.  Both lengths must be known
     (poly-)integer constants for the subset check below.  */
305838fd1498Szrj   if (len == NULL_TREE)
305938fd1498Szrj     len = (TREE_CODE (src) == COMPONENT_REF
306038fd1498Szrj 	   ? DECL_SIZE_UNIT (TREE_OPERAND (src, 1))
306138fd1498Szrj 	   : TYPE_SIZE_UNIT (TREE_TYPE (src)));
306238fd1498Szrj   if (len2 == NULL_TREE)
306338fd1498Szrj     len2 = (TREE_CODE (src2) == COMPONENT_REF
306438fd1498Szrj 	    ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
306538fd1498Szrj 	    : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
306638fd1498Szrj   if (len == NULL_TREE
306738fd1498Szrj       || !poly_int_tree_p (len)
306838fd1498Szrj       || len2 == NULL_TREE
306938fd1498Szrj       || !poly_int_tree_p (len2))
307038fd1498Szrj     return;
307138fd1498Szrj 
  /* Both accesses must be based on the same object, with the read not
     starting before the earlier store.  */
307238fd1498Szrj   src = get_addr_base_and_unit_offset (src, &offset);
307338fd1498Szrj   src2 = get_addr_base_and_unit_offset (src2, &offset2);
307438fd1498Szrj   if (src == NULL_TREE
307538fd1498Szrj       || src2 == NULL_TREE
307638fd1498Szrj       || maybe_lt (offset, offset2))
307738fd1498Szrj     return;
307838fd1498Szrj 
307938fd1498Szrj   if (!operand_equal_p (src, src2, 0))
308038fd1498Szrj     return;
308138fd1498Szrj 
308238fd1498Szrj   /* [ src + offset2, src + offset2 + len2 - 1 ] is set to val.
308338fd1498Szrj      Make sure that
308438fd1498Szrj      [ src + offset, src + offset + len - 1 ] is a subset of that.  */
308538fd1498Szrj   if (maybe_gt (wi::to_poly_offset (len) + (offset - offset2),
308638fd1498Szrj 		wi::to_poly_offset (len2)))
308738fd1498Szrj     return;
308838fd1498Szrj 
308938fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
309038fd1498Szrj     {
309138fd1498Szrj       fprintf (dump_file, "Simplified\n  ");
309238fd1498Szrj       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
309338fd1498Szrj       fprintf (dump_file, "after previous\n  ");
309438fd1498Szrj       print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
309538fd1498Szrj     }
309638fd1498Szrj 
309738fd1498Szrj   /* For simplicity, don't change the kind of the stmt,
309838fd1498Szrj      turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
309938fd1498Szrj      into memset (&dest, val, len);
310038fd1498Szrj      In theory we could change dest = src into memset if dest
310138fd1498Szrj      is addressable (maybe beneficial if val is not 0), or
310238fd1498Szrj      memcpy (&dest, &src, len) into dest = {} if len is the size
310338fd1498Szrj      of dest, dest isn't volatile.  */
310438fd1498Szrj   if (is_gimple_assign (stmt))
310538fd1498Szrj     {
310638fd1498Szrj       tree ctor = build_constructor (TREE_TYPE (dest), NULL);
310738fd1498Szrj       gimple_assign_set_rhs_from_tree (gsip, ctor);
310838fd1498Szrj       update_stmt (stmt);
310938fd1498Szrj     }
311038fd1498Szrj   else /* If stmt is memcpy, transform it into memset.  */
311138fd1498Szrj     {
311238fd1498Szrj       gcall *call = as_a <gcall *> (stmt);
311338fd1498Szrj       tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
311438fd1498Szrj       gimple_call_set_fndecl (call, fndecl);
311538fd1498Szrj       gimple_call_set_fntype (call, TREE_TYPE (fndecl));
311638fd1498Szrj       gimple_call_set_arg (call, 1, val);
311738fd1498Szrj       update_stmt (stmt);
311838fd1498Szrj     }
311938fd1498Szrj 
312038fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
312138fd1498Szrj     {
312238fd1498Szrj       fprintf (dump_file, "into\n  ");
312338fd1498Szrj       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
312438fd1498Szrj     }
312538fd1498Szrj }
312638fd1498Szrj 
312738fd1498Szrj /* A simple pass that attempts to fold all builtin functions.  This pass
312838fd1498Szrj    is run after we've propagated as many constants as we can.  */
312938fd1498Szrj 
313038fd1498Szrj namespace {
313138fd1498Szrj 
/* Pass descriptor for the "fab" (fold all builtins) pass.  */
313238fd1498Szrj const pass_data pass_data_fold_builtins =
313338fd1498Szrj {
313438fd1498Szrj   GIMPLE_PASS, /* type */
313538fd1498Szrj   "fab", /* name */
313638fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
313738fd1498Szrj   TV_NONE, /* tv_id */
313838fd1498Szrj   ( PROP_cfg | PROP_ssa ), /* properties_required */
313938fd1498Szrj   0, /* properties_provided */
314038fd1498Szrj   0, /* properties_destroyed */
314138fd1498Szrj   0, /* todo_flags_start */
314238fd1498Szrj   TODO_update_ssa, /* todo_flags_finish */
314338fd1498Szrj };
314438fd1498Szrj 
/* Gimple pass wrapper for pass_data_fold_builtins; all the work is done
   in the execute method below.  */
314538fd1498Szrj class pass_fold_builtins : public gimple_opt_pass
314638fd1498Szrj {
314738fd1498Szrj public:
pass_fold_builtins(gcc::context * ctxt)314838fd1498Szrj   pass_fold_builtins (gcc::context *ctxt)
314938fd1498Szrj     : gimple_opt_pass (pass_data_fold_builtins, ctxt)
315038fd1498Szrj   {}
315138fd1498Szrj 
315238fd1498Szrj   /* opt_pass methods: */
clone()315338fd1498Szrj   opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
315438fd1498Szrj   virtual unsigned int execute (function *);
315538fd1498Szrj 
315638fd1498Szrj }; // class pass_fold_builtins
315738fd1498Szrj 
315838fd1498Szrj unsigned int
execute(function * fun)315938fd1498Szrj pass_fold_builtins::execute (function *fun)
316038fd1498Szrj {
316138fd1498Szrj   bool cfg_changed = false;
316238fd1498Szrj   basic_block bb;
316338fd1498Szrj   unsigned int todoflags = 0;
316438fd1498Szrj 
316538fd1498Szrj   FOR_EACH_BB_FN (bb, fun)
316638fd1498Szrj     {
316738fd1498Szrj       gimple_stmt_iterator i;
316838fd1498Szrj       for (i = gsi_start_bb (bb); !gsi_end_p (i); )
316938fd1498Szrj 	{
317038fd1498Szrj 	  gimple *stmt, *old_stmt;
317138fd1498Szrj 	  tree callee;
317238fd1498Szrj 	  enum built_in_function fcode;
317338fd1498Szrj 
317438fd1498Szrj 	  stmt = gsi_stmt (i);
317538fd1498Szrj 
317638fd1498Szrj           if (gimple_code (stmt) != GIMPLE_CALL)
317738fd1498Szrj 	    {
317838fd1498Szrj 	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
317938fd1498Szrj 		 after the last GIMPLE DSE they aren't needed and might
318038fd1498Szrj 		 unnecessarily keep the SSA_NAMEs live.  */
318138fd1498Szrj 	      if (gimple_clobber_p (stmt))
318238fd1498Szrj 		{
318338fd1498Szrj 		  tree lhs = gimple_assign_lhs (stmt);
318438fd1498Szrj 		  if (TREE_CODE (lhs) == MEM_REF
318538fd1498Szrj 		      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
318638fd1498Szrj 		    {
318738fd1498Szrj 		      unlink_stmt_vdef (stmt);
318838fd1498Szrj 		      gsi_remove (&i, true);
318938fd1498Szrj 		      release_defs (stmt);
319038fd1498Szrj 		      continue;
319138fd1498Szrj 		    }
319238fd1498Szrj 		}
	      /* An aggregate copy may combine with a preceding aggregate
		 clear; see optimize_memcpy.  */
319338fd1498Szrj 	      else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
319438fd1498Szrj 		optimize_memcpy (&i, gimple_assign_lhs (stmt),
319538fd1498Szrj 				 gimple_assign_rhs1 (stmt), NULL_TREE);
319638fd1498Szrj 	      gsi_next (&i);
319738fd1498Szrj 	      continue;
319838fd1498Szrj 	    }
319938fd1498Szrj 
320038fd1498Szrj 	  callee = gimple_call_fndecl (stmt);
320138fd1498Szrj 	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
320238fd1498Szrj 	    {
320338fd1498Szrj 	      gsi_next (&i);
320438fd1498Szrj 	      continue;
320538fd1498Szrj 	    }
320638fd1498Szrj 
320738fd1498Szrj 	  fcode = DECL_FUNCTION_CODE (callee);
	  /* First try the generic folders; only if they do nothing handle
	     the builtins below specially.  */
320838fd1498Szrj 	  if (fold_stmt (&i))
320938fd1498Szrj 	    ;
321038fd1498Szrj 	  else
321138fd1498Szrj 	    {
	      /* RESULT, when set, is the value the call gets replaced
		 with at the bottom of this else-arm.  */
321238fd1498Szrj 	      tree result = NULL_TREE;
321338fd1498Szrj 	      switch (DECL_FUNCTION_CODE (callee))
321438fd1498Szrj 		{
321538fd1498Szrj 		case BUILT_IN_CONSTANT_P:
321638fd1498Szrj 		  /* Resolve __builtin_constant_p.  If it hasn't been
321738fd1498Szrj 		     folded to integer_one_node by now, it's fairly
321838fd1498Szrj 		     certain that the value simply isn't constant.  */
321938fd1498Szrj 		  result = integer_zero_node;
322038fd1498Szrj 		  break;
322138fd1498Szrj 
322238fd1498Szrj 		case BUILT_IN_ASSUME_ALIGNED:
322338fd1498Szrj 		  /* Remove __builtin_assume_aligned.  */
322438fd1498Szrj 		  result = gimple_call_arg (stmt, 0);
322538fd1498Szrj 		  break;
322638fd1498Szrj 
322738fd1498Szrj 		case BUILT_IN_STACK_RESTORE:
322838fd1498Szrj 		  result = optimize_stack_restore (i);
322938fd1498Szrj 		  if (result)
323038fd1498Szrj 		    break;
323138fd1498Szrj 		  gsi_next (&i);
323238fd1498Szrj 		  continue;
323338fd1498Szrj 
323438fd1498Szrj 		case BUILT_IN_UNREACHABLE:
323538fd1498Szrj 		  if (optimize_unreachable (i))
323638fd1498Szrj 		    cfg_changed = true;
323738fd1498Szrj 		  break;
323838fd1498Szrj 
		/* The atomic/sync fetch-and-{or,xor,and} builtins may be
		   turned into bit-test-and-{set,complement,reset} internal
		   functions; see optimize_atomic_bit_test_and for the
		   exact shapes accepted.  */
323938fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_OR_1:
324038fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_OR_2:
324138fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_OR_4:
324238fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_OR_8:
324338fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_OR_16:
324438fd1498Szrj 		  optimize_atomic_bit_test_and (&i,
324538fd1498Szrj 						IFN_ATOMIC_BIT_TEST_AND_SET,
324638fd1498Szrj 						true, false);
324738fd1498Szrj 		  break;
324838fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_OR_1:
324938fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_OR_2:
325038fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_OR_4:
325138fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_OR_8:
325238fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_OR_16:
325338fd1498Szrj 		  optimize_atomic_bit_test_and (&i,
325438fd1498Szrj 						IFN_ATOMIC_BIT_TEST_AND_SET,
325538fd1498Szrj 						false, false);
325638fd1498Szrj 		  break;
325738fd1498Szrj 
325838fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_XOR_1:
325938fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_XOR_2:
326038fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_XOR_4:
326138fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_XOR_8:
326238fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_XOR_16:
326338fd1498Szrj 		  optimize_atomic_bit_test_and
326438fd1498Szrj 			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, false);
326538fd1498Szrj 		  break;
326638fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_XOR_1:
326738fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_XOR_2:
326838fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_XOR_4:
326938fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_XOR_8:
327038fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_XOR_16:
327138fd1498Szrj 		  optimize_atomic_bit_test_and
327238fd1498Szrj 			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, false);
327338fd1498Szrj 		  break;
327438fd1498Szrj 
		/* The xor-and-fetch forms want the bit value AFTER the
		   operation, hence the trailing 'after' argument.  */
327538fd1498Szrj 		case BUILT_IN_ATOMIC_XOR_FETCH_1:
327638fd1498Szrj 		case BUILT_IN_ATOMIC_XOR_FETCH_2:
327738fd1498Szrj 		case BUILT_IN_ATOMIC_XOR_FETCH_4:
327838fd1498Szrj 		case BUILT_IN_ATOMIC_XOR_FETCH_8:
327938fd1498Szrj 		case BUILT_IN_ATOMIC_XOR_FETCH_16:
328038fd1498Szrj 		  optimize_atomic_bit_test_and
328138fd1498Szrj 			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, true);
328238fd1498Szrj 		  break;
328338fd1498Szrj 		case BUILT_IN_SYNC_XOR_AND_FETCH_1:
328438fd1498Szrj 		case BUILT_IN_SYNC_XOR_AND_FETCH_2:
328538fd1498Szrj 		case BUILT_IN_SYNC_XOR_AND_FETCH_4:
328638fd1498Szrj 		case BUILT_IN_SYNC_XOR_AND_FETCH_8:
328738fd1498Szrj 		case BUILT_IN_SYNC_XOR_AND_FETCH_16:
328838fd1498Szrj 		  optimize_atomic_bit_test_and
328938fd1498Szrj 			(&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, true);
329038fd1498Szrj 		  break;
329138fd1498Szrj 
329238fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_AND_1:
329338fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_AND_2:
329438fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_AND_4:
329538fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_AND_8:
329638fd1498Szrj 		case BUILT_IN_ATOMIC_FETCH_AND_16:
329738fd1498Szrj 		  optimize_atomic_bit_test_and (&i,
329838fd1498Szrj 						IFN_ATOMIC_BIT_TEST_AND_RESET,
329938fd1498Szrj 						true, false);
330038fd1498Szrj 		  break;
330138fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_AND_1:
330238fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_AND_2:
330338fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_AND_4:
330438fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_AND_8:
330538fd1498Szrj 		case BUILT_IN_SYNC_FETCH_AND_AND_16:
330638fd1498Szrj 		  optimize_atomic_bit_test_and (&i,
330738fd1498Szrj 						IFN_ATOMIC_BIT_TEST_AND_RESET,
330838fd1498Szrj 						false, false);
330938fd1498Szrj 		  break;
331038fd1498Szrj 
331138fd1498Szrj 		case BUILT_IN_MEMCPY:
331238fd1498Szrj 		  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
331338fd1498Szrj 		      && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
331438fd1498Szrj 		      && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
331538fd1498Szrj 		      && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST)
331638fd1498Szrj 		    {
331738fd1498Szrj 		      tree dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
331838fd1498Szrj 		      tree src = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
331938fd1498Szrj 		      tree len = gimple_call_arg (stmt, 2);
332038fd1498Szrj 		      optimize_memcpy (&i, dest, src, len);
332138fd1498Szrj 		    }
332238fd1498Szrj 		  break;
332338fd1498Szrj 
332438fd1498Szrj 		case BUILT_IN_VA_START:
332538fd1498Szrj 		case BUILT_IN_VA_END:
332638fd1498Szrj 		case BUILT_IN_VA_COPY:
332738fd1498Szrj 		  /* These shouldn't be folded before pass_stdarg.  */
332838fd1498Szrj 		  result = optimize_stdarg_builtin (stmt);
332938fd1498Szrj 		  break;
333038fd1498Szrj 
333138fd1498Szrj 		default:;
333238fd1498Szrj 		}
333338fd1498Szrj 
333438fd1498Szrj 	      if (!result)
333538fd1498Szrj 		{
333638fd1498Szrj 		  gsi_next (&i);
333738fd1498Szrj 		  continue;
333838fd1498Szrj 		}
333938fd1498Szrj 
334038fd1498Szrj 	      if (!update_call_from_tree (&i, result))
334138fd1498Szrj 		gimplify_and_update_call_from_tree (&i, result);
334238fd1498Szrj 	    }
334338fd1498Szrj 
	  /* Reaching this point means the statement was simplified.  */
334438fd1498Szrj 	  todoflags |= TODO_update_address_taken;
334538fd1498Szrj 
334638fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
334738fd1498Szrj 	    {
334838fd1498Szrj 	      fprintf (dump_file, "Simplified\n  ");
334938fd1498Szrj 	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
335038fd1498Szrj 	    }
335138fd1498Szrj 
335238fd1498Szrj           old_stmt = stmt;
335338fd1498Szrj 	  stmt = gsi_stmt (i);
335438fd1498Szrj 	  update_stmt (stmt);
335538fd1498Szrj 
335638fd1498Szrj 	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
335738fd1498Szrj 	      && gimple_purge_dead_eh_edges (bb))
335838fd1498Szrj 	    cfg_changed = true;
335938fd1498Szrj 
336038fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
336138fd1498Szrj 	    {
336238fd1498Szrj 	      fprintf (dump_file, "to\n  ");
336338fd1498Szrj 	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
336438fd1498Szrj 	      fprintf (dump_file, "\n");
336538fd1498Szrj 	    }
336638fd1498Szrj 
336738fd1498Szrj 	  /* Retry the same statement if it changed into another
336838fd1498Szrj 	     builtin, there might be new opportunities now.  */
336938fd1498Szrj           if (gimple_code (stmt) != GIMPLE_CALL)
337038fd1498Szrj 	    {
337138fd1498Szrj 	      gsi_next (&i);
337238fd1498Szrj 	      continue;
337338fd1498Szrj 	    }
337438fd1498Szrj 	  callee = gimple_call_fndecl (stmt);
337538fd1498Szrj 	  if (!callee
337638fd1498Szrj               || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
337738fd1498Szrj 	      || DECL_FUNCTION_CODE (callee) == fcode)
337838fd1498Szrj 	    gsi_next (&i);
337938fd1498Szrj 	}
338038fd1498Szrj     }
338138fd1498Szrj 
338238fd1498Szrj   /* Delete unreachable blocks.  */
338338fd1498Szrj   if (cfg_changed)
338438fd1498Szrj     todoflags |= TODO_cleanup_cfg;
338538fd1498Szrj 
338638fd1498Szrj   return todoflags;
338738fd1498Szrj }
338838fd1498Szrj 
338938fd1498Szrj } // anon namespace
339038fd1498Szrj 
/* Factory function for the "fab" (fold all builtins) pass.  */
339138fd1498Szrj gimple_opt_pass *
make_pass_fold_builtins(gcc::context * ctxt)339238fd1498Szrj make_pass_fold_builtins (gcc::context *ctxt)
339338fd1498Szrj {
339438fd1498Szrj   return new pass_fold_builtins (ctxt);
339538fd1498Szrj }
339638fd1498Szrj 
339738fd1498Szrj /* A simple pass that emits some warnings post IPA.  */
339838fd1498Szrj 
339938fd1498Szrj namespace {
340038fd1498Szrj 
/* Pass descriptor for the post-IPA warning pass.  */
340138fd1498Szrj const pass_data pass_data_post_ipa_warn =
340238fd1498Szrj {
340338fd1498Szrj   GIMPLE_PASS, /* type */
340438fd1498Szrj   "post_ipa_warn", /* name */
340538fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
340638fd1498Szrj   TV_NONE, /* tv_id */
340738fd1498Szrj   ( PROP_cfg | PROP_ssa ), /* properties_required */
340838fd1498Szrj   0, /* properties_provided */
340938fd1498Szrj   0, /* properties_destroyed */
341038fd1498Szrj   0, /* todo_flags_start */
341138fd1498Szrj   0, /* todo_flags_finish */
341238fd1498Szrj };
341338fd1498Szrj 
/* Gimple pass emitting diagnostics after IPA; it only gates in when
   -Wnonnull is enabled (see the gate method).  */
341438fd1498Szrj class pass_post_ipa_warn : public gimple_opt_pass
341538fd1498Szrj {
341638fd1498Szrj public:
pass_post_ipa_warn(gcc::context * ctxt)341738fd1498Szrj   pass_post_ipa_warn (gcc::context *ctxt)
341838fd1498Szrj     : gimple_opt_pass (pass_data_post_ipa_warn, ctxt)
341938fd1498Szrj   {}
342038fd1498Szrj 
342138fd1498Szrj   /* opt_pass methods: */
clone()342238fd1498Szrj   opt_pass * clone () { return new pass_post_ipa_warn (m_ctxt); }
gate(function *)342338fd1498Szrj   virtual bool gate (function *) { return warn_nonnull != 0; }
342438fd1498Szrj   virtual unsigned int execute (function *);
342538fd1498Szrj 
342638fd1498Szrj }; // class pass_post_ipa_warn
342738fd1498Szrj 
342838fd1498Szrj unsigned int
execute(function * fun)342938fd1498Szrj pass_post_ipa_warn::execute (function *fun)
343038fd1498Szrj {
343138fd1498Szrj   basic_block bb;
343238fd1498Szrj 
343338fd1498Szrj   FOR_EACH_BB_FN (bb, fun)
343438fd1498Szrj     {
343538fd1498Szrj       gimple_stmt_iterator gsi;
343638fd1498Szrj       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
343738fd1498Szrj 	{
343838fd1498Szrj 	  gimple *stmt = gsi_stmt (gsi);
343938fd1498Szrj 	  if (!is_gimple_call (stmt) || gimple_no_warning_p (stmt))
344038fd1498Szrj 	    continue;
344138fd1498Szrj 
	  /* Diagnose literal null pointer arguments passed to parameters
	     the function type declares nonnull.  */
344238fd1498Szrj 	  if (warn_nonnull)
344338fd1498Szrj 	    {
344438fd1498Szrj 	      bitmap nonnullargs
344538fd1498Szrj 		= get_nonnull_args (gimple_call_fntype (stmt));
344638fd1498Szrj 	      if (nonnullargs)
344738fd1498Szrj 		{
344838fd1498Szrj 		  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
344938fd1498Szrj 		    {
345038fd1498Szrj 		      tree arg = gimple_call_arg (stmt, i);
345138fd1498Szrj 		      if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
345238fd1498Szrj 			continue;
345338fd1498Szrj 		      if (!integer_zerop (arg))
345438fd1498Szrj 			continue;
		      /* NOTE(review): an empty bitmap here appears to mean
			 every pointer argument is nonnull — confirm against
			 get_nonnull_args' contract.  */
345538fd1498Szrj 		      if (!bitmap_empty_p (nonnullargs)
345638fd1498Szrj 			  && !bitmap_bit_p (nonnullargs, i))
345738fd1498Szrj 			continue;
345838fd1498Szrj 
345938fd1498Szrj 		      location_t loc = gimple_location (stmt);
346038fd1498Szrj 		      if (warning_at (loc, OPT_Wnonnull,
346138fd1498Szrj 				      "%Gargument %u null where non-null "
346238fd1498Szrj 				      "expected", as_a <gcall *>(stmt), i + 1))
346338fd1498Szrj 			{
346438fd1498Szrj 			  tree fndecl = gimple_call_fndecl (stmt);
346538fd1498Szrj 			  if (fndecl && DECL_IS_BUILTIN (fndecl))
346638fd1498Szrj 			    inform (loc, "in a call to built-in function %qD",
346738fd1498Szrj 				    fndecl);
346838fd1498Szrj 			  else if (fndecl)
346938fd1498Szrj 			    inform (DECL_SOURCE_LOCATION (fndecl),
347038fd1498Szrj 				    "in a call to function %qD declared here",
347138fd1498Szrj 				    fndecl);
347238fd1498Szrj 
347338fd1498Szrj 			}
347438fd1498Szrj 		    }
347538fd1498Szrj 		  BITMAP_FREE (nonnullargs);
347638fd1498Szrj 		}
347738fd1498Szrj 	    }
347838fd1498Szrj 	}
347938fd1498Szrj     }
348038fd1498Szrj   return 0;
348138fd1498Szrj }
348238fd1498Szrj 
348338fd1498Szrj } // anon namespace
348438fd1498Szrj 
/* Factory function for the post-IPA warning pass.  */
348538fd1498Szrj gimple_opt_pass *
make_pass_post_ipa_warn(gcc::context * ctxt)348638fd1498Szrj make_pass_post_ipa_warn (gcc::context *ctxt)
348738fd1498Szrj {
348838fd1498Szrj   return new pass_post_ipa_warn (ctxt);
348938fd1498Szrj }
3490