1*38fd1498Szrj /* Forward propagation of expressions for single use variables.
2*38fd1498Szrj    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3*38fd1498Szrj 
4*38fd1498Szrj This file is part of GCC.
5*38fd1498Szrj 
6*38fd1498Szrj GCC is free software; you can redistribute it and/or modify
7*38fd1498Szrj it under the terms of the GNU General Public License as published by
8*38fd1498Szrj the Free Software Foundation; either version 3, or (at your option)
9*38fd1498Szrj any later version.
10*38fd1498Szrj 
11*38fd1498Szrj GCC is distributed in the hope that it will be useful,
12*38fd1498Szrj but WITHOUT ANY WARRANTY; without even the implied warranty of
13*38fd1498Szrj MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14*38fd1498Szrj GNU General Public License for more details.
15*38fd1498Szrj 
16*38fd1498Szrj You should have received a copy of the GNU General Public License
17*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
18*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
19*38fd1498Szrj 
20*38fd1498Szrj #include "config.h"
21*38fd1498Szrj #include "system.h"
22*38fd1498Szrj #include "coretypes.h"
23*38fd1498Szrj #include "backend.h"
24*38fd1498Szrj #include "rtl.h"
25*38fd1498Szrj #include "tree.h"
26*38fd1498Szrj #include "gimple.h"
27*38fd1498Szrj #include "cfghooks.h"
28*38fd1498Szrj #include "tree-pass.h"
29*38fd1498Szrj #include "ssa.h"
30*38fd1498Szrj #include "expmed.h"
31*38fd1498Szrj #include "optabs-query.h"
32*38fd1498Szrj #include "gimple-pretty-print.h"
33*38fd1498Szrj #include "fold-const.h"
34*38fd1498Szrj #include "stor-layout.h"
35*38fd1498Szrj #include "gimple-fold.h"
36*38fd1498Szrj #include "tree-eh.h"
37*38fd1498Szrj #include "gimplify.h"
38*38fd1498Szrj #include "gimple-iterator.h"
39*38fd1498Szrj #include "gimplify-me.h"
40*38fd1498Szrj #include "tree-cfg.h"
41*38fd1498Szrj #include "expr.h"
42*38fd1498Szrj #include "tree-dfa.h"
43*38fd1498Szrj #include "tree-ssa-propagate.h"
44*38fd1498Szrj #include "tree-ssa-dom.h"
45*38fd1498Szrj #include "builtins.h"
46*38fd1498Szrj #include "tree-cfgcleanup.h"
47*38fd1498Szrj #include "cfganal.h"
48*38fd1498Szrj #include "optabs-tree.h"
49*38fd1498Szrj #include "tree-vector-builder.h"
50*38fd1498Szrj #include "vec-perm-indices.h"
51*38fd1498Szrj 
52*38fd1498Szrj /* This pass propagates the RHS of assignment statements into use
53*38fd1498Szrj    sites of the LHS of the assignment.  It's basically a specialized
54*38fd1498Szrj    form of tree combination.   It is hoped all of this can disappear
55*38fd1498Szrj    when we have a generalized tree combiner.
56*38fd1498Szrj 
57*38fd1498Szrj    One class of common cases we handle is forward propagating a single use
58*38fd1498Szrj    variable into a COND_EXPR.
59*38fd1498Szrj 
60*38fd1498Szrj      bb0:
61*38fd1498Szrj        x = a COND b;
62*38fd1498Szrj        if (x) goto ... else goto ...
63*38fd1498Szrj 
64*38fd1498Szrj    Will be transformed into:
65*38fd1498Szrj 
66*38fd1498Szrj      bb0:
67*38fd1498Szrj        if (a COND b) goto ... else goto ...
68*38fd1498Szrj 
69*38fd1498Szrj    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
70*38fd1498Szrj 
71*38fd1498Szrj    Or (assuming c1 and c2 are constants):
72*38fd1498Szrj 
73*38fd1498Szrj      bb0:
74*38fd1498Szrj        x = a + c1;
75*38fd1498Szrj        if (x EQ/NEQ c2) goto ... else goto ...
76*38fd1498Szrj 
77*38fd1498Szrj    Will be transformed into:
78*38fd1498Szrj 
79*38fd1498Szrj      bb0:
80*38fd1498Szrj         if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
81*38fd1498Szrj 
82*38fd1498Szrj    Similarly for x = a - c1.
83*38fd1498Szrj 
84*38fd1498Szrj    Or
85*38fd1498Szrj 
86*38fd1498Szrj      bb0:
87*38fd1498Szrj        x = !a
88*38fd1498Szrj        if (x) goto ... else goto ...
89*38fd1498Szrj 
90*38fd1498Szrj    Will be transformed into:
91*38fd1498Szrj 
92*38fd1498Szrj      bb0:
93*38fd1498Szrj         if (a == 0) goto ... else goto ...
94*38fd1498Szrj 
95*38fd1498Szrj    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
96*38fd1498Szrj    For these cases, we propagate A into all, possibly more than one,
97*38fd1498Szrj    COND_EXPRs that use X.
98*38fd1498Szrj 
99*38fd1498Szrj    Or
100*38fd1498Szrj 
101*38fd1498Szrj      bb0:
102*38fd1498Szrj        x = (typecast) a
103*38fd1498Szrj        if (x) goto ... else goto ...
104*38fd1498Szrj 
105*38fd1498Szrj    Will be transformed into:
106*38fd1498Szrj 
107*38fd1498Szrj      bb0:
108*38fd1498Szrj         if (a != 0) goto ... else goto ...
109*38fd1498Szrj 
110*38fd1498Szrj    (Assuming a is an integral type and x is a boolean or x is an
111*38fd1498Szrj     integral and a is a boolean.)
112*38fd1498Szrj 
113*38fd1498Szrj    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
114*38fd1498Szrj    For these cases, we propagate A into all, possibly more than one,
115*38fd1498Szrj    COND_EXPRs that use X.
116*38fd1498Szrj 
117*38fd1498Szrj    In addition to eliminating the variable and the statement which assigns
118*38fd1498Szrj    a value to the variable, we may be able to later thread the jump without
119*38fd1498Szrj    adding insane complexity in the dominator optimizer.
120*38fd1498Szrj 
121*38fd1498Szrj    Also note these transformations can cascade.  We handle this by having
122*38fd1498Szrj    a worklist of COND_EXPR statements to examine.  As we make a change to
123*38fd1498Szrj    a statement, we put it back on the worklist to examine on the next
124*38fd1498Szrj    iteration of the main loop.
125*38fd1498Szrj 
126*38fd1498Szrj    A second class of propagation opportunities arises for ADDR_EXPR
127*38fd1498Szrj    nodes.
128*38fd1498Szrj 
129*38fd1498Szrj      ptr = &x->y->z;
130*38fd1498Szrj      res = *ptr;
131*38fd1498Szrj 
132*38fd1498Szrj    Will get turned into
133*38fd1498Szrj 
134*38fd1498Szrj      res = x->y->z;
135*38fd1498Szrj 
136*38fd1498Szrj    Or
137*38fd1498Szrj      ptr = (type1*)&type2var;
138*38fd1498Szrj      res = *ptr
139*38fd1498Szrj 
140*38fd1498Szrj    Will get turned into (if type1 and type2 are the same size
141*38fd1498Szrj    and neither have volatile on them):
142*38fd1498Szrj      res = VIEW_CONVERT_EXPR<type1>(type2var)
143*38fd1498Szrj 
144*38fd1498Szrj    Or
145*38fd1498Szrj 
146*38fd1498Szrj      ptr = &x[0];
147*38fd1498Szrj      ptr2 = ptr + <constant>;
148*38fd1498Szrj 
149*38fd1498Szrj    Will get turned into
150*38fd1498Szrj 
151*38fd1498Szrj      ptr2 = &x[constant/elementsize];
152*38fd1498Szrj 
153*38fd1498Szrj   Or
154*38fd1498Szrj 
155*38fd1498Szrj      ptr = &x[0];
156*38fd1498Szrj      offset = index * element_size;
157*38fd1498Szrj      offset_p = (pointer) offset;
158*38fd1498Szrj      ptr2 = ptr + offset_p
159*38fd1498Szrj 
160*38fd1498Szrj   Will get turned into:
161*38fd1498Szrj 
162*38fd1498Szrj      ptr2 = &x[index];
163*38fd1498Szrj 
164*38fd1498Szrj   Or
165*38fd1498Szrj     ssa = (int) decl
166*38fd1498Szrj     res = ssa & 1
167*38fd1498Szrj 
168*38fd1498Szrj   Provided that decl has known alignment >= 2, will get turned into
169*38fd1498Szrj 
170*38fd1498Szrj     res = 0
171*38fd1498Szrj 
172*38fd1498Szrj   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
173*38fd1498Szrj   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
174*38fd1498Szrj   {NOT_EXPR,NEG_EXPR}.
175*38fd1498Szrj 
176*38fd1498Szrj    This will (of course) be extended as other needs arise.  */
177*38fd1498Szrj 
178*38fd1498Szrj static bool forward_propagate_addr_expr (tree, tree, bool);
179*38fd1498Szrj 
180*38fd1498Szrj /* Set to true if we delete dead edges during the optimization.  */
181*38fd1498Szrj static bool cfg_changed;
182*38fd1498Szrj 
183*38fd1498Szrj static tree rhs_to_tree (tree type, gimple *stmt);
184*38fd1498Szrj 
185*38fd1498Szrj static bitmap to_purge;
186*38fd1498Szrj 
187*38fd1498Szrj /* Const-and-copy lattice.  */
188*38fd1498Szrj static vec<tree> lattice;
189*38fd1498Szrj 
190*38fd1498Szrj /* Set the lattice entry for NAME to VAL.  */
191*38fd1498Szrj static void
fwprop_set_lattice_val(tree name,tree val)192*38fd1498Szrj fwprop_set_lattice_val (tree name, tree val)
193*38fd1498Szrj {
194*38fd1498Szrj   if (TREE_CODE (name) == SSA_NAME)
195*38fd1498Szrj     {
196*38fd1498Szrj       if (SSA_NAME_VERSION (name) >= lattice.length ())
197*38fd1498Szrj 	{
198*38fd1498Szrj 	  lattice.reserve (num_ssa_names - lattice.length ());
199*38fd1498Szrj 	  lattice.quick_grow_cleared (num_ssa_names);
200*38fd1498Szrj 	}
201*38fd1498Szrj       lattice[SSA_NAME_VERSION (name)] = val;
202*38fd1498Szrj     }
203*38fd1498Szrj }
204*38fd1498Szrj 
205*38fd1498Szrj /* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
206*38fd1498Szrj static void
fwprop_invalidate_lattice(tree name)207*38fd1498Szrj fwprop_invalidate_lattice (tree name)
208*38fd1498Szrj {
209*38fd1498Szrj   if (name
210*38fd1498Szrj       && TREE_CODE (name) == SSA_NAME
211*38fd1498Szrj       && SSA_NAME_VERSION (name) < lattice.length ())
212*38fd1498Szrj     lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
213*38fd1498Szrj }
214*38fd1498Szrj 
215*38fd1498Szrj 
216*38fd1498Szrj /* Get the statement we can propagate from into NAME skipping
217*38fd1498Szrj    trivial copies.  Returns the statement which defines the
218*38fd1498Szrj    propagation source or NULL_TREE if there is no such one.
219*38fd1498Szrj    If SINGLE_USE_ONLY is set considers only sources which have
220*38fd1498Szrj    a single use chain up to NAME.  If SINGLE_USE_P is non-null,
221*38fd1498Szrj    it is set to whether the chain to NAME is a single use chain
222*38fd1498Szrj    or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
223*38fd1498Szrj 
224*38fd1498Szrj static gimple *
get_prop_source_stmt(tree name,bool single_use_only,bool * single_use_p)225*38fd1498Szrj get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
226*38fd1498Szrj {
227*38fd1498Szrj   bool single_use = true;
228*38fd1498Szrj 
229*38fd1498Szrj   do {
230*38fd1498Szrj     gimple *def_stmt = SSA_NAME_DEF_STMT (name);
231*38fd1498Szrj 
232*38fd1498Szrj     if (!has_single_use (name))
233*38fd1498Szrj       {
234*38fd1498Szrj 	single_use = false;
235*38fd1498Szrj 	if (single_use_only)
236*38fd1498Szrj 	  return NULL;
237*38fd1498Szrj       }
238*38fd1498Szrj 
239*38fd1498Szrj     /* If name is defined by a PHI node or is the default def, bail out.  */
240*38fd1498Szrj     if (!is_gimple_assign (def_stmt))
241*38fd1498Szrj       return NULL;
242*38fd1498Szrj 
243*38fd1498Szrj     /* If def_stmt is a simple copy, continue looking.  */
244*38fd1498Szrj     if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
245*38fd1498Szrj       name = gimple_assign_rhs1 (def_stmt);
246*38fd1498Szrj     else
247*38fd1498Szrj       {
248*38fd1498Szrj 	if (!single_use_only && single_use_p)
249*38fd1498Szrj 	  *single_use_p = single_use;
250*38fd1498Szrj 
251*38fd1498Szrj 	return def_stmt;
252*38fd1498Szrj       }
253*38fd1498Szrj   } while (1);
254*38fd1498Szrj }
255*38fd1498Szrj 
256*38fd1498Szrj /* Checks if the destination ssa name in DEF_STMT can be used as
257*38fd1498Szrj    propagation source.  Returns true if so, otherwise false.  */
258*38fd1498Szrj 
259*38fd1498Szrj static bool
can_propagate_from(gimple * def_stmt)260*38fd1498Szrj can_propagate_from (gimple *def_stmt)
261*38fd1498Szrj {
262*38fd1498Szrj   gcc_assert (is_gimple_assign (def_stmt));
263*38fd1498Szrj 
264*38fd1498Szrj   /* If the rhs has side-effects we cannot propagate from it.  */
265*38fd1498Szrj   if (gimple_has_volatile_ops (def_stmt))
266*38fd1498Szrj     return false;
267*38fd1498Szrj 
268*38fd1498Szrj   /* If the rhs is a load we cannot propagate from it.  */
269*38fd1498Szrj   if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
270*38fd1498Szrj       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
271*38fd1498Szrj     return false;
272*38fd1498Szrj 
273*38fd1498Szrj   /* Constants can be always propagated.  */
274*38fd1498Szrj   if (gimple_assign_single_p (def_stmt)
275*38fd1498Szrj       && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
276*38fd1498Szrj     return true;
277*38fd1498Szrj 
278*38fd1498Szrj   /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
279*38fd1498Szrj   if (stmt_references_abnormal_ssa_name (def_stmt))
280*38fd1498Szrj     return false;
281*38fd1498Szrj 
282*38fd1498Szrj   /* If the definition is a conversion of a pointer to a function type,
283*38fd1498Szrj      then we can not apply optimizations as some targets require
284*38fd1498Szrj      function pointers to be canonicalized and in this case this
285*38fd1498Szrj      optimization could eliminate a necessary canonicalization.  */
286*38fd1498Szrj   if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
287*38fd1498Szrj     {
288*38fd1498Szrj       tree rhs = gimple_assign_rhs1 (def_stmt);
289*38fd1498Szrj       if (POINTER_TYPE_P (TREE_TYPE (rhs))
290*38fd1498Szrj           && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
291*38fd1498Szrj         return false;
292*38fd1498Szrj     }
293*38fd1498Szrj 
294*38fd1498Szrj   return true;
295*38fd1498Szrj }
296*38fd1498Szrj 
297*38fd1498Szrj /* Remove a chain of dead statements starting at the definition of
298*38fd1498Szrj    NAME.  The chain is linked via the first operand of the defining statements.
299*38fd1498Szrj    If NAME was replaced in its only use then this function can be used
300*38fd1498Szrj    to clean up dead stmts.  The function handles already released SSA
301*38fd1498Szrj    names gracefully.
302*38fd1498Szrj    Returns true if cleanup-cfg has to run.  */
303*38fd1498Szrj 
304*38fd1498Szrj static bool
remove_prop_source_from_use(tree name)305*38fd1498Szrj remove_prop_source_from_use (tree name)
306*38fd1498Szrj {
307*38fd1498Szrj   gimple_stmt_iterator gsi;
308*38fd1498Szrj   gimple *stmt;
309*38fd1498Szrj   bool cfg_changed = false;
310*38fd1498Szrj 
311*38fd1498Szrj   do {
312*38fd1498Szrj     basic_block bb;
313*38fd1498Szrj 
314*38fd1498Szrj     if (SSA_NAME_IN_FREE_LIST (name)
315*38fd1498Szrj 	|| SSA_NAME_IS_DEFAULT_DEF (name)
316*38fd1498Szrj 	|| !has_zero_uses (name))
317*38fd1498Szrj       return cfg_changed;
318*38fd1498Szrj 
319*38fd1498Szrj     stmt = SSA_NAME_DEF_STMT (name);
320*38fd1498Szrj     if (gimple_code (stmt) == GIMPLE_PHI
321*38fd1498Szrj 	|| gimple_has_side_effects (stmt))
322*38fd1498Szrj       return cfg_changed;
323*38fd1498Szrj 
324*38fd1498Szrj     bb = gimple_bb (stmt);
325*38fd1498Szrj     gsi = gsi_for_stmt (stmt);
326*38fd1498Szrj     unlink_stmt_vdef (stmt);
327*38fd1498Szrj     if (gsi_remove (&gsi, true))
328*38fd1498Szrj       bitmap_set_bit (to_purge, bb->index);
329*38fd1498Szrj     fwprop_invalidate_lattice (gimple_get_lhs (stmt));
330*38fd1498Szrj     release_defs (stmt);
331*38fd1498Szrj 
332*38fd1498Szrj     name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
333*38fd1498Szrj   } while (name && TREE_CODE (name) == SSA_NAME);
334*38fd1498Szrj 
335*38fd1498Szrj   return cfg_changed;
336*38fd1498Szrj }
337*38fd1498Szrj 
338*38fd1498Szrj /* Return the rhs of a gassign *STMT in a form of a single tree,
339*38fd1498Szrj    converted to type TYPE.
340*38fd1498Szrj 
341*38fd1498Szrj    This should disappear, but is needed so we can combine expressions and use
342*38fd1498Szrj    the fold() interfaces. Long term, we need to develop folding and combine
343*38fd1498Szrj    routines that deal with gimple exclusively . */
344*38fd1498Szrj 
345*38fd1498Szrj static tree
rhs_to_tree(tree type,gimple * stmt)346*38fd1498Szrj rhs_to_tree (tree type, gimple *stmt)
347*38fd1498Szrj {
348*38fd1498Szrj   location_t loc = gimple_location (stmt);
349*38fd1498Szrj   enum tree_code code = gimple_assign_rhs_code (stmt);
350*38fd1498Szrj   if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
351*38fd1498Szrj     return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
352*38fd1498Szrj 			    gimple_assign_rhs2 (stmt),
353*38fd1498Szrj 			    gimple_assign_rhs3 (stmt));
354*38fd1498Szrj   else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
355*38fd1498Szrj     return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
356*38fd1498Szrj 			gimple_assign_rhs2 (stmt));
357*38fd1498Szrj   else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
358*38fd1498Szrj     return build1 (code, type, gimple_assign_rhs1 (stmt));
359*38fd1498Szrj   else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
360*38fd1498Szrj     return gimple_assign_rhs1 (stmt);
361*38fd1498Szrj   else
362*38fd1498Szrj     gcc_unreachable ();
363*38fd1498Szrj }
364*38fd1498Szrj 
365*38fd1498Szrj /* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
366*38fd1498Szrj    the folded result in a form suitable for COND_EXPR_COND or
367*38fd1498Szrj    NULL_TREE, if there is no suitable simplified form.  If
368*38fd1498Szrj    INVARIANT_ONLY is true only gimple_min_invariant results are
369*38fd1498Szrj    considered simplified.  */
370*38fd1498Szrj 
371*38fd1498Szrj static tree
combine_cond_expr_cond(gimple * stmt,enum tree_code code,tree type,tree op0,tree op1,bool invariant_only)372*38fd1498Szrj combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
373*38fd1498Szrj 			tree op0, tree op1, bool invariant_only)
374*38fd1498Szrj {
375*38fd1498Szrj   tree t;
376*38fd1498Szrj 
377*38fd1498Szrj   gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
378*38fd1498Szrj 
379*38fd1498Szrj   fold_defer_overflow_warnings ();
380*38fd1498Szrj   t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
381*38fd1498Szrj   if (!t)
382*38fd1498Szrj     {
383*38fd1498Szrj       fold_undefer_overflow_warnings (false, NULL, 0);
384*38fd1498Szrj       return NULL_TREE;
385*38fd1498Szrj     }
386*38fd1498Szrj 
387*38fd1498Szrj   /* Require that we got a boolean type out if we put one in.  */
388*38fd1498Szrj   gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
389*38fd1498Szrj 
390*38fd1498Szrj   /* Canonicalize the combined condition for use in a COND_EXPR.  */
391*38fd1498Szrj   t = canonicalize_cond_expr_cond (t);
392*38fd1498Szrj 
393*38fd1498Szrj   /* Bail out if we required an invariant but didn't get one.  */
394*38fd1498Szrj   if (!t || (invariant_only && !is_gimple_min_invariant (t)))
395*38fd1498Szrj     {
396*38fd1498Szrj       fold_undefer_overflow_warnings (false, NULL, 0);
397*38fd1498Szrj       return NULL_TREE;
398*38fd1498Szrj     }
399*38fd1498Szrj 
400*38fd1498Szrj   fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
401*38fd1498Szrj 
402*38fd1498Szrj   return t;
403*38fd1498Szrj }
404*38fd1498Szrj 
405*38fd1498Szrj /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
406*38fd1498Szrj    of its operand.  Return a new comparison tree or NULL_TREE if there
407*38fd1498Szrj    were no simplifying combines.  */
408*38fd1498Szrj 
409*38fd1498Szrj static tree
forward_propagate_into_comparison_1(gimple * stmt,enum tree_code code,tree type,tree op0,tree op1)410*38fd1498Szrj forward_propagate_into_comparison_1 (gimple *stmt,
411*38fd1498Szrj 				     enum tree_code code, tree type,
412*38fd1498Szrj 				     tree op0, tree op1)
413*38fd1498Szrj {
414*38fd1498Szrj   tree tmp = NULL_TREE;
415*38fd1498Szrj   tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
416*38fd1498Szrj   bool single_use0_p = false, single_use1_p = false;
417*38fd1498Szrj 
418*38fd1498Szrj   /* For comparisons use the first operand, that is likely to
419*38fd1498Szrj      simplify comparisons against constants.  */
420*38fd1498Szrj   if (TREE_CODE (op0) == SSA_NAME)
421*38fd1498Szrj     {
422*38fd1498Szrj       gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
423*38fd1498Szrj       if (def_stmt && can_propagate_from (def_stmt))
424*38fd1498Szrj 	{
425*38fd1498Szrj 	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
426*38fd1498Szrj 	  bool invariant_only_p = !single_use0_p;
427*38fd1498Szrj 
428*38fd1498Szrj 	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
429*38fd1498Szrj 
430*38fd1498Szrj 	  /* Always combine comparisons or conversions from booleans.  */
431*38fd1498Szrj 	  if (TREE_CODE (op1) == INTEGER_CST
432*38fd1498Szrj 	      && ((CONVERT_EXPR_CODE_P (def_code)
433*38fd1498Szrj 		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
434*38fd1498Szrj 		      == BOOLEAN_TYPE)
435*38fd1498Szrj 		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
436*38fd1498Szrj 	    invariant_only_p = false;
437*38fd1498Szrj 
438*38fd1498Szrj 	  tmp = combine_cond_expr_cond (stmt, code, type,
439*38fd1498Szrj 					rhs0, op1, invariant_only_p);
440*38fd1498Szrj 	  if (tmp)
441*38fd1498Szrj 	    return tmp;
442*38fd1498Szrj 	}
443*38fd1498Szrj     }
444*38fd1498Szrj 
445*38fd1498Szrj   /* If that wasn't successful, try the second operand.  */
446*38fd1498Szrj   if (TREE_CODE (op1) == SSA_NAME)
447*38fd1498Szrj     {
448*38fd1498Szrj       gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
449*38fd1498Szrj       if (def_stmt && can_propagate_from (def_stmt))
450*38fd1498Szrj 	{
451*38fd1498Szrj 	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
452*38fd1498Szrj 	  tmp = combine_cond_expr_cond (stmt, code, type,
453*38fd1498Szrj 					op0, rhs1, !single_use1_p);
454*38fd1498Szrj 	  if (tmp)
455*38fd1498Szrj 	    return tmp;
456*38fd1498Szrj 	}
457*38fd1498Szrj     }
458*38fd1498Szrj 
459*38fd1498Szrj   /* If that wasn't successful either, try both operands.  */
460*38fd1498Szrj   if (rhs0 != NULL_TREE
461*38fd1498Szrj       && rhs1 != NULL_TREE)
462*38fd1498Szrj     tmp = combine_cond_expr_cond (stmt, code, type,
463*38fd1498Szrj 				  rhs0, rhs1,
464*38fd1498Szrj 				  !(single_use0_p && single_use1_p));
465*38fd1498Szrj 
466*38fd1498Szrj   return tmp;
467*38fd1498Szrj }
468*38fd1498Szrj 
469*38fd1498Szrj /* Propagate from the ssa name definition statements of the assignment
470*38fd1498Szrj    from a comparison at *GSI into the conditional if that simplifies it.
471*38fd1498Szrj    Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
472*38fd1498Szrj    otherwise returns 0.  */
473*38fd1498Szrj 
474*38fd1498Szrj static int
forward_propagate_into_comparison(gimple_stmt_iterator * gsi)475*38fd1498Szrj forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
476*38fd1498Szrj {
477*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi);
478*38fd1498Szrj   tree tmp;
479*38fd1498Szrj   bool cfg_changed = false;
480*38fd1498Szrj   tree type = TREE_TYPE (gimple_assign_lhs (stmt));
481*38fd1498Szrj   tree rhs1 = gimple_assign_rhs1 (stmt);
482*38fd1498Szrj   tree rhs2 = gimple_assign_rhs2 (stmt);
483*38fd1498Szrj 
484*38fd1498Szrj   /* Combine the comparison with defining statements.  */
485*38fd1498Szrj   tmp = forward_propagate_into_comparison_1 (stmt,
486*38fd1498Szrj 					     gimple_assign_rhs_code (stmt),
487*38fd1498Szrj 					     type, rhs1, rhs2);
488*38fd1498Szrj   if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
489*38fd1498Szrj     {
490*38fd1498Szrj       gimple_assign_set_rhs_from_tree (gsi, tmp);
491*38fd1498Szrj       fold_stmt (gsi);
492*38fd1498Szrj       update_stmt (gsi_stmt (*gsi));
493*38fd1498Szrj 
494*38fd1498Szrj       if (TREE_CODE (rhs1) == SSA_NAME)
495*38fd1498Szrj 	cfg_changed |= remove_prop_source_from_use (rhs1);
496*38fd1498Szrj       if (TREE_CODE (rhs2) == SSA_NAME)
497*38fd1498Szrj 	cfg_changed |= remove_prop_source_from_use (rhs2);
498*38fd1498Szrj       return cfg_changed ? 2 : 1;
499*38fd1498Szrj     }
500*38fd1498Szrj 
501*38fd1498Szrj   return 0;
502*38fd1498Szrj }
503*38fd1498Szrj 
504*38fd1498Szrj /* Propagate from the ssa name definition statements of COND_EXPR
505*38fd1498Szrj    in GIMPLE_COND statement STMT into the conditional if that simplifies it.
506*38fd1498Szrj    Returns zero if no statement was changed, one if there were
507*38fd1498Szrj    changes and two if cfg_cleanup needs to run.
508*38fd1498Szrj 
509*38fd1498Szrj    This must be kept in sync with forward_propagate_into_cond.  */
510*38fd1498Szrj 
511*38fd1498Szrj static int
forward_propagate_into_gimple_cond(gcond * stmt)512*38fd1498Szrj forward_propagate_into_gimple_cond (gcond *stmt)
513*38fd1498Szrj {
514*38fd1498Szrj   tree tmp;
515*38fd1498Szrj   enum tree_code code = gimple_cond_code (stmt);
516*38fd1498Szrj   bool cfg_changed = false;
517*38fd1498Szrj   tree rhs1 = gimple_cond_lhs (stmt);
518*38fd1498Szrj   tree rhs2 = gimple_cond_rhs (stmt);
519*38fd1498Szrj 
520*38fd1498Szrj   /* We can do tree combining on SSA_NAME and comparison expressions.  */
521*38fd1498Szrj   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
522*38fd1498Szrj     return 0;
523*38fd1498Szrj 
524*38fd1498Szrj   tmp = forward_propagate_into_comparison_1 (stmt, code,
525*38fd1498Szrj 					     boolean_type_node,
526*38fd1498Szrj 					     rhs1, rhs2);
527*38fd1498Szrj   if (tmp)
528*38fd1498Szrj     {
529*38fd1498Szrj       if (dump_file && tmp)
530*38fd1498Szrj 	{
531*38fd1498Szrj 	  fprintf (dump_file, "  Replaced '");
532*38fd1498Szrj 	  print_gimple_expr (dump_file, stmt, 0);
533*38fd1498Szrj 	  fprintf (dump_file, "' with '");
534*38fd1498Szrj 	  print_generic_expr (dump_file, tmp);
535*38fd1498Szrj 	  fprintf (dump_file, "'\n");
536*38fd1498Szrj 	}
537*38fd1498Szrj 
538*38fd1498Szrj       gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
539*38fd1498Szrj       update_stmt (stmt);
540*38fd1498Szrj 
541*38fd1498Szrj       if (TREE_CODE (rhs1) == SSA_NAME)
542*38fd1498Szrj 	cfg_changed |= remove_prop_source_from_use (rhs1);
543*38fd1498Szrj       if (TREE_CODE (rhs2) == SSA_NAME)
544*38fd1498Szrj 	cfg_changed |= remove_prop_source_from_use (rhs2);
545*38fd1498Szrj       return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
546*38fd1498Szrj     }
547*38fd1498Szrj 
548*38fd1498Szrj   /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
549*38fd1498Szrj   if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
550*38fd1498Szrj        || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
551*38fd1498Szrj 	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
552*38fd1498Szrj       && ((code == EQ_EXPR
553*38fd1498Szrj 	   && integer_zerop (rhs2))
554*38fd1498Szrj 	  || (code == NE_EXPR
555*38fd1498Szrj 	      && integer_onep (rhs2))))
556*38fd1498Szrj     {
557*38fd1498Szrj       basic_block bb = gimple_bb (stmt);
558*38fd1498Szrj       gimple_cond_set_code (stmt, NE_EXPR);
559*38fd1498Szrj       gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
560*38fd1498Szrj       EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
561*38fd1498Szrj       EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
562*38fd1498Szrj       return 1;
563*38fd1498Szrj     }
564*38fd1498Szrj 
565*38fd1498Szrj   return 0;
566*38fd1498Szrj }
567*38fd1498Szrj 
568*38fd1498Szrj 
569*38fd1498Szrj /* Propagate from the ssa name definition statements of COND_EXPR
570*38fd1498Szrj    in the rhs of statement STMT into the conditional if that simplifies it.
571*38fd1498Szrj    Returns true zero if the stmt was changed.  */
572*38fd1498Szrj 
573*38fd1498Szrj static bool
forward_propagate_into_cond(gimple_stmt_iterator * gsi_p)574*38fd1498Szrj forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
575*38fd1498Szrj {
576*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi_p);
577*38fd1498Szrj   tree tmp = NULL_TREE;
578*38fd1498Szrj   tree cond = gimple_assign_rhs1 (stmt);
579*38fd1498Szrj   enum tree_code code = gimple_assign_rhs_code (stmt);
580*38fd1498Szrj 
581*38fd1498Szrj   /* We can do tree combining on SSA_NAME and comparison expressions.  */
582*38fd1498Szrj   if (COMPARISON_CLASS_P (cond))
583*38fd1498Szrj     tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
584*38fd1498Szrj 					       TREE_TYPE (cond),
585*38fd1498Szrj 					       TREE_OPERAND (cond, 0),
586*38fd1498Szrj 					       TREE_OPERAND (cond, 1));
587*38fd1498Szrj   else if (TREE_CODE (cond) == SSA_NAME)
588*38fd1498Szrj     {
589*38fd1498Szrj       enum tree_code def_code;
590*38fd1498Szrj       tree name = cond;
591*38fd1498Szrj       gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
592*38fd1498Szrj       if (!def_stmt || !can_propagate_from (def_stmt))
593*38fd1498Szrj 	return 0;
594*38fd1498Szrj 
595*38fd1498Szrj       def_code = gimple_assign_rhs_code (def_stmt);
596*38fd1498Szrj       if (TREE_CODE_CLASS (def_code) == tcc_comparison)
597*38fd1498Szrj 	tmp = fold_build2_loc (gimple_location (def_stmt),
598*38fd1498Szrj 			       def_code,
599*38fd1498Szrj 			       TREE_TYPE (cond),
600*38fd1498Szrj 			       gimple_assign_rhs1 (def_stmt),
601*38fd1498Szrj 			       gimple_assign_rhs2 (def_stmt));
602*38fd1498Szrj     }
603*38fd1498Szrj 
604*38fd1498Szrj   if (tmp
605*38fd1498Szrj       && is_gimple_condexpr (tmp))
606*38fd1498Szrj     {
607*38fd1498Szrj       if (dump_file && tmp)
608*38fd1498Szrj 	{
609*38fd1498Szrj 	  fprintf (dump_file, "  Replaced '");
610*38fd1498Szrj 	  print_generic_expr (dump_file, cond);
611*38fd1498Szrj 	  fprintf (dump_file, "' with '");
612*38fd1498Szrj 	  print_generic_expr (dump_file, tmp);
613*38fd1498Szrj 	  fprintf (dump_file, "'\n");
614*38fd1498Szrj 	}
615*38fd1498Szrj 
616*38fd1498Szrj       if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
617*38fd1498Szrj 				  : integer_onep (tmp))
618*38fd1498Szrj 	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
619*38fd1498Szrj       else if (integer_zerop (tmp))
620*38fd1498Szrj 	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
621*38fd1498Szrj       else
622*38fd1498Szrj 	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
623*38fd1498Szrj       stmt = gsi_stmt (*gsi_p);
624*38fd1498Szrj       update_stmt (stmt);
625*38fd1498Szrj 
626*38fd1498Szrj       return true;
627*38fd1498Szrj     }
628*38fd1498Szrj 
629*38fd1498Szrj   return 0;
630*38fd1498Szrj }
631*38fd1498Szrj 
632*38fd1498Szrj /* We've just substituted an ADDR_EXPR into stmt.  Update all the
633*38fd1498Szrj    relevant data structures to match.  */
634*38fd1498Szrj 
635*38fd1498Szrj static void
tidy_after_forward_propagate_addr(gimple * stmt)636*38fd1498Szrj tidy_after_forward_propagate_addr (gimple *stmt)
637*38fd1498Szrj {
638*38fd1498Szrj   /* We may have turned a trapping insn into a non-trapping insn.  */
639*38fd1498Szrj   if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
640*38fd1498Szrj     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
641*38fd1498Szrj 
642*38fd1498Szrj   if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
643*38fd1498Szrj      recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
644*38fd1498Szrj }
645*38fd1498Szrj 
646*38fd1498Szrj /* NAME is a SSA_NAME representing DEF_RHS which is of the form
647*38fd1498Szrj    ADDR_EXPR <whatever>.
648*38fd1498Szrj 
649*38fd1498Szrj    Try to forward propagate the ADDR_EXPR into the use USE_STMT.
650*38fd1498Szrj    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
651*38fd1498Szrj    node or for recovery of array indexing from pointer arithmetic.
652*38fd1498Szrj 
653*38fd1498Szrj    Return true if the propagation was successful (the propagation can
654*38fd1498Szrj    be not totally successful, yet things may have been changed).  */
655*38fd1498Szrj 
656*38fd1498Szrj static bool
forward_propagate_addr_expr_1(tree name,tree def_rhs,gimple_stmt_iterator * use_stmt_gsi,bool single_use_p)657*38fd1498Szrj forward_propagate_addr_expr_1 (tree name, tree def_rhs,
658*38fd1498Szrj 			       gimple_stmt_iterator *use_stmt_gsi,
659*38fd1498Szrj 			       bool single_use_p)
660*38fd1498Szrj {
661*38fd1498Szrj   tree lhs, rhs, rhs2, array_ref;
662*38fd1498Szrj   gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
663*38fd1498Szrj   enum tree_code rhs_code;
  /* Assume success; a failure to propagate into a LHS memory reference
     below clears this while the RHS is still processed.  */
664*38fd1498Szrj   bool res = true;
665*38fd1498Szrj 
666*38fd1498Szrj   gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
667*38fd1498Szrj 
668*38fd1498Szrj   lhs = gimple_assign_lhs (use_stmt);
669*38fd1498Szrj   rhs_code = gimple_assign_rhs_code (use_stmt);
670*38fd1498Szrj   rhs = gimple_assign_rhs1 (use_stmt);
671*38fd1498Szrj 
672*38fd1498Szrj   /* Do not perform copy-propagation but recurse through copy chains.  */
673*38fd1498Szrj   if (TREE_CODE (lhs) == SSA_NAME
674*38fd1498Szrj       && rhs_code == SSA_NAME)
675*38fd1498Szrj     return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
676*38fd1498Szrj 
677*38fd1498Szrj   /* The use statement could be a conversion.  Recurse to the uses of the
678*38fd1498Szrj      lhs as copyprop does not copy through pointer to integer to pointer
679*38fd1498Szrj      conversions and FRE does not catch all cases either.
680*38fd1498Szrj      Treat the case of a single-use name and
681*38fd1498Szrj      a conversion to def_rhs type separate, though.  */
682*38fd1498Szrj   if (TREE_CODE (lhs) == SSA_NAME
683*38fd1498Szrj       && CONVERT_EXPR_CODE_P (rhs_code))
684*38fd1498Szrj     {
685*38fd1498Szrj       /* If there is a point in a conversion chain where the types match
686*38fd1498Szrj          so we can remove a conversion re-materialize the address here
687*38fd1498Szrj 	 and stop.  */
688*38fd1498Szrj       if (single_use_p
689*38fd1498Szrj 	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
690*38fd1498Szrj 	{
691*38fd1498Szrj 	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
692*38fd1498Szrj 	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
693*38fd1498Szrj 	  return true;
694*38fd1498Szrj 	}
695*38fd1498Szrj 
696*38fd1498Szrj       /* Else recurse if the conversion preserves the address value.  */
697*38fd1498Szrj       if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
698*38fd1498Szrj 	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
699*38fd1498Szrj 	  && (TYPE_PRECISION (TREE_TYPE (lhs))
700*38fd1498Szrj 	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
701*38fd1498Szrj 	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
702*38fd1498Szrj 
703*38fd1498Szrj       return false;
704*38fd1498Szrj     }
705*38fd1498Szrj 
706*38fd1498Szrj   /* If this isn't a conversion chain from this on we only can propagate
707*38fd1498Szrj      into compatible pointer contexts.  */
708*38fd1498Szrj   if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
709*38fd1498Szrj     return false;
710*38fd1498Szrj 
711*38fd1498Szrj   /* Propagate through constant pointer adjustments.  */
712*38fd1498Szrj   if (TREE_CODE (lhs) == SSA_NAME
713*38fd1498Szrj       && rhs_code == POINTER_PLUS_EXPR
714*38fd1498Szrj       && rhs == name
715*38fd1498Szrj       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
716*38fd1498Szrj     {
717*38fd1498Szrj       tree new_def_rhs;
718*38fd1498Szrj       /* As we come here with non-invariant addresses in def_rhs we need
719*38fd1498Szrj          to make sure we can build a valid constant offsetted address
720*38fd1498Szrj 	 for further propagation.  Simply rely on fold building that
721*38fd1498Szrj 	 and check after the fact.  */
722*38fd1498Szrj       new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
723*38fd1498Szrj 				 def_rhs,
724*38fd1498Szrj 				 fold_convert (ptr_type_node,
725*38fd1498Szrj 					       gimple_assign_rhs2 (use_stmt)));
726*38fd1498Szrj       if (TREE_CODE (new_def_rhs) == MEM_REF
727*38fd1498Szrj 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
728*38fd1498Szrj 	return false;
729*38fd1498Szrj       new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
730*38fd1498Szrj 						    TREE_TYPE (rhs));
731*38fd1498Szrj 
732*38fd1498Szrj       /* Recurse.  If we could propagate into all uses of lhs do not
733*38fd1498Szrj 	 bother to replace into the current use but just pretend we did.  */
734*38fd1498Szrj       if (TREE_CODE (new_def_rhs) == ADDR_EXPR
735*38fd1498Szrj 	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
736*38fd1498Szrj 	return true;
737*38fd1498Szrj 
738*38fd1498Szrj       if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
739*38fd1498Szrj 	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
740*38fd1498Szrj 					new_def_rhs);
741*38fd1498Szrj       else if (is_gimple_min_invariant (new_def_rhs))
742*38fd1498Szrj 	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
743*38fd1498Szrj       else
744*38fd1498Szrj 	return false;
      /* Replacing the RHS in place must not have replaced the statement.  */
745*38fd1498Szrj       gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
746*38fd1498Szrj       update_stmt (use_stmt);
747*38fd1498Szrj       return true;
748*38fd1498Szrj     }
749*38fd1498Szrj 
750*38fd1498Szrj   /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
751*38fd1498Szrj      ADDR_EXPR will not appear on the LHS.  */
752*38fd1498Szrj   tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
753*38fd1498Szrj   while (handled_component_p (*lhsp))
754*38fd1498Szrj     lhsp = &TREE_OPERAND (*lhsp, 0);
755*38fd1498Szrj   lhs = *lhsp;
756*38fd1498Szrj 
757*38fd1498Szrj   /* Now see if the LHS node is a MEM_REF using NAME.  If so,
758*38fd1498Szrj      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
759*38fd1498Szrj   if (TREE_CODE (lhs) == MEM_REF
760*38fd1498Szrj       && TREE_OPERAND (lhs, 0) == name)
761*38fd1498Szrj     {
762*38fd1498Szrj       tree def_rhs_base;
763*38fd1498Szrj       poly_int64 def_rhs_offset;
764*38fd1498Szrj       /* If the address is invariant we can always fold it.  */
765*38fd1498Szrj       if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
766*38fd1498Szrj 							 &def_rhs_offset)))
767*38fd1498Szrj 	{
768*38fd1498Szrj 	  poly_offset_int off = mem_ref_offset (lhs);
769*38fd1498Szrj 	  tree new_ptr;
770*38fd1498Szrj 	  off += def_rhs_offset;
771*38fd1498Szrj 	  if (TREE_CODE (def_rhs_base) == MEM_REF)
772*38fd1498Szrj 	    {
773*38fd1498Szrj 	      off += mem_ref_offset (def_rhs_base);
774*38fd1498Szrj 	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
775*38fd1498Szrj 	    }
776*38fd1498Szrj 	  else
777*38fd1498Szrj 	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  /* Rewrite the MEM_REF in place to use the new base pointer and
	     the combined constant offset.  */
778*38fd1498Szrj 	  TREE_OPERAND (lhs, 0) = new_ptr;
779*38fd1498Szrj 	  TREE_OPERAND (lhs, 1)
780*38fd1498Szrj 	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
781*38fd1498Szrj 	  tidy_after_forward_propagate_addr (use_stmt);
782*38fd1498Szrj 	  /* Continue propagating into the RHS if this was not the only use.  */
783*38fd1498Szrj 	  if (single_use_p)
784*38fd1498Szrj 	    return true;
785*38fd1498Szrj 	}
786*38fd1498Szrj       /* If the LHS is a plain dereference and the value type is the same as
787*38fd1498Szrj          that of the pointed-to type of the address we can put the
788*38fd1498Szrj 	 dereferenced address on the LHS preserving the original alias-type.  */
789*38fd1498Szrj       else if (integer_zerop (TREE_OPERAND (lhs, 1))
790*38fd1498Szrj 	       && ((gimple_assign_lhs (use_stmt) == lhs
791*38fd1498Szrj 		    && useless_type_conversion_p
792*38fd1498Szrj 		         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
793*38fd1498Szrj 		          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
794*38fd1498Szrj 		   || types_compatible_p (TREE_TYPE (lhs),
795*38fd1498Szrj 					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
796*38fd1498Szrj 	       /* Don't forward anything into clobber stmts if it would result
797*38fd1498Szrj 		  in the lhs no longer being a MEM_REF.  */
798*38fd1498Szrj 	       && (!gimple_clobber_p (use_stmt)
799*38fd1498Szrj 		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
800*38fd1498Szrj 	{
801*38fd1498Szrj 	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
802*38fd1498Szrj 	  tree new_offset, new_base, saved, new_lhs;
	  /* Temporarily graft a MEM_REF that merges in LHS's base pointer
	     and offset into DEF_RHS, unshare the resulting reference for
	     use as the new LHS, then restore DEF_RHS (see *def_rhs_basep
	     = saved below).  */
803*38fd1498Szrj 	  while (handled_component_p (*def_rhs_basep))
804*38fd1498Szrj 	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
805*38fd1498Szrj 	  saved = *def_rhs_basep;
806*38fd1498Szrj 	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
807*38fd1498Szrj 	    {
808*38fd1498Szrj 	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
809*38fd1498Szrj 	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
810*38fd1498Szrj 					 TREE_OPERAND (*def_rhs_basep, 1));
811*38fd1498Szrj 	    }
812*38fd1498Szrj 	  else
813*38fd1498Szrj 	    {
814*38fd1498Szrj 	      new_base = build_fold_addr_expr (*def_rhs_basep);
815*38fd1498Szrj 	      new_offset = TREE_OPERAND (lhs, 1);
816*38fd1498Szrj 	    }
817*38fd1498Szrj 	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
818*38fd1498Szrj 				   new_base, new_offset);
819*38fd1498Szrj 	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
820*38fd1498Szrj 	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
821*38fd1498Szrj 	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
822*38fd1498Szrj 	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
823*38fd1498Szrj 	  *lhsp = new_lhs;
824*38fd1498Szrj 	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
825*38fd1498Szrj 	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
826*38fd1498Szrj 	  *def_rhs_basep = saved;
827*38fd1498Szrj 	  tidy_after_forward_propagate_addr (use_stmt);
828*38fd1498Szrj 	  /* Continue propagating into the RHS if this was not the
829*38fd1498Szrj 	     only use.  */
830*38fd1498Szrj 	  if (single_use_p)
831*38fd1498Szrj 	    return true;
832*38fd1498Szrj 	}
833*38fd1498Szrj       else
834*38fd1498Szrj 	/* We can have a struct assignment dereferencing our name twice.
835*38fd1498Szrj 	   Note that we didn't propagate into the lhs to not falsely
836*38fd1498Szrj 	   claim we did when propagating into the rhs.  */
837*38fd1498Szrj 	res = false;
838*38fd1498Szrj     }
839*38fd1498Szrj 
840*38fd1498Szrj   /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
841*38fd1498Szrj      nodes from the RHS.  */
842*38fd1498Szrj   tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
843*38fd1498Szrj   if (TREE_CODE (*rhsp) == ADDR_EXPR)
844*38fd1498Szrj     rhsp = &TREE_OPERAND (*rhsp, 0);
845*38fd1498Szrj   while (handled_component_p (*rhsp))
846*38fd1498Szrj     rhsp = &TREE_OPERAND (*rhsp, 0);
847*38fd1498Szrj   rhs = *rhsp;
848*38fd1498Szrj 
849*38fd1498Szrj   /* Now see if the RHS node is a MEM_REF using NAME.  If so,
850*38fd1498Szrj      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
851*38fd1498Szrj   if (TREE_CODE (rhs) == MEM_REF
852*38fd1498Szrj       && TREE_OPERAND (rhs, 0) == name)
853*38fd1498Szrj     {
854*38fd1498Szrj       tree def_rhs_base;
855*38fd1498Szrj       poly_int64 def_rhs_offset;
856*38fd1498Szrj       if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
857*38fd1498Szrj 							 &def_rhs_offset)))
858*38fd1498Szrj 	{
859*38fd1498Szrj 	  poly_offset_int off = mem_ref_offset (rhs);
860*38fd1498Szrj 	  tree new_ptr;
861*38fd1498Szrj 	  off += def_rhs_offset;
862*38fd1498Szrj 	  if (TREE_CODE (def_rhs_base) == MEM_REF)
863*38fd1498Szrj 	    {
864*38fd1498Szrj 	      off += mem_ref_offset (def_rhs_base);
865*38fd1498Szrj 	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
866*38fd1498Szrj 	    }
867*38fd1498Szrj 	  else
868*38fd1498Szrj 	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  /* Rewrite the MEM_REF in place, analogous to the LHS case above.  */
869*38fd1498Szrj 	  TREE_OPERAND (rhs, 0) = new_ptr;
870*38fd1498Szrj 	  TREE_OPERAND (rhs, 1)
871*38fd1498Szrj 	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
872*38fd1498Szrj 	  fold_stmt_inplace (use_stmt_gsi);
873*38fd1498Szrj 	  tidy_after_forward_propagate_addr (use_stmt);
874*38fd1498Szrj 	  return res;
875*38fd1498Szrj 	}
876*38fd1498Szrj       /* If the RHS is a plain dereference and the value type is the same as
877*38fd1498Szrj          that of the pointed-to type of the address we can put the
878*38fd1498Szrj 	 dereferenced address on the RHS preserving the original alias-type.  */
879*38fd1498Szrj       else if (integer_zerop (TREE_OPERAND (rhs, 1))
880*38fd1498Szrj 	       && ((gimple_assign_rhs1 (use_stmt) == rhs
881*38fd1498Szrj 		    && useless_type_conversion_p
882*38fd1498Szrj 		         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
883*38fd1498Szrj 		          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
884*38fd1498Szrj 		   || types_compatible_p (TREE_TYPE (rhs),
885*38fd1498Szrj 					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
886*38fd1498Szrj 	{
887*38fd1498Szrj 	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
888*38fd1498Szrj 	  tree new_offset, new_base, saved, new_rhs;
	  /* Same temporary-graft-and-unshare technique as in the LHS case
	     above; DEF_RHS is restored before returning.  */
889*38fd1498Szrj 	  while (handled_component_p (*def_rhs_basep))
890*38fd1498Szrj 	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
891*38fd1498Szrj 	  saved = *def_rhs_basep;
892*38fd1498Szrj 	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
893*38fd1498Szrj 	    {
894*38fd1498Szrj 	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
895*38fd1498Szrj 	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
896*38fd1498Szrj 					 TREE_OPERAND (*def_rhs_basep, 1));
897*38fd1498Szrj 	    }
898*38fd1498Szrj 	  else
899*38fd1498Szrj 	    {
900*38fd1498Szrj 	      new_base = build_fold_addr_expr (*def_rhs_basep);
901*38fd1498Szrj 	      new_offset = TREE_OPERAND (rhs, 1);
902*38fd1498Szrj 	    }
903*38fd1498Szrj 	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
904*38fd1498Szrj 				   new_base, new_offset);
905*38fd1498Szrj 	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
906*38fd1498Szrj 	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
907*38fd1498Szrj 	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
908*38fd1498Szrj 	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
909*38fd1498Szrj 	  *rhsp = new_rhs;
910*38fd1498Szrj 	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
911*38fd1498Szrj 	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
912*38fd1498Szrj 	  *def_rhs_basep = saved;
913*38fd1498Szrj 	  fold_stmt_inplace (use_stmt_gsi);
914*38fd1498Szrj 	  tidy_after_forward_propagate_addr (use_stmt);
915*38fd1498Szrj 	  return res;
916*38fd1498Szrj 	}
917*38fd1498Szrj     }
918*38fd1498Szrj 
919*38fd1498Szrj   /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
920*38fd1498Szrj      is nothing to do. */
921*38fd1498Szrj   if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
922*38fd1498Szrj       || gimple_assign_rhs1 (use_stmt) != name)
923*38fd1498Szrj     return false;
924*38fd1498Szrj 
925*38fd1498Szrj   /* The remaining cases are all for turning pointer arithmetic into
926*38fd1498Szrj      array indexing.  They only apply when we have the address of
927*38fd1498Szrj      element zero in an array.  If that is not the case then there
928*38fd1498Szrj      is nothing to do.  */
929*38fd1498Szrj   array_ref = TREE_OPERAND (def_rhs, 0);
930*38fd1498Szrj   if ((TREE_CODE (array_ref) != ARRAY_REF
931*38fd1498Szrj        || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
932*38fd1498Szrj        || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
933*38fd1498Szrj       && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
934*38fd1498Szrj     return false;
935*38fd1498Szrj 
936*38fd1498Szrj   rhs2 = gimple_assign_rhs2 (use_stmt);
937*38fd1498Szrj   /* Optimize &x[C1] p+ C2 to  &x p+ C3 with C3 = C1 * element_size + C2.  */
938*38fd1498Szrj   if (TREE_CODE (rhs2) == INTEGER_CST)
939*38fd1498Szrj     {
940*38fd1498Szrj       tree new_rhs = build1_loc (gimple_location (use_stmt),
941*38fd1498Szrj 				 ADDR_EXPR, TREE_TYPE (def_rhs),
942*38fd1498Szrj 				 fold_build2 (MEM_REF,
943*38fd1498Szrj 					      TREE_TYPE (TREE_TYPE (def_rhs)),
944*38fd1498Szrj 					      unshare_expr (def_rhs),
945*38fd1498Szrj 					      fold_convert (ptr_type_node,
946*38fd1498Szrj 							    rhs2)));
947*38fd1498Szrj       gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      /* Setting the RHS may have replaced the statement; re-fetch it
	 before the cleanups below.  */
948*38fd1498Szrj       use_stmt = gsi_stmt (*use_stmt_gsi);
949*38fd1498Szrj       update_stmt (use_stmt);
950*38fd1498Szrj       tidy_after_forward_propagate_addr (use_stmt);
951*38fd1498Szrj       return true;
952*38fd1498Szrj     }
953*38fd1498Szrj 
954*38fd1498Szrj   return false;
955*38fd1498Szrj }
956*38fd1498Szrj 
957*38fd1498Szrj /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
958*38fd1498Szrj 
959*38fd1498Szrj    Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
960*38fd1498Szrj    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
961*38fd1498Szrj    node or for recovery of array indexing from pointer arithmetic.
962*38fd1498Szrj 
963*38fd1498Szrj    PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
964*38fd1498Szrj    the single use in the previous invocation.  Pass true when calling
965*38fd1498Szrj    this as toplevel.
966*38fd1498Szrj 
967*38fd1498Szrj    Returns true, if all uses have been propagated into.  */
968*38fd1498Szrj 
969*38fd1498Szrj static bool
forward_propagate_addr_expr(tree name,tree rhs,bool parent_single_use_p)970*38fd1498Szrj forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
971*38fd1498Szrj {
972*38fd1498Szrj   imm_use_iterator iter;
973*38fd1498Szrj   gimple *use_stmt;
974*38fd1498Szrj   bool all = true;
  /* NAME only counts as single-use if the whole copy/conversion chain
     that led here was single-use as well.  */
975*38fd1498Szrj   bool single_use_p = parent_single_use_p && has_single_use (name);
976*38fd1498Szrj 
977*38fd1498Szrj   FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
978*38fd1498Szrj     {
979*38fd1498Szrj       bool result;
980*38fd1498Szrj       tree use_rhs;
981*38fd1498Szrj 
982*38fd1498Szrj       /* If the use is not in a simple assignment statement, then
983*38fd1498Szrj 	 there is nothing we can do.  */
984*38fd1498Szrj       if (!is_gimple_assign (use_stmt))
985*38fd1498Szrj 	{
	  /* Debug uses do not count against full propagation.  */
986*38fd1498Szrj 	  if (!is_gimple_debug (use_stmt))
987*38fd1498Szrj 	    all = false;
988*38fd1498Szrj 	  continue;
989*38fd1498Szrj 	}
990*38fd1498Szrj 
991*38fd1498Szrj       gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
992*38fd1498Szrj       result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
993*38fd1498Szrj 					      single_use_p);
994*38fd1498Szrj       /* If the use has moved to a different statement adjust
995*38fd1498Szrj 	 the update machinery for the old statement too.  */
996*38fd1498Szrj       if (use_stmt != gsi_stmt (gsi))
997*38fd1498Szrj 	{
998*38fd1498Szrj 	  update_stmt (use_stmt);
999*38fd1498Szrj 	  use_stmt = gsi_stmt (gsi);
1000*38fd1498Szrj 	}
1001*38fd1498Szrj       update_stmt (use_stmt);
1002*38fd1498Szrj       all &= result;
1003*38fd1498Szrj 
1004*38fd1498Szrj       /* Remove intermediate now unused copy and conversion chains.  */
1005*38fd1498Szrj       use_rhs = gimple_assign_rhs1 (use_stmt);
1006*38fd1498Szrj       if (result
1007*38fd1498Szrj 	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1008*38fd1498Szrj 	  && TREE_CODE (use_rhs) == SSA_NAME
1009*38fd1498Szrj 	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
1010*38fd1498Szrj 	{
1011*38fd1498Szrj 	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1012*38fd1498Szrj 	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
1013*38fd1498Szrj 	  release_defs (use_stmt);
1014*38fd1498Szrj 	  gsi_remove (&gsi, true);
1015*38fd1498Szrj 	}
1016*38fd1498Szrj     }
1017*38fd1498Szrj 
  /* Success means every use was rewritten and NAME is now dead.  */
1018*38fd1498Szrj   return all && has_zero_uses (name);
1019*38fd1498Szrj }
1020*38fd1498Szrj 
1021*38fd1498Szrj 
1022*38fd1498Szrj /* Helper function for simplify_gimple_switch.  Remove case labels that
1023*38fd1498Szrj    have values outside the range of the new type.  */
1024*38fd1498Szrj 
1025*38fd1498Szrj static void
simplify_gimple_switch_label_vec(gswitch * stmt,tree index_type)1026*38fd1498Szrj simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
1027*38fd1498Szrj {
1028*38fd1498Szrj   unsigned int branch_num = gimple_switch_num_labels (stmt);
1029*38fd1498Szrj   auto_vec<tree> labels (branch_num);
1030*38fd1498Szrj   unsigned int i, len;
1031*38fd1498Szrj 
1032*38fd1498Szrj   /* Collect the existing case labels in a VEC, and preprocess it as if
1033*38fd1498Szrj      we are gimplifying a GENERIC SWITCH_EXPR.  */
  /* Label 0 is the default label and is deliberately not collected.  */
1034*38fd1498Szrj   for (i = 1; i < branch_num; i++)
1035*38fd1498Szrj     labels.quick_push (gimple_switch_label (stmt, i));
1036*38fd1498Szrj   preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1037*38fd1498Szrj 
1038*38fd1498Szrj   /* If any labels were removed, replace the existing case labels
1039*38fd1498Szrj      in the GIMPLE_SWITCH statement with the correct ones.
1040*38fd1498Szrj      Note that the type updates were done in-place on the case labels,
1041*38fd1498Szrj      so we only have to replace the case labels in the GIMPLE_SWITCH
1042*38fd1498Szrj      if the number of labels changed.  */
1043*38fd1498Szrj   len = labels.length ();
1044*38fd1498Szrj   if (len < branch_num - 1)
1045*38fd1498Szrj     {
1046*38fd1498Szrj       bitmap target_blocks;
1047*38fd1498Szrj       edge_iterator ei;
1048*38fd1498Szrj       edge e;
1049*38fd1498Szrj 
1050*38fd1498Szrj       /* Corner case: *all* case labels have been removed as being
1051*38fd1498Szrj 	 out-of-range for INDEX_TYPE.  Push one label and let the
1052*38fd1498Szrj 	 CFG cleanups deal with this further.  */
1053*38fd1498Szrj       if (len == 0)
1054*38fd1498Szrj 	{
1055*38fd1498Szrj 	  tree label, elt;
1056*38fd1498Szrj 
	  /* Reuse the default destination so no new edge is needed.  */
1057*38fd1498Szrj 	  label = CASE_LABEL (gimple_switch_default_label (stmt));
1058*38fd1498Szrj 	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1059*38fd1498Szrj 	  labels.quick_push (elt);
1060*38fd1498Szrj 	  len = 1;
1061*38fd1498Szrj 	}
1062*38fd1498Szrj 
1063*38fd1498Szrj       for (i = 0; i < labels.length (); i++)
1064*38fd1498Szrj 	gimple_switch_set_label (stmt, i + 1, labels[i]);
      /* Clear out the now-unused trailing label slots.  */
1065*38fd1498Szrj       for (i++ ; i < branch_num; i++)
1066*38fd1498Szrj 	gimple_switch_set_label (stmt, i, NULL_TREE);
1067*38fd1498Szrj       gimple_switch_set_num_labels (stmt, len + 1);
1068*38fd1498Szrj 
1069*38fd1498Szrj       /* Cleanup any edges that are now dead.  */
      /* First record every block still targeted by some label
	 (including the default), then remove successor edges that
	 lead elsewhere.  */
1070*38fd1498Szrj       target_blocks = BITMAP_ALLOC (NULL);
1071*38fd1498Szrj       for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1072*38fd1498Szrj 	{
1073*38fd1498Szrj 	  tree elt = gimple_switch_label (stmt, i);
1074*38fd1498Szrj 	  basic_block target = label_to_block (CASE_LABEL (elt));
1075*38fd1498Szrj 	  bitmap_set_bit (target_blocks, target->index);
1076*38fd1498Szrj 	}
1077*38fd1498Szrj       for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1078*38fd1498Szrj 	{
1079*38fd1498Szrj 	  if (! bitmap_bit_p (target_blocks, e->dest->index))
1080*38fd1498Szrj 	    {
1081*38fd1498Szrj 	      remove_edge (e);
1082*38fd1498Szrj 	      cfg_changed = true;
1083*38fd1498Szrj 	      free_dominance_info (CDI_DOMINATORS);
1084*38fd1498Szrj 	    }
1085*38fd1498Szrj 	  else
1086*38fd1498Szrj 	    ei_next (&ei);
1087*38fd1498Szrj 	}
1088*38fd1498Szrj       BITMAP_FREE (target_blocks);
1089*38fd1498Szrj     }
1090*38fd1498Szrj }
1091*38fd1498Szrj 
1092*38fd1498Szrj /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1093*38fd1498Szrj    the condition which we may be able to optimize better.  */
1094*38fd1498Szrj 
1095*38fd1498Szrj static bool
simplify_gimple_switch(gswitch * stmt)1096*38fd1498Szrj simplify_gimple_switch (gswitch *stmt)
1097*38fd1498Szrj {
1098*38fd1498Szrj   /* The optimization that we really care about is removing unnecessary
1099*38fd1498Szrj      casts.  That will let us do much better in propagating the inferred
1100*38fd1498Szrj      constant at the switch target.  */
1101*38fd1498Szrj   tree cond = gimple_switch_index (stmt);
1102*38fd1498Szrj   if (TREE_CODE (cond) == SSA_NAME)
1103*38fd1498Szrj     {
1104*38fd1498Szrj       gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
1105*38fd1498Szrj       if (gimple_assign_cast_p (def_stmt))
1106*38fd1498Szrj 	{
1107*38fd1498Szrj 	  tree def = gimple_assign_rhs1 (def_stmt);
1108*38fd1498Szrj 	  if (TREE_CODE (def) != SSA_NAME)
1109*38fd1498Szrj 	    return false;
1110*38fd1498Szrj 
1111*38fd1498Szrj 	  /* If we have an extension or sign-change that preserves the
1112*38fd1498Szrj 	     values we check against then we can copy the source value into
1113*38fd1498Szrj 	     the switch.  */
1114*38fd1498Szrj 	  tree ti = TREE_TYPE (def);
1115*38fd1498Szrj 	  if (INTEGRAL_TYPE_P (ti)
1116*38fd1498Szrj 	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1117*38fd1498Szrj 	    {
1118*38fd1498Szrj 	      size_t n = gimple_switch_num_labels (stmt);
1119*38fd1498Szrj 	      tree min = NULL_TREE, max = NULL_TREE;
1120*38fd1498Szrj 	      if (n > 1)
1121*38fd1498Szrj 		{
1122*38fd1498Szrj 		  min = CASE_LOW (gimple_switch_label (stmt, 1));
1123*38fd1498Szrj 		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1124*38fd1498Szrj 		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1125*38fd1498Szrj 		  else
1126*38fd1498Szrj 		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1127*38fd1498Szrj 		}
1128*38fd1498Szrj 	      if ((!min || int_fits_type_p (min, ti))
1129*38fd1498Szrj 		  && (!max || int_fits_type_p (max, ti)))
1130*38fd1498Szrj 		{
1131*38fd1498Szrj 		  gimple_switch_set_index (stmt, def);
1132*38fd1498Szrj 		  simplify_gimple_switch_label_vec (stmt, ti);
1133*38fd1498Szrj 		  update_stmt (stmt);
1134*38fd1498Szrj 		  return true;
1135*38fd1498Szrj 		}
1136*38fd1498Szrj 	    }
1137*38fd1498Szrj 	}
1138*38fd1498Szrj     }
1139*38fd1498Szrj 
1140*38fd1498Szrj   return false;
1141*38fd1498Szrj }
1142*38fd1498Szrj 
1143*38fd1498Szrj /* For pointers p2 and p1 return p2 - p1 if the
1144*38fd1498Szrj    difference is known and constant, otherwise return NULL.  */
1145*38fd1498Szrj 
1146*38fd1498Szrj static tree
constant_pointer_difference(tree p1,tree p2)1147*38fd1498Szrj constant_pointer_difference (tree p1, tree p2)
1148*38fd1498Szrj {
1149*38fd1498Szrj   int i, j;
/* Maximum number of (base expression, accumulated offset) pairs recorded
   while walking each pointer's definition chain.  */
1150*38fd1498Szrj #define CPD_ITERATIONS 5
1151*38fd1498Szrj   tree exps[2][CPD_ITERATIONS];
1152*38fd1498Szrj   tree offs[2][CPD_ITERATIONS];
1153*38fd1498Szrj   int cnt[2];
1154*38fd1498Szrj 
1155*38fd1498Szrj   for (i = 0; i < 2; i++)
1156*38fd1498Szrj     {
1157*38fd1498Szrj       tree p = i ? p1 : p2;
1158*38fd1498Szrj       tree off = size_zero_node;
1159*38fd1498Szrj       gimple *stmt;
1160*38fd1498Szrj       enum tree_code code;
1161*38fd1498Szrj 
1162*38fd1498Szrj       /* For each of p1 and p2 we need to iterate at least
1163*38fd1498Szrj 	 twice, to handle ADDR_EXPR directly in p1/p2,
1164*38fd1498Szrj 	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1165*38fd1498Szrj 	 on definition's stmt RHS.  Iterate a few extra times.  */
1166*38fd1498Szrj       j = 0;
1167*38fd1498Szrj       do
1168*38fd1498Szrj 	{
1169*38fd1498Szrj 	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
1170*38fd1498Szrj 	    break;
1171*38fd1498Szrj 	  if (TREE_CODE (p) == ADDR_EXPR)
1172*38fd1498Szrj 	    {
1173*38fd1498Szrj 	      tree q = TREE_OPERAND (p, 0);
1174*38fd1498Szrj 	      poly_int64 offset;
1175*38fd1498Szrj 	      tree base = get_addr_base_and_unit_offset (q, &offset);
1176*38fd1498Szrj 	      if (base)
1177*38fd1498Szrj 		{
1178*38fd1498Szrj 		  q = base;
1179*38fd1498Szrj 		  if (maybe_ne (offset, 0))
1180*38fd1498Szrj 		    off = size_binop (PLUS_EXPR, off, size_int (offset));
1181*38fd1498Szrj 		}
	      /* &MEM[ssa + cst]: continue the walk at the SSA name,
		 folding the constant into the offset.  */
1182*38fd1498Szrj 	      if (TREE_CODE (q) == MEM_REF
1183*38fd1498Szrj 		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1184*38fd1498Szrj 		{
1185*38fd1498Szrj 		  p = TREE_OPERAND (q, 0);
1186*38fd1498Szrj 		  off = size_binop (PLUS_EXPR, off,
1187*38fd1498Szrj 				    wide_int_to_tree (sizetype,
1188*38fd1498Szrj 						      mem_ref_offset (q)));
1189*38fd1498Szrj 		}
1190*38fd1498Szrj 	      else
1191*38fd1498Szrj 		{
		  /* Reached a non-SSA base; record it and stop.  */
1192*38fd1498Szrj 		  exps[i][j] = q;
1193*38fd1498Szrj 		  offs[i][j++] = off;
1194*38fd1498Szrj 		  break;
1195*38fd1498Szrj 		}
1196*38fd1498Szrj 	    }
1197*38fd1498Szrj 	  if (TREE_CODE (p) != SSA_NAME)
1198*38fd1498Szrj 	    break;
	  /* Record the intermediate SSA name too so the chains can
	     match at any common point.  */
1199*38fd1498Szrj 	  exps[i][j] = p;
1200*38fd1498Szrj 	  offs[i][j++] = off;
1201*38fd1498Szrj 	  if (j == CPD_ITERATIONS)
1202*38fd1498Szrj 	    break;
1203*38fd1498Szrj 	  stmt = SSA_NAME_DEF_STMT (p);
1204*38fd1498Szrj 	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1205*38fd1498Szrj 	    break;
1206*38fd1498Szrj 	  code = gimple_assign_rhs_code (stmt);
1207*38fd1498Szrj 	  if (code == POINTER_PLUS_EXPR)
1208*38fd1498Szrj 	    {
1209*38fd1498Szrj 	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1210*38fd1498Szrj 		break;
1211*38fd1498Szrj 	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1212*38fd1498Szrj 	      p = gimple_assign_rhs1 (stmt);
1213*38fd1498Szrj 	    }
1214*38fd1498Szrj 	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1215*38fd1498Szrj 	    p = gimple_assign_rhs1 (stmt);
1216*38fd1498Szrj 	  else
1217*38fd1498Szrj 	    break;
1218*38fd1498Szrj 	}
1219*38fd1498Szrj       while (1);
1220*38fd1498Szrj       cnt[i] = j;
1221*38fd1498Szrj     }
1222*38fd1498Szrj 
  /* If the two chains share a common base expression, the pointer
     difference is the difference of the accumulated offsets.  */
1223*38fd1498Szrj   for (i = 0; i < cnt[0]; i++)
1224*38fd1498Szrj     for (j = 0; j < cnt[1]; j++)
1225*38fd1498Szrj       if (exps[0][i] == exps[1][j])
1226*38fd1498Szrj 	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1227*38fd1498Szrj 
1228*38fd1498Szrj   return NULL_TREE;
1229*38fd1498Szrj }
1230*38fd1498Szrj 
1231*38fd1498Szrj /* *GSI_P is a GIMPLE_CALL to a builtin function.
1232*38fd1498Szrj    Optimize
1233*38fd1498Szrj    memcpy (p, "abcd", 4);
1234*38fd1498Szrj    memset (p + 4, ' ', 3);
1235*38fd1498Szrj    into
1236*38fd1498Szrj    memcpy (p, "abcd   ", 7);
1237*38fd1498Szrj    call if the latter can be stored by pieces during expansion.  */
1238*38fd1498Szrj 
1239*38fd1498Szrj static bool
simplify_builtin_call(gimple_stmt_iterator * gsi_p,tree callee2)1240*38fd1498Szrj simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1241*38fd1498Szrj {
1242*38fd1498Szrj   gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
1243*38fd1498Szrj   tree vuse = gimple_vuse (stmt2);
1244*38fd1498Szrj   if (vuse == NULL)
1245*38fd1498Szrj     return false;
1246*38fd1498Szrj   stmt1 = SSA_NAME_DEF_STMT (vuse);
1247*38fd1498Szrj 
1248*38fd1498Szrj   switch (DECL_FUNCTION_CODE (callee2))
1249*38fd1498Szrj     {
1250*38fd1498Szrj     case BUILT_IN_MEMSET:
1251*38fd1498Szrj       if (gimple_call_num_args (stmt2) != 3
1252*38fd1498Szrj 	  || gimple_call_lhs (stmt2)
1253*38fd1498Szrj 	  || CHAR_BIT != 8
1254*38fd1498Szrj 	  || BITS_PER_UNIT != 8)
1255*38fd1498Szrj 	break;
1256*38fd1498Szrj       else
1257*38fd1498Szrj 	{
1258*38fd1498Szrj 	  tree callee1;
1259*38fd1498Szrj 	  tree ptr1, src1, str1, off1, len1, lhs1;
1260*38fd1498Szrj 	  tree ptr2 = gimple_call_arg (stmt2, 0);
1261*38fd1498Szrj 	  tree val2 = gimple_call_arg (stmt2, 1);
1262*38fd1498Szrj 	  tree len2 = gimple_call_arg (stmt2, 2);
1263*38fd1498Szrj 	  tree diff, vdef, new_str_cst;
1264*38fd1498Szrj 	  gimple *use_stmt;
1265*38fd1498Szrj 	  unsigned int ptr1_align;
1266*38fd1498Szrj 	  unsigned HOST_WIDE_INT src_len;
1267*38fd1498Szrj 	  char *src_buf;
1268*38fd1498Szrj 	  use_operand_p use_p;
1269*38fd1498Szrj 
1270*38fd1498Szrj 	  if (!tree_fits_shwi_p (val2)
1271*38fd1498Szrj 	      || !tree_fits_uhwi_p (len2)
1272*38fd1498Szrj 	      || compare_tree_int (len2, 1024) == 1)
1273*38fd1498Szrj 	    break;
1274*38fd1498Szrj 	  if (is_gimple_call (stmt1))
1275*38fd1498Szrj 	    {
1276*38fd1498Szrj 	      /* If first stmt is a call, it needs to be memcpy
1277*38fd1498Szrj 		 or mempcpy, with string literal as second argument and
1278*38fd1498Szrj 		 constant length.  */
1279*38fd1498Szrj 	      callee1 = gimple_call_fndecl (stmt1);
1280*38fd1498Szrj 	      if (callee1 == NULL_TREE
1281*38fd1498Szrj 		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1282*38fd1498Szrj 		  || gimple_call_num_args (stmt1) != 3)
1283*38fd1498Szrj 		break;
1284*38fd1498Szrj 	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1285*38fd1498Szrj 		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1286*38fd1498Szrj 		break;
1287*38fd1498Szrj 	      ptr1 = gimple_call_arg (stmt1, 0);
1288*38fd1498Szrj 	      src1 = gimple_call_arg (stmt1, 1);
1289*38fd1498Szrj 	      len1 = gimple_call_arg (stmt1, 2);
1290*38fd1498Szrj 	      lhs1 = gimple_call_lhs (stmt1);
1291*38fd1498Szrj 	      if (!tree_fits_uhwi_p (len1))
1292*38fd1498Szrj 		break;
1293*38fd1498Szrj 	      str1 = string_constant (src1, &off1);
1294*38fd1498Szrj 	      if (str1 == NULL_TREE)
1295*38fd1498Szrj 		break;
1296*38fd1498Szrj 	      if (!tree_fits_uhwi_p (off1)
1297*38fd1498Szrj 		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1298*38fd1498Szrj 		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1299*38fd1498Szrj 					     - tree_to_uhwi (off1)) > 0
1300*38fd1498Szrj 		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1301*38fd1498Szrj 		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1302*38fd1498Szrj 		     != TYPE_MODE (char_type_node))
1303*38fd1498Szrj 		break;
1304*38fd1498Szrj 	    }
1305*38fd1498Szrj 	  else if (gimple_assign_single_p (stmt1))
1306*38fd1498Szrj 	    {
1307*38fd1498Szrj 	      /* Otherwise look for length 1 memcpy optimized into
1308*38fd1498Szrj 		 assignment.  */
1309*38fd1498Szrj     	      ptr1 = gimple_assign_lhs (stmt1);
1310*38fd1498Szrj 	      src1 = gimple_assign_rhs1 (stmt1);
1311*38fd1498Szrj 	      if (TREE_CODE (ptr1) != MEM_REF
1312*38fd1498Szrj 		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1313*38fd1498Szrj 		  || !tree_fits_shwi_p (src1))
1314*38fd1498Szrj 		break;
1315*38fd1498Szrj 	      ptr1 = build_fold_addr_expr (ptr1);
1316*38fd1498Szrj 	      callee1 = NULL_TREE;
1317*38fd1498Szrj 	      len1 = size_one_node;
1318*38fd1498Szrj 	      lhs1 = NULL_TREE;
1319*38fd1498Szrj 	      off1 = size_zero_node;
1320*38fd1498Szrj 	      str1 = NULL_TREE;
1321*38fd1498Szrj 	    }
1322*38fd1498Szrj 	  else
1323*38fd1498Szrj 	    break;
1324*38fd1498Szrj 
1325*38fd1498Szrj 	  diff = constant_pointer_difference (ptr1, ptr2);
1326*38fd1498Szrj 	  if (diff == NULL && lhs1 != NULL)
1327*38fd1498Szrj 	    {
1328*38fd1498Szrj 	      diff = constant_pointer_difference (lhs1, ptr2);
1329*38fd1498Szrj 	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1330*38fd1498Szrj 		  && diff != NULL)
1331*38fd1498Szrj 		diff = size_binop (PLUS_EXPR, diff,
1332*38fd1498Szrj 				   fold_convert (sizetype, len1));
1333*38fd1498Szrj 	    }
1334*38fd1498Szrj 	  /* If the difference between the second and first destination pointer
1335*38fd1498Szrj 	     is not constant, or is bigger than memcpy length, bail out.  */
1336*38fd1498Szrj 	  if (diff == NULL
1337*38fd1498Szrj 	      || !tree_fits_uhwi_p (diff)
1338*38fd1498Szrj 	      || tree_int_cst_lt (len1, diff)
1339*38fd1498Szrj 	      || compare_tree_int (diff, 1024) == 1)
1340*38fd1498Szrj 	    break;
1341*38fd1498Szrj 
1342*38fd1498Szrj 	  /* Use maximum of difference plus memset length and memcpy length
1343*38fd1498Szrj 	     as the new memcpy length, if it is too big, bail out.  */
1344*38fd1498Szrj 	  src_len = tree_to_uhwi (diff);
1345*38fd1498Szrj 	  src_len += tree_to_uhwi (len2);
1346*38fd1498Szrj 	  if (src_len < tree_to_uhwi (len1))
1347*38fd1498Szrj 	    src_len = tree_to_uhwi (len1);
1348*38fd1498Szrj 	  if (src_len > 1024)
1349*38fd1498Szrj 	    break;
1350*38fd1498Szrj 
1351*38fd1498Szrj 	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
1352*38fd1498Szrj 	     with bigger length will return different result.  */
1353*38fd1498Szrj 	  if (lhs1 != NULL_TREE
1354*38fd1498Szrj 	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1355*38fd1498Szrj 	      && (TREE_CODE (lhs1) != SSA_NAME
1356*38fd1498Szrj 		  || !single_imm_use (lhs1, &use_p, &use_stmt)
1357*38fd1498Szrj 		  || use_stmt != stmt2))
1358*38fd1498Szrj 	    break;
1359*38fd1498Szrj 
1360*38fd1498Szrj 	  /* If anything reads memory in between memcpy and memset
1361*38fd1498Szrj 	     call, the modified memcpy call might change it.  */
1362*38fd1498Szrj 	  vdef = gimple_vdef (stmt1);
1363*38fd1498Szrj 	  if (vdef != NULL
1364*38fd1498Szrj 	      && (!single_imm_use (vdef, &use_p, &use_stmt)
1365*38fd1498Szrj 		  || use_stmt != stmt2))
1366*38fd1498Szrj 	    break;
1367*38fd1498Szrj 
1368*38fd1498Szrj 	  ptr1_align = get_pointer_alignment (ptr1);
1369*38fd1498Szrj 	  /* Construct the new source string literal.  */
1370*38fd1498Szrj 	  src_buf = XALLOCAVEC (char, src_len + 1);
1371*38fd1498Szrj 	  if (callee1)
1372*38fd1498Szrj 	    memcpy (src_buf,
1373*38fd1498Szrj 		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1374*38fd1498Szrj 		    tree_to_uhwi (len1));
1375*38fd1498Szrj 	  else
1376*38fd1498Szrj 	    src_buf[0] = tree_to_shwi (src1);
1377*38fd1498Szrj 	  memset (src_buf + tree_to_uhwi (diff),
1378*38fd1498Szrj 		  tree_to_shwi (val2), tree_to_uhwi (len2));
1379*38fd1498Szrj 	  src_buf[src_len] = '\0';
1380*38fd1498Szrj 	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1381*38fd1498Szrj 	     handle embedded '\0's.  */
1382*38fd1498Szrj 	  if (strlen (src_buf) != src_len)
1383*38fd1498Szrj 	    break;
1384*38fd1498Szrj 	  rtl_profile_for_bb (gimple_bb (stmt2));
1385*38fd1498Szrj 	  /* If the new memcpy wouldn't be emitted by storing the literal
1386*38fd1498Szrj 	     by pieces, this optimization might enlarge .rodata too much,
1387*38fd1498Szrj 	     as commonly used string literals couldn't be shared any
1388*38fd1498Szrj 	     longer.  */
1389*38fd1498Szrj 	  if (!can_store_by_pieces (src_len,
1390*38fd1498Szrj 				    builtin_strncpy_read_str,
1391*38fd1498Szrj 				    src_buf, ptr1_align, false))
1392*38fd1498Szrj 	    break;
1393*38fd1498Szrj 
1394*38fd1498Szrj 	  new_str_cst = build_string_literal (src_len, src_buf);
1395*38fd1498Szrj 	  if (callee1)
1396*38fd1498Szrj 	    {
1397*38fd1498Szrj 	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1398*38fd1498Szrj 		 memset call.  */
1399*38fd1498Szrj 	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1400*38fd1498Szrj 		gimple_call_set_lhs (stmt1, NULL_TREE);
1401*38fd1498Szrj 	      gimple_call_set_arg (stmt1, 1, new_str_cst);
1402*38fd1498Szrj 	      gimple_call_set_arg (stmt1, 2,
1403*38fd1498Szrj 				   build_int_cst (TREE_TYPE (len1), src_len));
1404*38fd1498Szrj 	      update_stmt (stmt1);
1405*38fd1498Szrj 	      unlink_stmt_vdef (stmt2);
1406*38fd1498Szrj 	      gsi_remove (gsi_p, true);
1407*38fd1498Szrj 	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1408*38fd1498Szrj 	      release_defs (stmt2);
1409*38fd1498Szrj 	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1410*38fd1498Szrj 		{
1411*38fd1498Szrj 		  fwprop_invalidate_lattice (lhs1);
1412*38fd1498Szrj 		  release_ssa_name (lhs1);
1413*38fd1498Szrj 		}
1414*38fd1498Szrj 	      return true;
1415*38fd1498Szrj 	    }
1416*38fd1498Szrj 	  else
1417*38fd1498Szrj 	    {
1418*38fd1498Szrj 	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
1419*38fd1498Szrj 		 assignment, remove STMT1 and change memset call into
1420*38fd1498Szrj 		 memcpy call.  */
1421*38fd1498Szrj 	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1422*38fd1498Szrj 
1423*38fd1498Szrj 	      if (!is_gimple_val (ptr1))
1424*38fd1498Szrj 		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1425*38fd1498Szrj 						 true, GSI_SAME_STMT);
1426*38fd1498Szrj 	      gimple_call_set_fndecl (stmt2,
1427*38fd1498Szrj 				      builtin_decl_explicit (BUILT_IN_MEMCPY));
1428*38fd1498Szrj 	      gimple_call_set_arg (stmt2, 0, ptr1);
1429*38fd1498Szrj 	      gimple_call_set_arg (stmt2, 1, new_str_cst);
1430*38fd1498Szrj 	      gimple_call_set_arg (stmt2, 2,
1431*38fd1498Szrj 				   build_int_cst (TREE_TYPE (len2), src_len));
1432*38fd1498Szrj 	      unlink_stmt_vdef (stmt1);
1433*38fd1498Szrj 	      gsi_remove (&gsi, true);
1434*38fd1498Szrj 	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1435*38fd1498Szrj 	      release_defs (stmt1);
1436*38fd1498Szrj 	      update_stmt (stmt2);
1437*38fd1498Szrj 	      return false;
1438*38fd1498Szrj 	    }
1439*38fd1498Szrj 	}
1440*38fd1498Szrj       break;
1441*38fd1498Szrj     default:
1442*38fd1498Szrj       break;
1443*38fd1498Szrj     }
1444*38fd1498Szrj   return false;
1445*38fd1498Szrj }
1446*38fd1498Szrj 
1447*38fd1498Szrj /* Given a ssa_name in NAME see if it was defined by an assignment and
1448*38fd1498Szrj    set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1449*38fd1498Szrj    to the second operand on the rhs. */
1450*38fd1498Szrj 
1451*38fd1498Szrj static inline void
defcodefor_name(tree name,enum tree_code * code,tree * arg1,tree * arg2)1452*38fd1498Szrj defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1453*38fd1498Szrj {
1454*38fd1498Szrj   gimple *def;
1455*38fd1498Szrj   enum tree_code code1;
1456*38fd1498Szrj   tree arg11;
1457*38fd1498Szrj   tree arg21;
1458*38fd1498Szrj   tree arg31;
1459*38fd1498Szrj   enum gimple_rhs_class grhs_class;
1460*38fd1498Szrj 
1461*38fd1498Szrj   code1 = TREE_CODE (name);
1462*38fd1498Szrj   arg11 = name;
1463*38fd1498Szrj   arg21 = NULL_TREE;
1464*38fd1498Szrj   arg31 = NULL_TREE;
1465*38fd1498Szrj   grhs_class = get_gimple_rhs_class (code1);
1466*38fd1498Szrj 
1467*38fd1498Szrj   if (code1 == SSA_NAME)
1468*38fd1498Szrj     {
1469*38fd1498Szrj       def = SSA_NAME_DEF_STMT (name);
1470*38fd1498Szrj 
1471*38fd1498Szrj       if (def && is_gimple_assign (def)
1472*38fd1498Szrj 	  && can_propagate_from (def))
1473*38fd1498Szrj 	{
1474*38fd1498Szrj 	  code1 = gimple_assign_rhs_code (def);
1475*38fd1498Szrj 	  arg11 = gimple_assign_rhs1 (def);
1476*38fd1498Szrj           arg21 = gimple_assign_rhs2 (def);
1477*38fd1498Szrj           arg31 = gimple_assign_rhs3 (def);
1478*38fd1498Szrj 	}
1479*38fd1498Szrj     }
1480*38fd1498Szrj   else if (grhs_class != GIMPLE_SINGLE_RHS)
1481*38fd1498Szrj     code1 = ERROR_MARK;
1482*38fd1498Szrj 
1483*38fd1498Szrj   *code = code1;
1484*38fd1498Szrj   *arg1 = arg11;
1485*38fd1498Szrj   if (arg2)
1486*38fd1498Szrj     *arg2 = arg21;
1487*38fd1498Szrj   if (arg31)
1488*38fd1498Szrj     *code = ERROR_MARK;
1489*38fd1498Szrj }
1490*38fd1498Szrj 
1491*38fd1498Szrj 
1492*38fd1498Szrj /* Recognize rotation patterns.  Return true if a transformation
1493*38fd1498Szrj    applied, otherwise return false.
1494*38fd1498Szrj 
1495*38fd1498Szrj    We are looking for X with unsigned type T with bitsize B, OP being
1496*38fd1498Szrj    +, | or ^, some type T2 wider than T.  For:
1497*38fd1498Szrj    (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
1498*38fd1498Szrj    ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B
1499*38fd1498Szrj 
1500*38fd1498Szrj    transform these into:
1501*38fd1498Szrj    X r<< CNT1
1502*38fd1498Szrj 
1503*38fd1498Szrj    Or for:
1504*38fd1498Szrj    (X << Y) OP (X >> (B - Y))
1505*38fd1498Szrj    (X << (int) Y) OP (X >> (int) (B - Y))
1506*38fd1498Szrj    ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1507*38fd1498Szrj    ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1508*38fd1498Szrj    (X << Y) | (X >> ((-Y) & (B - 1)))
1509*38fd1498Szrj    (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1510*38fd1498Szrj    ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1511*38fd1498Szrj    ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1512*38fd1498Szrj 
1513*38fd1498Szrj    transform these into:
1514*38fd1498Szrj    X r<< Y
1515*38fd1498Szrj 
1516*38fd1498Szrj    Or for:
1517*38fd1498Szrj    (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
1518*38fd1498Szrj    (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
1519*38fd1498Szrj    ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1520*38fd1498Szrj    ((T) ((T2) X << (int) (Y & (B - 1)))) \
1521*38fd1498Szrj      | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1522*38fd1498Szrj 
1523*38fd1498Szrj    transform these into:
1524*38fd1498Szrj    X r<< (Y & (B - 1))
1525*38fd1498Szrj 
1526*38fd1498Szrj    Note, in the patterns with T2 type, the type of OP operands
1527*38fd1498Szrj    might be even a signed type, but should have precision B.
1528*38fd1498Szrj    Expressions with & (B - 1) should be recognized only if B is
1529*38fd1498Szrj    a power of 2.  */
1530*38fd1498Szrj 
1531*38fd1498Szrj static bool
simplify_rotate(gimple_stmt_iterator * gsi)1532*38fd1498Szrj simplify_rotate (gimple_stmt_iterator *gsi)
1533*38fd1498Szrj {
1534*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi);
1535*38fd1498Szrj   tree arg[2], rtype, rotcnt = NULL_TREE;
1536*38fd1498Szrj   tree def_arg1[2], def_arg2[2];
1537*38fd1498Szrj   enum tree_code def_code[2];
1538*38fd1498Szrj   tree lhs;
1539*38fd1498Szrj   int i;
1540*38fd1498Szrj   bool swapped_p = false;
1541*38fd1498Szrj   gimple *g;
1542*38fd1498Szrj 
1543*38fd1498Szrj   arg[0] = gimple_assign_rhs1 (stmt);
1544*38fd1498Szrj   arg[1] = gimple_assign_rhs2 (stmt);
1545*38fd1498Szrj   rtype = TREE_TYPE (arg[0]);
1546*38fd1498Szrj 
1547*38fd1498Szrj   /* Only create rotates in complete modes.  Other cases are not
1548*38fd1498Szrj      expanded properly.  */
1549*38fd1498Szrj   if (!INTEGRAL_TYPE_P (rtype)
1550*38fd1498Szrj       || !type_has_mode_precision_p (rtype))
1551*38fd1498Szrj     return false;
1552*38fd1498Szrj 
	  /* Find the defining code/operands of both OP operands.  */
1553*38fd1498Szrj   for (i = 0; i < 2; i++)
1554*38fd1498Szrj     defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1555*38fd1498Szrj 
1556*38fd1498Szrj   /* Look through narrowing conversions.  */
1557*38fd1498Szrj   if (CONVERT_EXPR_CODE_P (def_code[0])
1558*38fd1498Szrj       && CONVERT_EXPR_CODE_P (def_code[1])
1559*38fd1498Szrj       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1560*38fd1498Szrj       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1561*38fd1498Szrj       && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1562*38fd1498Szrj 	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1563*38fd1498Szrj       && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1564*38fd1498Szrj       && has_single_use (arg[0])
1565*38fd1498Szrj       && has_single_use (arg[1]))
1566*38fd1498Szrj     {
1567*38fd1498Szrj       for (i = 0; i < 2; i++)
1568*38fd1498Szrj 	{
1569*38fd1498Szrj 	  arg[i] = def_arg1[i];
1570*38fd1498Szrj 	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1571*38fd1498Szrj 	}
1572*38fd1498Szrj     }
1573*38fd1498Szrj 
1574*38fd1498Szrj   /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
1575*38fd1498Szrj   for (i = 0; i < 2; i++)
1576*38fd1498Szrj     if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1577*38fd1498Szrj       return false;
1578*38fd1498Szrj     else if (!has_single_use (arg[i]))
1579*38fd1498Szrj       return false;
	  /* Two shifts of the same direction never form a rotate.  */
1580*38fd1498Szrj   if (def_code[0] == def_code[1])
1581*38fd1498Szrj     return false;
1582*38fd1498Szrj 
1583*38fd1498Szrj   /* If we've looked through narrowing conversions before, look through
1584*38fd1498Szrj      widening conversions from unsigned type with the same precision
1585*38fd1498Szrj      as rtype here.  */
1586*38fd1498Szrj   if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1587*38fd1498Szrj     for (i = 0; i < 2; i++)
1588*38fd1498Szrj       {
1589*38fd1498Szrj 	tree tem;
1590*38fd1498Szrj 	enum tree_code code;
1591*38fd1498Szrj 	defcodefor_name (def_arg1[i], &code, &tem, NULL);
1592*38fd1498Szrj 	if (!CONVERT_EXPR_CODE_P (code)
1593*38fd1498Szrj 	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1594*38fd1498Szrj 	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1595*38fd1498Szrj 	  return false;
1596*38fd1498Szrj 	def_arg1[i] = tem;
1597*38fd1498Szrj       }
1598*38fd1498Szrj   /* Both shifts have to use the same first operand.  */
1599*38fd1498Szrj   if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1600*38fd1498Szrj       || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1601*38fd1498Szrj 			      TREE_TYPE (def_arg1[1])))
1602*38fd1498Szrj     return false;
	  /* RSHIFT_EXPR on a signed type would be arithmetic; the rotate
	     patterns in the function comment require an unsigned X.  */
1603*38fd1498Szrj   if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1604*38fd1498Szrj     return false;
1605*38fd1498Szrj 
1606*38fd1498Szrj   /* CNT1 + CNT2 == B case above.  */
1607*38fd1498Szrj   if (tree_fits_uhwi_p (def_arg2[0])
1608*38fd1498Szrj       && tree_fits_uhwi_p (def_arg2[1])
1609*38fd1498Szrj       && tree_to_uhwi (def_arg2[0])
1610*38fd1498Szrj 	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1611*38fd1498Szrj     rotcnt = def_arg2[0];
1612*38fd1498Szrj   else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1613*38fd1498Szrj 	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
1614*38fd1498Szrj     return false;
1615*38fd1498Szrj   else
1616*38fd1498Szrj     {
1617*38fd1498Szrj       tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1618*38fd1498Szrj       enum tree_code cdef_code[2];
1619*38fd1498Szrj       /* Look through conversion of the shift count argument.
1620*38fd1498Szrj 	 The C/C++ FE cast any shift count argument to integer_type_node.
1621*38fd1498Szrj 	 The only problem might be if the shift count type maximum value
1622*38fd1498Szrj 	 is equal or smaller than number of bits in rtype.  */
1623*38fd1498Szrj       for (i = 0; i < 2; i++)
1624*38fd1498Szrj 	{
1625*38fd1498Szrj 	  def_arg2_alt[i] = def_arg2[i];
1626*38fd1498Szrj 	  defcodefor_name (def_arg2[i], &cdef_code[i],
1627*38fd1498Szrj 			   &cdef_arg1[i], &cdef_arg2[i]);
1628*38fd1498Szrj 	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
1629*38fd1498Szrj 	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1630*38fd1498Szrj 	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1631*38fd1498Szrj 		 > floor_log2 (TYPE_PRECISION (rtype))
1632*38fd1498Szrj 	      && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
1633*38fd1498Szrj 	    {
1634*38fd1498Szrj 	      def_arg2_alt[i] = cdef_arg1[i];
1635*38fd1498Szrj 	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1636*38fd1498Szrj 			       &cdef_arg1[i], &cdef_arg2[i]);
1637*38fd1498Szrj 	    }
1638*38fd1498Szrj 	}
1639*38fd1498Szrj       for (i = 0; i < 2; i++)
1640*38fd1498Szrj 	/* Check for one shift count being Y and the other B - Y,
1641*38fd1498Szrj 	   with optional casts.  */
1642*38fd1498Szrj 	if (cdef_code[i] == MINUS_EXPR
1643*38fd1498Szrj 	    && tree_fits_shwi_p (cdef_arg1[i])
1644*38fd1498Szrj 	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1645*38fd1498Szrj 	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1646*38fd1498Szrj 	  {
1647*38fd1498Szrj 	    tree tem;
1648*38fd1498Szrj 	    enum tree_code code;
1649*38fd1498Szrj 
1650*38fd1498Szrj 	    if (cdef_arg2[i] == def_arg2[1 - i]
1651*38fd1498Szrj 		|| cdef_arg2[i] == def_arg2_alt[1 - i])
1652*38fd1498Szrj 	      {
1653*38fd1498Szrj 		rotcnt = cdef_arg2[i];
1654*38fd1498Szrj 		break;
1655*38fd1498Szrj 	      }
	    /* Y itself may be hidden behind one more widening cast.  */
1656*38fd1498Szrj 	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1657*38fd1498Szrj 	    if (CONVERT_EXPR_CODE_P (code)
1658*38fd1498Szrj 		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
1659*38fd1498Szrj 		&& TYPE_PRECISION (TREE_TYPE (tem))
1660*38fd1498Szrj 		 > floor_log2 (TYPE_PRECISION (rtype))
1661*38fd1498Szrj 		&& type_has_mode_precision_p (TREE_TYPE (tem))
1662*38fd1498Szrj 		&& (tem == def_arg2[1 - i]
1663*38fd1498Szrj 		    || tem == def_arg2_alt[1 - i]))
1664*38fd1498Szrj 	      {
1665*38fd1498Szrj 		rotcnt = tem;
1666*38fd1498Szrj 		break;
1667*38fd1498Szrj 	      }
1668*38fd1498Szrj 	  }
1669*38fd1498Szrj 	/* The above sequence isn't safe for Y being 0,
1670*38fd1498Szrj 	   because then one of the shifts triggers undefined behavior.
1671*38fd1498Szrj 	   This alternative is safe even for rotation count of 0.
1672*38fd1498Szrj 	   One shift count is Y and the other (-Y) & (B - 1).
1673*38fd1498Szrj 	   Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
1674*38fd1498Szrj 	else if (cdef_code[i] == BIT_AND_EXPR
1675*38fd1498Szrj 		 && pow2p_hwi (TYPE_PRECISION (rtype))
1676*38fd1498Szrj 		 && tree_fits_shwi_p (cdef_arg2[i])
1677*38fd1498Szrj 		 && tree_to_shwi (cdef_arg2[i])
1678*38fd1498Szrj 		    == TYPE_PRECISION (rtype) - 1
1679*38fd1498Szrj 		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1680*38fd1498Szrj 		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1681*38fd1498Szrj 	  {
1682*38fd1498Szrj 	    tree tem;
1683*38fd1498Szrj 	    enum tree_code code;
1684*38fd1498Szrj 
1685*38fd1498Szrj 	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1686*38fd1498Szrj 	    if (CONVERT_EXPR_CODE_P (code)
1687*38fd1498Szrj 		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
1688*38fd1498Szrj 		&& TYPE_PRECISION (TREE_TYPE (tem))
1689*38fd1498Szrj 		 > floor_log2 (TYPE_PRECISION (rtype))
1690*38fd1498Szrj 		&& type_has_mode_precision_p (TREE_TYPE (tem)))
1691*38fd1498Szrj 	      defcodefor_name (tem, &code, &tem, NULL);
1692*38fd1498Szrj 
1693*38fd1498Szrj 	    if (code == NEGATE_EXPR)
1694*38fd1498Szrj 	      {
1695*38fd1498Szrj 		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1696*38fd1498Szrj 		  {
1697*38fd1498Szrj 		    rotcnt = tem;
1698*38fd1498Szrj 		    break;
1699*38fd1498Szrj 		  }
1700*38fd1498Szrj 		tree tem2;
		/* The negated operand may again be a cast of Y.  */
1701*38fd1498Szrj 		defcodefor_name (tem, &code, &tem2, NULL);
1702*38fd1498Szrj 		if (CONVERT_EXPR_CODE_P (code)
1703*38fd1498Szrj 		    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
1704*38fd1498Szrj 		    && TYPE_PRECISION (TREE_TYPE (tem2))
1705*38fd1498Szrj 		       > floor_log2 (TYPE_PRECISION (rtype))
1706*38fd1498Szrj 		    && type_has_mode_precision_p (TREE_TYPE (tem2)))
1707*38fd1498Szrj 		  {
1708*38fd1498Szrj 		    if (tem2 == def_arg2[1 - i]
1709*38fd1498Szrj 			|| tem2 == def_arg2_alt[1 - i])
1710*38fd1498Szrj 		      {
1711*38fd1498Szrj 			rotcnt = tem2;
1712*38fd1498Szrj 			break;
1713*38fd1498Szrj 		      }
1714*38fd1498Szrj 		  }
1715*38fd1498Szrj 		else
1716*38fd1498Szrj 		  tem2 = NULL_TREE;
1717*38fd1498Szrj 
		/* (Y & (B - 1)) on the other shift paired with
		   (-Y) & (B - 1) here.  */
1718*38fd1498Szrj 		if (cdef_code[1 - i] == BIT_AND_EXPR
1719*38fd1498Szrj 		    && tree_fits_shwi_p (cdef_arg2[1 - i])
1720*38fd1498Szrj 		    && tree_to_shwi (cdef_arg2[1 - i])
1721*38fd1498Szrj 		       == TYPE_PRECISION (rtype) - 1
1722*38fd1498Szrj 		    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
1723*38fd1498Szrj 		  {
1724*38fd1498Szrj 		    if (tem == cdef_arg1[1 - i]
1725*38fd1498Szrj 			|| tem2 == cdef_arg1[1 - i])
1726*38fd1498Szrj 		      {
1727*38fd1498Szrj 			rotcnt = def_arg2[1 - i];
1728*38fd1498Szrj 			break;
1729*38fd1498Szrj 		      }
1730*38fd1498Szrj 		    tree tem3;
1731*38fd1498Szrj 		    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
1732*38fd1498Szrj 		    if (CONVERT_EXPR_CODE_P (code)
1733*38fd1498Szrj 			&& INTEGRAL_TYPE_P (TREE_TYPE (tem3))
1734*38fd1498Szrj 			&& TYPE_PRECISION (TREE_TYPE (tem3))
1735*38fd1498Szrj 			   > floor_log2 (TYPE_PRECISION (rtype))
1736*38fd1498Szrj 			&& type_has_mode_precision_p (TREE_TYPE (tem3)))
1737*38fd1498Szrj 		      {
1738*38fd1498Szrj 			if (tem == tem3 || tem2 == tem3)
1739*38fd1498Szrj 			  {
1740*38fd1498Szrj 			    rotcnt = def_arg2[1 - i];
1741*38fd1498Szrj 			    break;
1742*38fd1498Szrj 			  }
1743*38fd1498Szrj 		      }
1744*38fd1498Szrj 		  }
1745*38fd1498Szrj 	      }
1746*38fd1498Szrj 	  }
	  /* No recognized Y / B - Y or Y / (-Y) & (B - 1) pairing.  */
1747*38fd1498Szrj       if (rotcnt == NULL_TREE)
1748*38fd1498Szrj 	return false;
	  /* The matched count belongs to the shift at index 1 - i, so a
	     break at i == 0 means the rotate direction must be flipped
	     relative to def_code[0] below.  */
1749*38fd1498Szrj       swapped_p = i != 1;
1750*38fd1498Szrj     }
1751*38fd1498Szrj 
	  /* Give ROTCNT the type the rotate expects for its count.  */
1752*38fd1498Szrj   if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1753*38fd1498Szrj 				  TREE_TYPE (rotcnt)))
1754*38fd1498Szrj     {
1755*38fd1498Szrj       g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
1756*38fd1498Szrj 			       NOP_EXPR, rotcnt);
1757*38fd1498Szrj       gsi_insert_before (gsi, g, GSI_SAME_STMT);
1758*38fd1498Szrj       rotcnt = gimple_assign_lhs (g);
1759*38fd1498Szrj     }
1760*38fd1498Szrj   lhs = gimple_assign_lhs (stmt);
	  /* If we looked through conversions, rotate in the wider type and
	     convert the result back to RTYPE afterwards.  */
1761*38fd1498Szrj   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1762*38fd1498Szrj     lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
1763*38fd1498Szrj   g = gimple_build_assign (lhs,
1764*38fd1498Szrj 			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1765*38fd1498Szrj 			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
1766*38fd1498Szrj   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1767*38fd1498Szrj     {
1768*38fd1498Szrj       gsi_insert_before (gsi, g, GSI_SAME_STMT);
1769*38fd1498Szrj       g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
1770*38fd1498Szrj     }
1771*38fd1498Szrj   gsi_replace (gsi, g, false);
1772*38fd1498Szrj   return true;
1773*38fd1498Szrj }
1774*38fd1498Szrj 
1775*38fd1498Szrj /* Combine an element access with a shuffle.  Returns true if there were
1776*38fd1498Szrj    any changes made, else it returns false.  */
1777*38fd1498Szrj 
1778*38fd1498Szrj static bool
simplify_bitfield_ref(gimple_stmt_iterator * gsi)1779*38fd1498Szrj simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1780*38fd1498Szrj {
1781*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi);
1782*38fd1498Szrj   gimple *def_stmt;
1783*38fd1498Szrj   tree op, op0, op1, op2;
1784*38fd1498Szrj   tree elem_type;
1785*38fd1498Szrj   unsigned idx, size;
1786*38fd1498Szrj   enum tree_code code;
1787*38fd1498Szrj 
1788*38fd1498Szrj   op = gimple_assign_rhs1 (stmt);
1789*38fd1498Szrj   gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1790*38fd1498Szrj 
	  /* Only interested in extracting from an SSA vector value.  */
1791*38fd1498Szrj   op0 = TREE_OPERAND (op, 0);
1792*38fd1498Szrj   if (TREE_CODE (op0) != SSA_NAME
1793*38fd1498Szrj       || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1794*38fd1498Szrj     return false;
1795*38fd1498Szrj 
1796*38fd1498Szrj   def_stmt = get_prop_source_stmt (op0, false, NULL);
1797*38fd1498Szrj   if (!def_stmt || !can_propagate_from (def_stmt))
1798*38fd1498Szrj     return false;
1799*38fd1498Szrj 
1800*38fd1498Szrj   op1 = TREE_OPERAND (op, 1);
1801*38fd1498Szrj   op2 = TREE_OPERAND (op, 2);
1802*38fd1498Szrj   code = gimple_assign_rhs_code (def_stmt);
1803*38fd1498Szrj 
	  /* A BIT_FIELD_REF of a CONSTRUCTOR may fold directly to one of
	     its elements (or a sub-vector); let fold_ternary try.  */
1804*38fd1498Szrj   if (code == CONSTRUCTOR)
1805*38fd1498Szrj     {
1806*38fd1498Szrj       tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1807*38fd1498Szrj 			       gimple_assign_rhs1 (def_stmt), op1, op2);
1808*38fd1498Szrj       if (!tem || !valid_gimple_rhs_p (tem))
1809*38fd1498Szrj 	return false;
1810*38fd1498Szrj       gimple_assign_set_rhs_from_tree (gsi, tem);
1811*38fd1498Szrj       update_stmt (gsi_stmt (*gsi));
1812*38fd1498Szrj       return true;
1813*38fd1498Szrj     }
1814*38fd1498Szrj 
	  /* Beyond this point only whole-element extracts are handled:
	     the referenced type must be the vector's element type and the
	     reference must span exactly one element.  */
1815*38fd1498Szrj   elem_type = TREE_TYPE (TREE_TYPE (op0));
1816*38fd1498Szrj   if (TREE_TYPE (op) != elem_type)
1817*38fd1498Szrj     return false;
1818*38fd1498Szrj 
1819*38fd1498Szrj   size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1820*38fd1498Szrj   if (maybe_ne (bit_field_size (op), size))
1821*38fd1498Szrj     return false;
1822*38fd1498Szrj 
	  /* Extracting lane IDX of a VEC_PERM_EXPR with a constant mask:
	     map IDX through the mask and read the selected lane directly
	     from the corresponding permute input.  */
1823*38fd1498Szrj   if (code == VEC_PERM_EXPR
1824*38fd1498Szrj       && constant_multiple_p (bit_field_offset (op), size, &idx))
1825*38fd1498Szrj     {
1826*38fd1498Szrj       tree p, m, tem;
1827*38fd1498Szrj       unsigned HOST_WIDE_INT nelts;
1828*38fd1498Szrj       m = gimple_assign_rhs3 (def_stmt);
1829*38fd1498Szrj       if (TREE_CODE (m) != VECTOR_CST
1830*38fd1498Szrj 	  || !VECTOR_CST_NELTS (m).is_constant (&nelts))
1831*38fd1498Szrj 	return false;
1832*38fd1498Szrj       idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
	  /* Mask elements select modulo twice the element count; values
	     >= NELTS pick from the second permute input.  */
1833*38fd1498Szrj       idx %= 2 * nelts;
1834*38fd1498Szrj       if (idx < nelts)
1835*38fd1498Szrj 	{
1836*38fd1498Szrj 	  p = gimple_assign_rhs1 (def_stmt);
1837*38fd1498Szrj 	}
1838*38fd1498Szrj       else
1839*38fd1498Szrj 	{
1840*38fd1498Szrj 	  p = gimple_assign_rhs2 (def_stmt);
1841*38fd1498Szrj 	  idx -= nelts;
1842*38fd1498Szrj 	}
1843*38fd1498Szrj       tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1844*38fd1498Szrj 		    unshare_expr (p), op1, bitsize_int (idx * size));
1845*38fd1498Szrj       gimple_assign_set_rhs1 (stmt, tem);
1846*38fd1498Szrj       fold_stmt (gsi);
1847*38fd1498Szrj       update_stmt (gsi_stmt (*gsi));
1848*38fd1498Szrj       return true;
1849*38fd1498Szrj     }
1850*38fd1498Szrj 
1851*38fd1498Szrj   return false;
1852*38fd1498Szrj }
1853*38fd1498Szrj 
1854*38fd1498Szrj /* Determine whether applying the 2 permutations (mask1 then mask2)
1855*38fd1498Szrj    gives back one of the input.  */
1856*38fd1498Szrj 
1857*38fd1498Szrj static int
is_combined_permutation_identity(tree mask1,tree mask2)1858*38fd1498Szrj is_combined_permutation_identity (tree mask1, tree mask2)
1859*38fd1498Szrj {
1860*38fd1498Szrj   tree mask;
1861*38fd1498Szrj   unsigned HOST_WIDE_INT nelts, i, j;
1862*38fd1498Szrj   bool maybe_identity1 = true;
1863*38fd1498Szrj   bool maybe_identity2 = true;
1864*38fd1498Szrj 
1865*38fd1498Szrj   gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1866*38fd1498Szrj 		       && TREE_CODE (mask2) == VECTOR_CST);
1867*38fd1498Szrj   mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1868*38fd1498Szrj   if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
1869*38fd1498Szrj     return 0;
1870*38fd1498Szrj 
1871*38fd1498Szrj   if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
1872*38fd1498Szrj     return 0;
1873*38fd1498Szrj   for (i = 0; i < nelts; i++)
1874*38fd1498Szrj     {
1875*38fd1498Szrj       tree val = VECTOR_CST_ELT (mask, i);
1876*38fd1498Szrj       gcc_assert (TREE_CODE (val) == INTEGER_CST);
1877*38fd1498Szrj       j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1878*38fd1498Szrj       if (j == i)
1879*38fd1498Szrj 	maybe_identity2 = false;
1880*38fd1498Szrj       else if (j == i + nelts)
1881*38fd1498Szrj 	maybe_identity1 = false;
1882*38fd1498Szrj       else
1883*38fd1498Szrj 	return 0;
1884*38fd1498Szrj     }
1885*38fd1498Szrj   return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1886*38fd1498Szrj }
1887*38fd1498Szrj 
1888*38fd1498Szrj /* Combine a shuffle with its arguments.  Returns 1 if there were any
1889*38fd1498Szrj    changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */
1890*38fd1498Szrj 
1891*38fd1498Szrj static int
simplify_permutation(gimple_stmt_iterator * gsi)1892*38fd1498Szrj simplify_permutation (gimple_stmt_iterator *gsi)
1893*38fd1498Szrj {
1894*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi);
1895*38fd1498Szrj   gimple *def_stmt;
1896*38fd1498Szrj   tree op0, op1, op2, op3, arg0, arg1;
1897*38fd1498Szrj   enum tree_code code;
1898*38fd1498Szrj   bool single_use_op0 = false;
1899*38fd1498Szrj 
1900*38fd1498Szrj   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1901*38fd1498Szrj 
1902*38fd1498Szrj   op0 = gimple_assign_rhs1 (stmt);
1903*38fd1498Szrj   op1 = gimple_assign_rhs2 (stmt);
1904*38fd1498Szrj   op2 = gimple_assign_rhs3 (stmt);
1905*38fd1498Szrj 
1906*38fd1498Szrj   if (TREE_CODE (op2) != VECTOR_CST)
1907*38fd1498Szrj     return 0;
1908*38fd1498Szrj 
1909*38fd1498Szrj   if (TREE_CODE (op0) == VECTOR_CST)
1910*38fd1498Szrj     {
1911*38fd1498Szrj       code = VECTOR_CST;
1912*38fd1498Szrj       arg0 = op0;
1913*38fd1498Szrj     }
1914*38fd1498Szrj   else if (TREE_CODE (op0) == SSA_NAME)
1915*38fd1498Szrj     {
1916*38fd1498Szrj       def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1917*38fd1498Szrj       if (!def_stmt || !can_propagate_from (def_stmt))
1918*38fd1498Szrj 	return 0;
1919*38fd1498Szrj 
1920*38fd1498Szrj       code = gimple_assign_rhs_code (def_stmt);
1921*38fd1498Szrj       arg0 = gimple_assign_rhs1 (def_stmt);
1922*38fd1498Szrj     }
1923*38fd1498Szrj   else
1924*38fd1498Szrj     return 0;
1925*38fd1498Szrj 
1926*38fd1498Szrj   /* Two consecutive shuffles.  */
1927*38fd1498Szrj   if (code == VEC_PERM_EXPR)
1928*38fd1498Szrj     {
1929*38fd1498Szrj       tree orig;
1930*38fd1498Szrj       int ident;
1931*38fd1498Szrj 
1932*38fd1498Szrj       if (op0 != op1)
1933*38fd1498Szrj 	return 0;
1934*38fd1498Szrj       op3 = gimple_assign_rhs3 (def_stmt);
1935*38fd1498Szrj       if (TREE_CODE (op3) != VECTOR_CST)
1936*38fd1498Szrj 	return 0;
1937*38fd1498Szrj       ident = is_combined_permutation_identity (op3, op2);
1938*38fd1498Szrj       if (!ident)
1939*38fd1498Szrj 	return 0;
1940*38fd1498Szrj       orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1941*38fd1498Szrj 			  : gimple_assign_rhs2 (def_stmt);
1942*38fd1498Szrj       gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1943*38fd1498Szrj       gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1944*38fd1498Szrj       gimple_set_num_ops (stmt, 2);
1945*38fd1498Szrj       update_stmt (stmt);
1946*38fd1498Szrj       return remove_prop_source_from_use (op0) ? 2 : 1;
1947*38fd1498Szrj     }
1948*38fd1498Szrj 
1949*38fd1498Szrj   /* Shuffle of a constructor.  */
1950*38fd1498Szrj   else if (code == CONSTRUCTOR || code == VECTOR_CST)
1951*38fd1498Szrj     {
1952*38fd1498Szrj       tree opt;
1953*38fd1498Szrj       bool ret = false;
1954*38fd1498Szrj       if (op0 != op1)
1955*38fd1498Szrj 	{
1956*38fd1498Szrj 	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1957*38fd1498Szrj 	    return 0;
1958*38fd1498Szrj 
1959*38fd1498Szrj 	  if (TREE_CODE (op1) == VECTOR_CST)
1960*38fd1498Szrj 	    arg1 = op1;
1961*38fd1498Szrj 	  else if (TREE_CODE (op1) == SSA_NAME)
1962*38fd1498Szrj 	    {
1963*38fd1498Szrj 	      enum tree_code code2;
1964*38fd1498Szrj 
1965*38fd1498Szrj 	      gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1966*38fd1498Szrj 	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
1967*38fd1498Szrj 		return 0;
1968*38fd1498Szrj 
1969*38fd1498Szrj 	      code2 = gimple_assign_rhs_code (def_stmt2);
1970*38fd1498Szrj 	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1971*38fd1498Szrj 		return 0;
1972*38fd1498Szrj 	      arg1 = gimple_assign_rhs1 (def_stmt2);
1973*38fd1498Szrj 	    }
1974*38fd1498Szrj 	  else
1975*38fd1498Szrj 	    return 0;
1976*38fd1498Szrj 	}
1977*38fd1498Szrj       else
1978*38fd1498Szrj 	{
1979*38fd1498Szrj 	  /* Already used twice in this statement.  */
1980*38fd1498Szrj 	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1981*38fd1498Szrj 	    return 0;
1982*38fd1498Szrj 	  arg1 = arg0;
1983*38fd1498Szrj 	}
1984*38fd1498Szrj       opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1985*38fd1498Szrj       if (!opt
1986*38fd1498Szrj 	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1987*38fd1498Szrj 	return 0;
1988*38fd1498Szrj       gimple_assign_set_rhs_from_tree (gsi, opt);
1989*38fd1498Szrj       update_stmt (gsi_stmt (*gsi));
1990*38fd1498Szrj       if (TREE_CODE (op0) == SSA_NAME)
1991*38fd1498Szrj 	ret = remove_prop_source_from_use (op0);
1992*38fd1498Szrj       if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1993*38fd1498Szrj 	ret |= remove_prop_source_from_use (op1);
1994*38fd1498Szrj       return ret ? 2 : 1;
1995*38fd1498Szrj     }
1996*38fd1498Szrj 
1997*38fd1498Szrj   return 0;
1998*38fd1498Szrj }
1999*38fd1498Szrj 
/* Recognize a CONSTRUCTOR whose elements are all BIT_FIELD_REF extracts
   from one common source vector, each optionally wrapped in the same
   FLOAT_EXPR or FIX_TRUNC_EXPR conversion, and rewrite the constructor
   as a plain copy, a conversion, or a VEC_PERM_EXPR of that source
   vector.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, i;
  unsigned HOST_WIDE_INT nelts;
  enum tree_code code, conv_code;
  constructor_elt *elt;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  /* A constant permutation mask can only be built for a constant
     number of elements.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    return false;
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  vec_perm_builder sel (nelts, nelts, 1);
  orig = NULL;			/* Common source vector, once discovered.  */
  conv_code = ERROR_MARK;	/* Common per-element conversion, if any.  */
  maybe_ident = true;		/* True while the selection is the identity.  */
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      /* Each element must be an SSA name we can trace to its
	 defining statement.  */
      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code == FLOAT_EXPR
	  || code == FIX_TRUNC_EXPR)
	{
	  /* Look through an int<->float conversion, requiring the same
	     conversion code for every element and, on the first one,
	     matching element mode sizes.  */
	  op1 = gimple_assign_rhs1 (def_stmt);
	  if (conv_code == ERROR_MARK)
	    {
	      if (maybe_ne (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (elt->value))),
			    GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))))
		return false;
	      conv_code = code;
	    }
	  else if (conv_code != code)
	    return false;
	  if (TREE_CODE (op1) != SSA_NAME)
	    return false;
	  def_stmt = SSA_NAME_DEF_STMT (op1);
	  if (! is_gimple_assign (def_stmt))
	    return false;
	  code = gimple_assign_rhs_code (def_stmt);
	}
      /* Below the (optional) conversion there must be an element
	 extraction.  */
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      /* All extractions must come from the same source vector.  */
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (! VECTOR_TYPE_P (TREE_TYPE (ref))
	      || ! useless_type_conversion_p (TREE_TYPE (op1),
					      TREE_TYPE (TREE_TYPE (ref))))
	    return false;
	  orig = ref;
	}
      /* The extraction must cover exactly one whole, element-aligned
	 lane; record its index in the selection vector.  */
      unsigned int elt;
      if (maybe_ne (bit_field_size (op1), elem_size)
	  || !constant_multiple_p (bit_field_offset (op1), elem_size, &elt))
	return false;
      if (elt != i)
	maybe_ident = false;
      sel.quick_push (elt);
    }
  /* Constructors that initialize only a prefix of the vector are not
     handled.  */
  if (i < nelts)
    return false;

  if (! VECTOR_TYPE_P (TREE_TYPE (orig))
      || maybe_ne (TYPE_VECTOR_SUBPARTS (type),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig))))
    return false;

  /* A common conversion must be expressible as a single operation;
     reject cases the target would expand to a libcall.  */
  tree tem;
  if (conv_code != ERROR_MARK
      && (! supportable_convert_operation (conv_code, type, TREE_TYPE (orig),
					   &tem, &conv_code)
	  || conv_code == CALL_EXPR))
    return false;

  if (maybe_ident)
    {
      /* Identity selection: a plain copy or conversion of ORIG is
	 enough.  */
      if (conv_code == ERROR_MARK)
	gimple_assign_set_rhs_from_tree (gsi, orig);
      else
	gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
					NULL_TREE, NULL_TREE);
    }
  else
    {
      tree mask_type;

      /* Verify the target can perform this constant permutation.  */
      vec_perm_indices indices (sel, 1, nelts);
      if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
	return false;
      /* Build the integer mask vector type and check it maps to an
	 integer vector mode of the same size.  */
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
		       GET_MODE_SIZE (TYPE_MODE (type))))
	return false;
      op2 = vec_perm_indices_to_tree (mask_type, indices);
      if (conv_code == ERROR_MARK)
	gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
      else
	{
	  /* With a conversion, permute first and then convert the
	     permuted vector in the original statement.  */
	  gimple *perm
	    = gimple_build_assign (make_ssa_name (TREE_TYPE (orig)),
				   VEC_PERM_EXPR, orig, orig, op2);
	  orig = gimple_assign_lhs (perm);
	  gsi_insert_before (gsi, perm, GSI_SAME_STMT);
	  gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
					  NULL_TREE, NULL_TREE);
	}
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
2143*38fd1498Szrj 
2144*38fd1498Szrj 
2145*38fd1498Szrj /* Primitive "lattice" function for gimple_simplify.  */
2146*38fd1498Szrj 
2147*38fd1498Szrj static tree
fwprop_ssa_val(tree name)2148*38fd1498Szrj fwprop_ssa_val (tree name)
2149*38fd1498Szrj {
2150*38fd1498Szrj   /* First valueize NAME.  */
2151*38fd1498Szrj   if (TREE_CODE (name) == SSA_NAME
2152*38fd1498Szrj       && SSA_NAME_VERSION (name) < lattice.length ())
2153*38fd1498Szrj     {
2154*38fd1498Szrj       tree val = lattice[SSA_NAME_VERSION (name)];
2155*38fd1498Szrj       if (val)
2156*38fd1498Szrj 	name = val;
2157*38fd1498Szrj     }
2158*38fd1498Szrj   /* We continue matching along SSA use-def edges for SSA names
2159*38fd1498Szrj      that are not single-use.  Currently there are no patterns
2160*38fd1498Szrj      that would cause any issues with that.  */
2161*38fd1498Szrj   return name;
2162*38fd1498Szrj }
2163*38fd1498Szrj 
2164*38fd1498Szrj /* Main entry point for the forward propagation and statement combine
2165*38fd1498Szrj    optimizer.  */
2166*38fd1498Szrj 
2167*38fd1498Szrj namespace {
2168*38fd1498Szrj 
/* Pass descriptor: a GIMPLE pass named "forwprop", timed under
   TV_TREE_FORWPROP, requiring CFG and SSA form and scheduling an
   SSA update when it finishes.  */
const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
2181*38fd1498Szrj 
/* Pass wrapper for the forward propagation and statement combine
   optimizer; the work is done in the execute method.  */

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass is cloneable so the pass manager can schedule it more
     than once.  */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  /* Run only when -ftree-forwprop is enabled.  */
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
2195*38fd1498Szrj 
/* Run the pass on FUN: walk the CFG in reverse post-order, forward
   propagating addresses and copies into uses, rewriting complex
   loads/stores component-wise, then combining each statement with the
   statements defining its operands.  Returns TODO_* flags for the
   pass manager.  */

unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  /* NOTE(review): passes cfun rather than FUN — presumably identical
     here since the pass runs on the current function.  */
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
							 postorder, false);
  /* Calls that became noreturn during the walk; fixed up afterwards
     because that may split blocks.  */
  auto_vec<gimple *, 4> to_fixup;
  /* Blocks whose dead EH edges must be purged after the walk.  */
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Propagate into PHIs and record degenerate ones in the lattice.  */
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree res = gimple_phi_result (phi);
	  if (virtual_operand_p (res))
	    continue;

	  /* Valueize every argument; if all agree the PHI is a copy and
	     its result can be entered into the lattice.  */
	  use_operand_p use_p;
	  ssa_op_iter it;
	  tree first = NULL_TREE;
	  bool all_same = true;
	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (use_p);
	      tree tem = fwprop_ssa_val (use);
	      if (! first)
		first = tem;
	      else if (! operand_equal_p (first, tem, 0))
		all_same = false;
	      if (tem != use
		  && may_propagate_copy (use, tem))
		propagate_value (use_p, tem);
	    }
	  if (all_same)
	    fwprop_set_lattice_val (res, first);
	}

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  /* All uses were replaced; the definition is dead.  */
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ???  Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  /* Only advance if folding kept the POINTER_PLUS_EXPR;
		     otherwise revisit the stmt so the other handlers
		     can process its new form.  */
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
	         component-wise loads.  */
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      /* First verify every non-debug use is a REALPART_EXPR or
		 IMAGPART_EXPR extraction.  */
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  /* The loaded value disappears; reset any debug
			     bind referencing it.  */
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      /* Replace the extraction with a direct load of the
			 requested component.  */
		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      /* Insert before the original load so it will be
			 visited again by the outer loop.  */
		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
	         to component-wise stores.  */
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  /* Emit the realpart store first, threading a fresh
		     virtual definition between the two stores.  */
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  /* Reuse the original store for the imagpart.  */
		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple *orig_stmt = stmt;
	  bool changed = false;
	  bool was_noreturn = (is_gimple_call (stmt)
			       && gimple_call_noreturn_p (stmt));

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Fold the statement, valueizing operands through the
	     lattice.  */
	  if (fold_stmt (&gsi, fwprop_ssa_val))
	    {
	      changed = true;
	      stmt = gsi_stmt (gsi);
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		bitmap_set_bit (to_purge, bb->index);
	      /* Record calls that newly became noreturn for later
		 fixup.  */
	      if (!was_noreturn
		  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		to_fixup.safe_push (stmt);
	      /* Cleanup the CFG if we simplified a condition to
	         true or false.  */
	      if (gcond *cond = dyn_cast <gcond *> (stmt))
		if (gimple_cond_true_p (cond)
		    || gimple_cond_false_p (cond))
		  cfg_changed = true;
	      update_stmt (stmt);
	    }

	  /* Dispatch to the statement-kind specific combiners.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if (code == COND_EXPR
		    || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1. */
		    if (forward_propagate_into_cond (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    /* A return value of 2 signals the CFG needs
		       cleanup.  */
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
	      break;

	    case GIMPLE_COND:
	      {
		int did_something
		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);

	      /* Fill up the lattice.  */
	      if (gimple_assign_single_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree val = lhs;
		      if (TREE_CODE (rhs) == SSA_NAME)
			val = fwprop_ssa_val (rhs);
		      else if (is_gimple_min_invariant (rhs))
			val = rhs;
		      fwprop_set_lattice_val (lhs, val);
		    }
		}

	      gsi_next (&gsi);
	    }
	}
    }
  free (postorder);
  lattice.release ();

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertedly remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
2598*38fd1498Szrj 
2599*38fd1498Szrj } // anon namespace
2600*38fd1498Szrj 
2601*38fd1498Szrj gimple_opt_pass *
make_pass_forwprop(gcc::context * ctxt)2602*38fd1498Szrj make_pass_forwprop (gcc::context *ctxt)
2603*38fd1498Szrj {
2604*38fd1498Szrj   return new pass_forwprop (ctxt);
2605*38fd1498Szrj }
2606