/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all COND_EXPRs that use X
   (possibly more than one).

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is of integral type and x is boolean, or x is of
    integral type and a is boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all COND_EXPRs that use X
   (possibly more than one).

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on it):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions
   (allowing the cast to be removed), and propagate a {NOT_EXPR,NEG_EXPR}
   into a subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

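/* As a concrete source-level illustration of the first class of
   transformations above (a sketch; function and variable names are
   made up for illustration only):

     int f (int a, int b)
     {
       int x = a < b;   // single-use flag
       if (x)           // becomes: if (a < b)
	 return 1;
       return 0;
     }

   After the pass the comparison feeds the branch directly and the
   now-dead assignment to x is removed.  */
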
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
	{
	  lattice.reserve (num_ssa_names - lattice.length ());
	  lattice.quick_grow_cleared (num_ssa_names);
	}
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}

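/* For example (a sketch with made-up SSA names), given

     b_2 = a_1 + 1;
     c_3 = b_2;
     d_4 = c_3;

   get_prop_source_stmt (d_4, ...) skips the two trivial copies and
   returns the statement b_2 = a_1 + 1 as the propagation source.  */
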
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

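/* For instance, propagation is refused for a conversion like

     fnptr_2 = (void *) fn_1;

   when fn_1 has function pointer type, because substituting the
   conversion's operand could drop a canonicalization step that some
   targets require for function pointers (the concrete target
   behavior is not modeled here; this is only an illustration).  */
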
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

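/* For example (a sketch), once the only use of x_3 has been replaced,
   a chain like

     y_2 = a_1 + 1;
     x_3 = y_2 * 2;

   is removed back to front: first the definition of x_3, then, once
   y_2 has become unused, the definition of y_2.  */
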
/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt),
			    gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons try the first operand first; that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}

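/* For example (a sketch), given

     x_2 = a_1 < b_3;
     t_4 = x_2 != 0;

   combining the comparison defining t_4 with the defining statement
   of x_2 yields a_1 < b_3, eliminating the intermediate test.  */
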
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}

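/* The edge-swapping canonicalization above turns, e.g.,

     if (b_1 == 0) goto L1; else goto L2;

   with boolean b_1 into

     if (b_1 != 0) goto L2; else goto L1;

   by flipping EDGE_TRUE_VALUE/EDGE_FALSE_VALUE on the two outgoing
   edges instead of rewriting the condition's operands.  */
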

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed, false otherwise.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       TREE_TYPE (cond),
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       def_code,
			       TREE_TYPE (cond),
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp);
	  fprintf (dump_file, "'\n");
	}

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
				  : integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

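/* For example (a sketch), with

     c_2 = a_1 < 0;
     x_3 = c_2 ? b_4 : d_5;

   the comparison is forwarded into the COND_EXPR condition; if the
   forwarded condition folds to a constant, the COND_EXPR collapses
   to the corresponding arm.  */
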
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may still have changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
	 so we can remove a conversion, re-materialize the address here
	 and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain, from here on we can only propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
	 to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  poly_offset_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
			 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
			  TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  poly_offset_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
			 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
			  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

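/* For example (a sketch), given

     ptr_1 = &x[0];
     ptr2_2 = ptr_1 p+ 8;

   with 4-byte array elements, the constant POINTER_PLUS_EXPR is
   rewritten through a MEM_REF into ptr2_2 = &x[2], recovering array
   indexing from the pointer arithmetic.  */
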
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this at top level.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
					      single_use_p);
      /* If the use has moved to a different statement adjust
	 the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
	{
	  update_stmt (use_stmt);
	  use_stmt = gsi_stmt (gsi);
	}
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  labels.quick_push (elt);
	  len = 1;
	}

      for (i = 0; i < labels.length (); i++)
	gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
	{
	  tree def = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (def) != SSA_NAME)
	    return false;

	  /* If we have an extension or sign-change that preserves the
	     values we check against then we can copy the source value into
	     the switch.  */
	  tree ti = TREE_TYPE (def);
	  if (INTEGRAL_TYPE_P (ti)
	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
	    {
	      size_t n = gimple_switch_num_labels (stmt);
	      tree min = NULL_TREE, max = NULL_TREE;
	      if (n > 1)
		{
		  min = CASE_LOW (gimple_switch_label (stmt, 1));
		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
		  else
		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
		}
	      if ((!min || int_fits_type_p (min, ti))
		  && (!max || int_fits_type_p (max, ti)))
		{
		  gimple_switch_set_index (stmt, def);
		  simplify_gimple_switch_label_vec (stmt, ti);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}

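/* For example (a sketch), for

     unsigned char c_1 = ...;
     int i_2 = (int) c_1;
     switch (i_2) { case 1: ... case 40: ... }

   all case values fit the narrower type of c_1, so the switch is
   performed on c_1 directly; any case labels that cannot fit the
   narrower type would be dropped by the label-vec helper above.  */
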
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on the definition stmt's RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      poly_int64 offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (maybe_ne (offset, 0))
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    wide_int_to_tree (sizetype,
						      mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

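/* For example (a sketch), with 4-byte int elements in array a,

     p1_1 = &a[1];
     p2_2 = &a[3];

   both address computations strip down to the common base a with
   unit offsets 4 and 12, so constant_pointer_difference (p1_1, p2_2)
   returns 8.  */
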
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into a single
   memcpy (p, "abcd   ", 7);
   call, if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple *use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!tree_fits_shwi_p (val2)
	      || !tree_fits_uhwi_p (len2)
	      || compare_tree_int (len2, 1024) == 1)
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!tree_fits_uhwi_p (len1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      if (!tree_fits_uhwi_p (off1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_to_uhwi (off1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !tree_fits_shwi_p (src1))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out.  */
	  if (diff == NULL
	      || !tree_fits_uhwi_p (diff)
	      || tree_int_cst_lt (len1, diff)
	      || compare_tree_int (diff, 1024) == 1)
	    break;

	  /* Use maximum of difference plus memset length and memcpy length
	     as the new memcpy length, if it is too big, bail out.  */
	  src_len = tree_to_uhwi (diff);
	  src_len += tree_to_uhwi (len2);
	  if (src_len < tree_to_uhwi (len1))
	    src_len = tree_to_uhwi (len1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
		    tree_to_uhwi (len1));
	  else
	    src_buf[0] = tree_to_shwi (src1);
	  memset (src_buf + tree_to_uhwi (diff),
		  tree_to_shwi (val2), tree_to_uhwi (len2));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		{
		  fwprop_invalidate_lattice (lhs1);
		  release_ssa_name (lhs1);
		}
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}

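/* The gimple_assign_single_p branch above handles the case where a
   length-1 memcpy has already been folded into a plain character
   store.  For example (a sketch):

     *p = 'a';
     memset (p + 1, 'b', 2);

   is turned into a single memcpy (p, "abb", 3), with the store
   removed and the memset call rewritten.  */
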
/* Given an ssa_name in NAME, see if it was defined by an assignment;
   if so, set CODE to the code on the rhs, ARG1 to the first operand on
   the rhs and ARG2 to the second operand on the rhs.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
	  && can_propagate_from (def))
	{
	  code1 = gimple_assign_rhs_code (def);
	  arg11 = gimple_assign_rhs1 (def);
	  arg21 = gimple_assign_rhs2 (def);
	  arg31 = gimple_assign_rhs3 (def);
	}
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* A ternary rhs is not handled by the callers; signal failure.  */
  if (arg31)
    *code = ERROR_MARK;
}

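/* For example (a sketch), if x_3 is defined as x_3 = y_1 << 5, then
   defcodefor_name (x_3, &code, &a1, &a2) yields code == LSHIFT_EXPR,
   a1 == y_1 and a2 == 5; for a constant NAME it yields the constant
   itself together with its own tree code.  */
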

/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T.  For:
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B

   transform these into:
   X r<< CNT1

   Or for:
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< Y

   Or for:
   (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
   (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) (Y & (B - 1)))) \
     | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< (Y & (B - 1))

   Note, in the patterns with T2 type, the type of OP operands
   might even be a signed type, but should have precision B.
   Expressions with & (B - 1) should be recognized only if B is
   a power of 2.  */

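/* For example (a sketch), for a 32-bit unsigned x_1,

     t1_2 = x_1 << 8;
     t2_3 = x_1 >> 24;
     r_4 = t1_2 | t2_3;

   matches the CNT1 + CNT2 == B pattern (8 + 24 == 32) and is
   replaced by a single rotate r_4 = x_1 r<< 8.  */
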
1531 static bool
1532 simplify_rotate (gimple_stmt_iterator *gsi)
1533 {
1534   gimple *stmt = gsi_stmt (*gsi);
1535   tree arg[2], rtype, rotcnt = NULL_TREE;
1536   tree def_arg1[2], def_arg2[2];
1537   enum tree_code def_code[2];
1538   tree lhs;
1539   int i;
1540   bool swapped_p = false;
1541   gimple *g;
1542 
1543   arg[0] = gimple_assign_rhs1 (stmt);
1544   arg[1] = gimple_assign_rhs2 (stmt);
1545   rtype = TREE_TYPE (arg[0]);
1546 
1547   /* Only create rotates in complete modes.  Other cases are not
1548      expanded properly.  */
1549   if (!INTEGRAL_TYPE_P (rtype)
1550       || !type_has_mode_precision_p (rtype))
1551     return false;
1552 
1553   for (i = 0; i < 2; i++)
1554     defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1555 
1556   /* Look through narrowing conversions.  */
1557   if (CONVERT_EXPR_CODE_P (def_code[0])
1558       && CONVERT_EXPR_CODE_P (def_code[1])
1559       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1560       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1561       && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1562 	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1563       && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1564       && has_single_use (arg[0])
1565       && has_single_use (arg[1]))
1566     {
1567       for (i = 0; i < 2; i++)
1568 	{
1569 	  arg[i] = def_arg1[i];
1570 	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1571 	}
1572     }
1573 
1574   /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
1575   for (i = 0; i < 2; i++)
1576     if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1577       return false;
1578     else if (!has_single_use (arg[i]))
1579       return false;
1580   if (def_code[0] == def_code[1])
1581     return false;
1582 
1583   /* If we've looked through narrowing conversions before, look through
1584      widening conversions from unsigned type with the same precision
1585      as rtype here.  */
1586   if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1587     for (i = 0; i < 2; i++)
1588       {
1589 	tree tem;
1590 	enum tree_code code;
1591 	defcodefor_name (def_arg1[i], &code, &tem, NULL);
1592 	if (!CONVERT_EXPR_CODE_P (code)
1593 	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1594 	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1595 	  return false;
1596 	def_arg1[i] = tem;
1597       }
1598   /* Both shifts have to use the same first operand.  */
1599   if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1600       || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1601 			      TREE_TYPE (def_arg1[1])))
1602     return false;
1603   if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1604     return false;
1605 

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversions of the shift count argument.
	 The C/C++ FE casts any shift count argument to integer_type_node.
	 The only problem might be if the shift count type's maximum value
	 is equal to or smaller than the number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && tree_fits_shwi_p (cdef_arg1[i])
	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		 > floor_log2 (TYPE_PRECISION (rtype))
		&& type_has_mode_precision_p (TREE_TYPE (tem))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe when Y is 0, because then one
	   of the shifts triggers undefined behavior.  This alternative
	   is safe even for a rotation count of 0.  One shift count is
	   Y and the other (-Y) & (B - 1), or one shift count is
	   Y & (B - 1) and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && pow2p_hwi (TYPE_PRECISION (rtype))
		 && tree_fits_shwi_p (cdef_arg2[i])
		 && tree_to_shwi (cdef_arg2[i])
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		 > floor_log2 (TYPE_PRECISION (rtype))
		&& type_has_mode_precision_p (TREE_TYPE (tem)))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		tree tem2;
		defcodefor_name (tem, &code, &tem2, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
		    && TYPE_PRECISION (TREE_TYPE (tem2))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && type_has_mode_precision_p (TREE_TYPE (tem2)))
		  {
		    if (tem2 == def_arg2[1 - i]
			|| tem2 == def_arg2_alt[1 - i])
		      {
			rotcnt = tem2;
			break;
		      }
		  }
		else
		  tem2 = NULL_TREE;

		if (cdef_code[1 - i] == BIT_AND_EXPR
		    && tree_fits_shwi_p (cdef_arg2[1 - i])
		    && tree_to_shwi (cdef_arg2[1 - i])
		       == TYPE_PRECISION (rtype) - 1
		    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
		  {
		    if (tem == cdef_arg1[1 - i]
			|| tem2 == cdef_arg1[1 - i])
		      {
			rotcnt = def_arg2[1 - i];
			break;
		      }
		    tree tem3;
		    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
		    if (CONVERT_EXPR_CODE_P (code)
			&& INTEGRAL_TYPE_P (TREE_TYPE (tem3))
			&& TYPE_PRECISION (TREE_TYPE (tem3))
			   > floor_log2 (TYPE_PRECISION (rtype))
			&& type_has_mode_precision_p (TREE_TYPE (tem3)))
		      {
			if (tem == tem3 || tem2 == tem3)
			  {
			    rotcnt = def_arg2[1 - i];
			    break;
			  }
		      }
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
			       NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}

/* Combine an element access with a shuffle.  Returns true if any
   changes were made, otherwise returns false.  */
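
/* For example (hypothetical GIMPLE, for illustration only), with
   four-element vectors of 32-bit elements:

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 4, 5, 6, 7 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 0>;

   element 0 of v_2 is element 0 of b_1, so the access is rewritten
   to read directly from the shuffle's input:

     x_3 = BIT_FIELD_REF <b_1, 32, 0>;  */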

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  if (maybe_ne (bit_field_size (op), size))
    return false;

  if (code == VEC_PERM_EXPR
      && constant_multiple_p (bit_field_offset (op), size, &idx))
    {
      tree p, m, tem;
      unsigned HOST_WIDE_INT nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST
	  || !VECTOR_CST_NELTS (m).is_constant (&nelts))
	return false;
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
	p = gimple_assign_rhs1 (def_stmt);
      else
	{
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, bitsize_int (idx * size));
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Determine whether applying the two permutations (mask1 then mask2)
   gives back one of the inputs.  */
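
/* For instance (an illustrative case), with four-element vectors:
   applying mask1 = { 4, 5, 6, 7 } selects the second input, and
   mask2 = { 0, 1, 2, 3 } is then the identity on that result.  The
   combined mask is { 4, 5, 6, 7 }, so the function returns 2: the
   two permutations together just reproduce the second input.  */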

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned HOST_WIDE_INT nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
		       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    return 0;

  if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    return 0;
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
	maybe_identity2 = false;
      else if (j == i + nelts)
	maybe_identity1 = false;
      else
	return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}

/* Combine a shuffle with its arguments.  Returns 1 if any changes were
   made, 2 if cfg-cleanup needs to run, and 0 otherwise.  */
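
/* For example (illustrative only), with four-element vectors:

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 2, 3, 0, 1 }>;
     v_3 = VEC_PERM_EXPR <v_2, v_2, { 2, 3, 0, 1 }>;

   swaps the two halves twice, so the combined permutation is the
   identity on a_1 and v_3 is replaced by a_1 directly.  */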

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}

/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
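
/* For instance (a hypothetical illustration), a CONSTRUCTOR that
   gathers the elements of one vector in reverse order:

     x_2 = BIT_FIELD_REF <v_1, 32, 96>;
     x_3 = BIT_FIELD_REF <v_1, 32, 64>;
     x_4 = BIT_FIELD_REF <v_1, 32, 32>;
     x_5 = BIT_FIELD_REF <v_1, 32, 0>;
     tem_6 = {x_2, x_3, x_4, x_5};

   is recognized as the permutation

     tem_6 = VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>;

   provided the target supports that constant permutation.  */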

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, i;
  unsigned HOST_WIDE_INT nelts;
  enum tree_code code, conv_code;
  constructor_elt *elt;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    return false;
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  vec_perm_builder sel (nelts, nelts, 1);
  orig = NULL;
  conv_code = ERROR_MARK;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code == FLOAT_EXPR
	  || code == FIX_TRUNC_EXPR)
	{
	  op1 = gimple_assign_rhs1 (def_stmt);
	  if (conv_code == ERROR_MARK)
	    {
	      if (maybe_ne (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (elt->value))),
			    GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))))
		return false;
	      conv_code = code;
	    }
	  else if (conv_code != code)
	    return false;
	  if (TREE_CODE (op1) != SSA_NAME)
	    return false;
	  def_stmt = SSA_NAME_DEF_STMT (op1);
	  if (! is_gimple_assign (def_stmt))
	    return false;
	  code = gimple_assign_rhs_code (def_stmt);
	}
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (! VECTOR_TYPE_P (TREE_TYPE (ref))
	      || ! useless_type_conversion_p (TREE_TYPE (op1),
					      TREE_TYPE (TREE_TYPE (ref))))
	    return false;
	  orig = ref;
	}
      unsigned int elt;
      if (maybe_ne (bit_field_size (op1), elem_size)
	  || !constant_multiple_p (bit_field_offset (op1), elem_size, &elt))
	return false;
      if (elt != i)
	maybe_ident = false;
      sel.quick_push (elt);
    }
  if (i < nelts)
    return false;

  if (! VECTOR_TYPE_P (TREE_TYPE (orig))
      || maybe_ne (TYPE_VECTOR_SUBPARTS (type),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig))))
    return false;

  tree tem;
  if (conv_code != ERROR_MARK
      && (! supportable_convert_operation (conv_code, type, TREE_TYPE (orig),
					   &tem, &conv_code)
	  || conv_code == CALL_EXPR))
    return false;

  if (maybe_ident)
    {
      if (conv_code == ERROR_MARK)
	gimple_assign_set_rhs_from_tree (gsi, orig);
      else
	gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
					NULL_TREE, NULL_TREE);
    }
  else
    {
      tree mask_type;

      vec_perm_indices indices (sel, 1, nelts);
      if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
		       GET_MODE_SIZE (TYPE_MODE (type))))
	return false;
      op2 = vec_perm_indices_to_tree (mask_type, indices);
      if (conv_code == ERROR_MARK)
	gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
      else
	{
	  gimple *perm
	    = gimple_build_assign (make_ssa_name (TREE_TYPE (orig)),
				   VEC_PERM_EXPR, orig, orig, op2);
	  orig = gimple_assign_lhs (perm);
	  gsi_insert_before (gsi, perm, GSI_SAME_STMT);
	  gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
					  NULL_TREE, NULL_TREE);
	}
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}


/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop

unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
							 postorder, false);
  auto_vec<gimple *, 4> to_fixup;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Propagate into PHIs and record degenerate ones in the lattice.  */
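      /* E.g. (an illustrative case): if every argument of a PHI
	 valueizes to the same value, say x_1, the PHI result is
	 recorded as a copy of x_1 in the lattice.  */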
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree res = gimple_phi_result (phi);
	  if (virtual_operand_p (res))
	    continue;

	  use_operand_p use_p;
	  ssa_op_iter it;
	  tree first = NULL_TREE;
	  bool all_same = true;
	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (use_p);
	      tree tem = fwprop_ssa_val (use);
	      if (! first)
		first = tem;
	      else if (! operand_equal_p (first, tem, 0))
		all_same = false;
	      if (tem != use
		  && may_propagate_copy (use, tem))
		propagate_value (use_p, tem);
	    }
	  if (all_same)
	    fwprop_set_lattice_val (res, first);
	}

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ???  Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
		 component-wise loads.  */
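	      /* E.g. (illustrative): for tmp_1 = *p_2; used only by
		 REALPART_EXPR <tmp_1> and IMAGPART_EXPR <tmp_1>
		 extractions, emit REALPART_EXPR <*p_2> and
		 IMAGPART_EXPR <*p_2> loads instead and remove the
		 whole-complex load.  */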
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
		 to component-wise stores.  */
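	      /* E.g. (illustrative): tmp_1 = COMPLEX_EXPR <r_2, i_3>;
		 *p_4 = tmp_1; becomes the component stores
		 REALPART_EXPR <*p_4> = r_2; IMAGPART_EXPR <*p_4> = i_3;  */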
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple *orig_stmt = stmt;
	  bool changed = false;
	  bool was_noreturn = (is_gimple_call (stmt)
			       && gimple_call_noreturn_p (stmt));

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  if (fold_stmt (&gsi, fwprop_ssa_val))
	    {
	      changed = true;
	      stmt = gsi_stmt (gsi);
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		bitmap_set_bit (to_purge, bb->index);
	      if (!was_noreturn
		  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		to_fixup.safe_push (stmt);
	      /* Clean up the CFG if we simplified a condition to
		 true or false.  */
	      if (gcond *cond = dyn_cast <gcond *> (stmt))
		if (gimple_cond_true_p (cond)
		    || gimple_cond_false_p (cond))
		  cfg_changed = true;
	      update_stmt (stmt);
	    }

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if (code == COND_EXPR
		    || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1.  */
		    if (forward_propagate_into_cond (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
	      break;

	    case GIMPLE_COND:
	      {
		int did_something
		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);

	      /* Fill up the lattice.  */
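	      /* E.g. (illustrative): for a copy x_1 = y_2 record the
		 valueized y_2 as the value of x_1, and for x_1 = 42
		 record the constant, so fwprop_ssa_val can look
		 through them when folding later statements.  */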
	      if (gimple_assign_single_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree val = lhs;
		      if (TREE_CODE (rhs) == SSA_NAME)
			val = fwprop_ssa_val (rhs);
		      else if (is_gimple_min_invariant (rhs))
			val = rhs;
		      fwprop_set_lattice_val (lhs, val);
		    }
		}

	      gsi_next (&gsi);
	    }
	}
    }
  free (postorder);
  lattice.release ();

  /* Fix up stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this in reverse
     order so we don't inadvertently remove a stmt we want to fix up by
     visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}