/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

  Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

  Will get turned into:

     ptr2 = &x[index];

  Or
    ssa = (int) decl
    res = ssa & 1

  Provided that decl has known alignment >= 2, will get turned into

    res = 0

  We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
  allow us to remove the cast, and we propagate a {NOT_EXPR,NEG_EXPR}
  into a subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
	{
	  lattice.reserve (num_ssa_names - lattice.length ());
	  lattice.quick_grow_cleared (num_ssa_names);
	}
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
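
/* For example (an illustrative GIMPLE sketch; the names are made up):

     a_1 = x_5 + 1;
     b_2 = a_1;		<- trivial copy, looked through
     if (b_2 != 0) ...	<- NAME is b_2

   get_prop_source_stmt (b_2, ...) skips the copy and returns the
   statement a_1 = x_5 + 1 as the propagation source.  */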

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */
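
/* For example (sketch): once NAME's only use has been replaced in

     t_1 = a_3 + 1;
     t_2 = (int) t_1;	<- NAME is t_2, now unused

   both statements are removed, following the first-operand links,
   stopping at a statement that still has uses or side effects.  */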

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt),
			    gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND, or
   NULL_TREE if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */
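
/* For instance (an illustrative sketch), given

     t_1 = a_2 + 1;
     ... t_1 == 5 ...

   substituting the definition of t_1 lets fold turn the comparison
   into a_2 == 4, which is returned as the new comparison tree.  */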

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */
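
/* For example (sketch):

     x_1 = a_2 < b_3;
     y_4 = x_1 ? c_5 : d_6;

   becomes

     y_4 = a_2 < b_3 ? c_5 : d_6;

   and if the combined condition folds to a constant, the COND_EXPR
   collapses to one of its arms.  */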

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       TREE_TYPE (cond),
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       def_code,
			       TREE_TYPE (cond),
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
				  : integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
     recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may still have changed).  */
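
/* For example (an illustrative sketch): with NAME = ptr_1 and
   DEF_RHS = &a[0] for an array "int a[10]",

     tmp_2 = ptr_1 + 4;

   can be rewritten as

     tmp_2 = &a[1];

   recovering array indexing from the pointer arithmetic.  */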

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
	 so that we can remove a conversion, re-materialize the address
	 here and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain, from this point on we can only
     propagate into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  offset_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
		         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
		          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  offset_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
		         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
		          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
					      single_use_p);
      /* If the use has moved to a different statement adjust
	 the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
	{
	  update_stmt (use_stmt);
	  use_stmt = gsi_stmt (gsi);
	}
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  labels.quick_push (elt);
	  len = 1;
	}

      for (i = 0; i < labels.length (); i++)
	gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */
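
/* For example (an illustrative sketch):

     t_2 = (int) c_1;	<- c_1 has type unsigned char
     switch (t_2) { case 1: ... case 200: ... }

   can be rewritten to switch on c_1 directly, because every case
   value fits in unsigned char.  */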

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
	{
	  tree def = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (def) != SSA_NAME)
	    return false;

	  /* If we have an extension or sign-change that preserves the
	     values we check against then we can copy the source value into
	     the switch.  */
	  tree ti = TREE_TYPE (def);
	  if (INTEGRAL_TYPE_P (ti)
	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
	    {
	      size_t n = gimple_switch_num_labels (stmt);
	      tree min = NULL_TREE, max = NULL_TREE;
	      if (n > 1)
		{
		  min = CASE_LOW (gimple_switch_label (stmt, 1));
		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
		  else
		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
		}
	      if ((!min || int_fits_type_p (min, ti))
		  && (!max || int_fits_type_p (max, ti)))
		{
		  gimple_switch_set_index (stmt, def);
		  simplify_gimple_switch_label_vec (stmt, ti);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL_TREE.  */
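
/* For example (sketch), for "int a[4]":

     p1_1 = &a[0];
     p2_2 = p1_1 + 8;

   constant_pointer_difference (p1_1, p2_2) returns 8; for pointers
   with no common base it returns NULL_TREE.  */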

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on the definition stmt's RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    wide_int_to_tree (sizetype,
						      mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   if the latter call can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple *use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!tree_fits_shwi_p (val2)
	      || !tree_fits_uhwi_p (len2)
	      || compare_tree_int (len2, 1024) == 1)
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If the first stmt is a call, it needs to be memcpy
		 or mempcpy, with a string literal as the second argument
		 and a constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!tree_fits_uhwi_p (len1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      if (!tree_fits_uhwi_p (off1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_to_uhwi (off1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for a length 1 memcpy optimized into
		 an assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !tree_fits_shwi_p (src1))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than the memcpy length, bail out.  */
	  if (diff == NULL
	      || !tree_fits_uhwi_p (diff)
	      || tree_int_cst_lt (len1, diff)
	      || compare_tree_int (diff, 1024) == 1)
	    break;

	  /* Use the maximum of difference plus memset length and memcpy length
	     as the new memcpy length; if it is too big, bail out.  */
	  src_len = tree_to_uhwi (diff);
	  src_len += tree_to_uhwi (len2);
	  if (src_len < tree_to_uhwi (len1))
	    src_len = tree_to_uhwi (len1);
	  if (src_len > 1024)
	    break;

	  /* If the mempcpy value is used elsewhere, bail out, as mempcpy
	     with a bigger length will return a different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
		    tree_to_uhwi (len1));
	  else
	    src_buf[0] = tree_to_shwi (src1);
	  memset (src_buf + tree_to_uhwi (diff),
		  tree_to_shwi (val2), tree_to_uhwi (len2));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 the memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		{
		  fwprop_invalidate_lattice (lhs1);
		  release_ssa_name (lhs1);
		}
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is a length 1 memcpy optimized into
		 an assignment, remove STMT1 and change the memset call into
		 a memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}

/* Given an ssa_name in NAME, see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */
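
/* For example (sketch), given

     t_1 = a_2 << 3;

   defcodefor_name (t_1, &code, &arg1, &arg2) sets code to
   LSHIFT_EXPR, arg1 to a_2 and arg2 to 3.  */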

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
	  && can_propagate_from (def))
	{
	  code1 = gimple_assign_rhs_code (def);
	  arg11 = gimple_assign_rhs1 (def);
	  arg21 = gimple_assign_rhs2 (def);
	  arg31 = gimple_assign_rhs3 (def);
	}
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
	   || grhs_class == GIMPLE_BINARY_RHS
	   || grhs_class == GIMPLE_UNARY_RHS
	   || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}


/* Recognize rotation patterns.  Return true if a transformation
   was applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might even be a signed type, but should have precision B.  */
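
/* For instance (an illustrative sketch), for a 32-bit unsigned x_5
   (so B == 32):

     t1_1 = x_5 << 3;
     t2_2 = x_5 >> 29;
     r_3 = t1_1 | t2_2;

   is recognized as the rotate r_3 = x_5 r<< 3.  */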

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
	 The C/C++ FEs cast any shift count argument to integer_type_node.
	 The only problem might be if the shift count type maximum value
	 is equal to or smaller than the number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && tree_fits_shwi_p (cdef_arg1[i])
	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe for Y being 0,
	   because then one of the shifts triggers undefined behavior.
	   This alternative is safe even for rotation count of 0.
	   One shift count is Y and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && tree_fits_shwi_p (cdef_arg2[i])
		 && tree_to_shwi (cdef_arg2[i])
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		defcodefor_name (tem, &code, &tem, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		    && (tem == def_arg2[1 - i]
			|| tem == def_arg2_alt[1 - i]))
		  {
		    rotcnt = tem;
		    break;
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
			       NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}
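
/* For instance, with B == 32 (illustrative GIMPLE, SSA names invented
   for the example):

     _5 = 32 - y_4;
     _1 = x_3 << y_4;
     _2 = x_3 >> _5;
     r_6 = _1 | _2;

   simplify_rotate called on the BIT_IOR_EXPR statement replaces it by

     r_6 = x_3 r<< y_4;

   which is the B - Y case from the pattern list above.  */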

/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
	return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
	{
	  p = gimple_assign_rhs1 (def_stmt);
	}
      else
	{
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, bitsize_int (idx * size));
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
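
/* As an illustrative example, with four-element vectors of 32-bit
   elements, extracting lane 0 of a shuffle

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 5, 0, 3, 6 }>;
     _3 = BIT_FIELD_REF <v_2, 32, 0>;

   is rewritten to read the selected element of the shuffle input
   directly:

     _3 = BIT_FIELD_REF <b_1, 32, 32>;

   since mask element 5 selects element 1 of the second input.  */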

/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
		       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
	maybe_identity2 = false;
      else if (j == i + nelts)
	maybe_identity1 = false;
      else
	return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
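
/* E.g. (illustrative), composing the swap-pairs mask with itself,

     mask1 = { 1, 0, 3, 2 }, mask2 = { 1, 0, 3, 2 }

   yields the identity selection { 0, 1, 2, 3 }, so the function returns
   1: applying both permutations gives back the first input of the inner
   VEC_PERM_EXPR unchanged.  */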

/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
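
/* For instance (illustrative), two nested shuffles that cancel,

     v_2 = VEC_PERM_EXPR <a_1, a_1, { 1, 0, 3, 2 }>;
     v_3 = VEC_PERM_EXPR <v_2, v_2, { 1, 0, 3, 2 }>;

   are replaced by a plain copy v_3 = a_1, using
   is_combined_permutation_identity above to detect the identity.  */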

/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
	    return false;
	  orig = ref;
	}
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
	return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
	maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
	return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
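
/* E.g. (illustrative, with the defining BIT_FIELD_REFs shown inline for
   brevity), a constructor that gathers shuffled 32-bit elements of one
   vector,

     v_5 = { BIT_FIELD_REF <v_1, 32, 32>, BIT_FIELD_REF <v_1, 32, 0>,
	     BIT_FIELD_REF <v_1, 32, 96>, BIT_FIELD_REF <v_1, 32, 64> };

   becomes

     v_5 = VEC_PERM_EXPR <v_1, v_1, { 1, 0, 3, 2 }>;

   provided the target supports that permutation (can_vec_perm_p).  */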


/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
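
/* E.g. (illustrative), after the copies

     b_2 = a_1;
     c_3 = b_2;

   have been processed below, the lattice maps both b_2 and c_3 to a_1,
   so fwprop_ssa_val (c_3) returns a_1 and gimple_simplify can match its
   patterns against the copy source directly.  */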

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop

unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = inverted_post_order_compute (postorder);
  auto_vec<gimple *, 4> to_fixup;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ???  Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
	         component-wise loads.  */
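	      /* E.g. (illustrative),

		   _1 = *p_2;
		   r_3 = REALPART_EXPR <_1>;
		   i_4 = IMAGPART_EXPR <_1>;

		 becomes

		   r_3 = REALPART_EXPR <*p_2>;
		   i_4 = IMAGPART_EXPR <*p_2>;

		 when every non-debug use of _1 is such an extraction.  */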
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
	         to component-wise stores.  */
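	      /* E.g. (illustrative),

		   _1 = COMPLEX_EXPR <r_2, i_3>;
		   *p_4 = _1;

		 becomes

		   REALPART_EXPR <*p_4> = r_2;
		   IMAGPART_EXPR <*p_4> = i_3;

		 when the store is the only use of _1.  */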
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple *orig_stmt = stmt;
	  bool changed = false;
	  bool was_noreturn = (is_gimple_call (stmt)
			       && gimple_call_noreturn_p (stmt));

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  if (fold_stmt (&gsi, fwprop_ssa_val))
	    {
	      changed = true;
	      stmt = gsi_stmt (gsi);
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		bitmap_set_bit (to_purge, bb->index);
	      if (!was_noreturn
		  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		to_fixup.safe_push (stmt);
	      /* Cleanup the CFG if we simplified a condition to
	         true or false.  */
	      if (gcond *cond = dyn_cast <gcond *> (stmt))
		if (gimple_cond_true_p (cond)
		    || gimple_cond_false_p (cond))
		  cfg_changed = true;
	      update_stmt (stmt);
	    }

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if (code == COND_EXPR
		    || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1.  */
		    if (forward_propagate_into_cond (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
	      break;

	    case GIMPLE_COND:
	      {
		int did_something
		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);

	      /* Fill up the lattice.  */
	      if (gimple_assign_single_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree val = lhs;
		      if (TREE_CODE (rhs) == SSA_NAME)
			val = fwprop_ssa_val (rhs);
		      else if (is_gimple_min_invariant (rhs))
			val = rhs;
		      fwprop_set_lattice_val (lhs, val);
		    }
		}

	      gsi_next (&gsi);
	    }
	}
    }
  free (postorder);
  lattice.release ();

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}
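
/* The pass manager instantiates the pass through this factory function;
   see passes.def, where forwprop is scheduled several times (as
   NEXT_PASS (pass_forwprop)) across the early and main optimization
   pipelines.  */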