1 /* Straight-line strength reduction.
2    Copyright (C) 2012-2018 Free Software Foundation, Inc.
3    Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* There are many algorithms for performing strength reduction on
22    loops.  This is not one of them.  IVOPTS handles strength reduction
23    of induction variables just fine.  This pass is intended to pick
24    up the crumbs it leaves behind, by considering opportunities for
25    strength reduction along dominator paths.
26 
27    Strength reduction addresses explicit multiplies, and certain
28    multiplies implicit in addressing expressions.  It would also be
29    possible to apply strength reduction to divisions and modulos,
30    but such opportunities are relatively uncommon.
31 
32    Strength reduction is also currently restricted to integer operations.
33    If desired, it could be extended to floating-point operations under
34    control of something like -funsafe-math-optimizations.  */
35 
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "tree-eh.h"
59 #include "builtins.h"
60 
61 /* Information about a strength reduction candidate.  Each statement
62    in the candidate table represents an expression of one of the
63    following forms (the special case of CAND_REF will be described
64    later):
65 
66    (CAND_MULT)  S1:  X = (B + i) * S
67    (CAND_ADD)   S1:  X = B + (i * S)
68 
69    Here X and B are SSA names, i is an integer constant, and S is
70    either an SSA name or a constant.  We call B the "base," i the
71    "index", and S the "stride."
72 
73    Any statement S0 that dominates S1 and is of the form:
74 
75    (CAND_MULT)  S0:  Y = (B + i') * S
76    (CAND_ADD)   S0:  Y = B + (i' * S)
77 
78    is called a "basis" for S1.  In both cases, S1 may be replaced by
79 
80                 S1':  X = Y + (i - i') * S,
81 
82    where (i - i') * S is folded to the extent possible.
83 
84    All gimple statements are visited in dominator order, and each
85    statement that may contribute to one of the forms of S1 above is
86    given at least one entry in the candidate table.  Such statements
87    include addition, pointer addition, subtraction, multiplication,
88    negation, copies, and nontrivial type casts.  If a statement may
89    represent more than one expression of the forms of S1 above,
90    multiple "interpretations" are stored in the table and chained
91    together.  Examples:
92 
93    * An add of two SSA names may treat either operand as the base.
94    * A multiply of two SSA names, likewise.
95    * A copy or cast may be thought of as either a CAND_MULT with
96      i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
97 
98    Candidate records are allocated from an obstack.  They are addressed
99    both from a hash table keyed on S1, and from a vector of candidate
100    pointers arranged in predominator order.
101 
102    Opportunity note
103    ----------------
104    Currently we don't recognize:
105 
106      S0: Y = (S * i') - B
107      S1: X = (S * i) - B
108 
109    as a strength reduction opportunity, even though this S1 would
110    also be replaceable by the S1' above.  This can be added if it
111    comes up in practice.
112 
113    Strength reduction in addressing
114    --------------------------------
115    There is another kind of candidate known as CAND_REF.  A CAND_REF
116    describes a statement containing a memory reference having
117    complex addressing that might benefit from strength reduction.
118    Specifically, we are interested in references for which
119    get_inner_reference returns a base address, offset, and bitpos as
120    follows:
121 
122      base:    MEM_REF (T1, C1)
123      offset:  MULT_EXPR (PLUS_EXPR (T2, C2), C3)
124      bitpos:  C4 * BITS_PER_UNIT
125 
126    Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
127    arbitrary integer constants.  Note that C2 may be zero, in which
128    case the offset will be MULT_EXPR (T2, C3).
129 
130    When this pattern is recognized, the original memory reference
131    can be replaced with:
132 
133      MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
134               C1 + (C2 * C3) + C4)
135 
136    which distributes the multiply to allow constant folding.  When
137    two or more addressing expressions can be represented by MEM_REFs
138    of this form, differing only in the constants C1, C2, and C4,
139    making this substitution produces more efficient addressing during
140    the RTL phases.  When there are not at least two expressions with
141    the same values of T1, T2, and C3, there is nothing to be gained
142    by the replacement.
143 
144    Strength reduction of CAND_REFs uses the same infrastructure as
145    that used by CAND_MULTs and CAND_ADDs.  We record T1 in the base (B)
146    field, MULT_EXPR (T2, C3) in the stride (S) field, and
147    C1 + (C2 * C3) + C4 in the index (i) field.  A basis for a CAND_REF
148    is thus another CAND_REF with the same B and S values.  When at
149    least two CAND_REFs are chained together using the basis relation,
150    each of them is replaced as above, resulting in improved code
151    generation for addressing.
152 
153    Conditional candidates
154    ======================
155 
156    Conditional candidates are best illustrated with an example.
157    Consider the code sequence:
158 
159    (1)  x_0 = ...;
160    (2)  a_0 = x_0 * 5;          MULT (B: x_0; i: 0; S: 5)
161         if (...)
162    (3)    x_1 = x_0 + 1;        ADD  (B: x_0, i: 1; S: 1)
163    (4)  x_2 = PHI <x_0, x_1>;   PHI  (B: x_0, i: 0, S: 1)
164    (5)  x_3 = x_2 + 1;          ADD  (B: x_2, i: 1, S: 1)
165    (6)  a_1 = x_3 * 5;          MULT (B: x_2, i: 1; S: 5)
166 
167    Here strength reduction is complicated by the uncertain value of x_2.
168    A legitimate transformation is:
169 
170    (1)  x_0 = ...;
171    (2)  a_0 = x_0 * 5;
172         if (...)
173 	  {
174    (3)      [x_1 = x_0 + 1;]
175    (3a)     t_1 = a_0 + 5;
176           }
177    (4)  [x_2 = PHI <x_0, x_1>;]
178    (4a) t_2 = PHI <a_0, t_1>;
179    (5)  [x_3 = x_2 + 1;]
180    (6r) a_1 = t_2 + 5;
181 
182    where the bracketed instructions may go dead.
183 
184    To recognize this opportunity, we have to observe that statement (6)
185    has a "hidden basis" (2).  The hidden basis is unlike a normal basis
186    in that the statement and the hidden basis have different base SSA
187    names (x_2 and x_0, respectively).  The relationship is established
188    when a statement's base name (x_2) is defined by a phi statement (4),
189    each argument of which (x_0, x_1) has an identical "derived base name."
190    If the argument is defined by a candidate (as x_1 is by (3)) that is a
191    CAND_ADD having a stride of 1, the derived base name of the argument is
192    the base name of the candidate (x_0).  Otherwise, the argument itself
193    is its derived base name (as is the case with argument x_0).
194 
195    The hidden basis for statement (6) is the nearest dominating candidate
196    whose base name is the derived base name (x_0) of the feeding phi (4),
197    and whose stride is identical to that of the statement.  We can then
198    create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
199    allowing the final replacement of (6) by the strength-reduced (6r).
200 
201    To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
202    A CAND_PHI is not a candidate for replacement, but is maintained in the
203    candidate table to ease discovery of hidden bases.  Any phi statement
204    whose arguments share a common derived base name is entered into the
205    table with the derived base name, an (arbitrary) index of zero, and a
206    stride of 1.  A statement with a hidden basis can then be detected by
207    simply looking up its feeding phi definition in the candidate table,
208    extracting the derived base name, and searching for a basis in the
209    usual manner after substituting the derived base name.
210 
211    Note that the transformation is only valid when the original phi and
212    the statements that define the phi's arguments are all at the same
213    position in the loop hierarchy.  */
214 
215 
216 /* Index into the candidate vector, offset by 1.  VECs are zero-based,
217    while cand_idx's are one-based, with zero indicating null.  */
218 typedef unsigned cand_idx;
219 
220 /* The kind of candidate.  */
221 enum cand_kind
222 {
223   CAND_MULT,
224   CAND_ADD,
225   CAND_REF,
226   CAND_PHI
227 };
228 
229 struct slsr_cand_d
230 {
231   /* The candidate statement S1.  */
232   gimple *cand_stmt;
233 
234   /* The base expression B:  often an SSA name, but not always.  */
235   tree base_expr;
236 
237   /* The stride S.  */
238   tree stride;
239 
240   /* The index constant i.  */
241   widest_int index;
242 
243   /* The type of the candidate.  This is normally the type of base_expr,
244      but casts may have occurred when combining feeding instructions.
245      A candidate can only be a basis for candidates of the same final type.
246      (For CAND_REFs, this is the type to be used for operand 1 of the
247      replacement MEM_REF.)  */
248   tree cand_type;
249 
250   /* The type to be used to interpret the stride field when the stride
251      is not a constant.  Normally the same as the type of the recorded
252      stride, but when the stride has been cast we need to maintain that
253      knowledge in order to make legal substitutions without losing
254      precision.  When the stride is a constant, this will be sizetype.  */
255   tree stride_type;
256 
257   /* The kind of candidate (CAND_MULT, etc.).  */
258   enum cand_kind kind;
259 
260   /* Index of this candidate in the candidate vector.  */
261   cand_idx cand_num;
262 
263   /* Index of the next candidate record for the same statement.
264      A statement may be useful in more than one way (e.g., due to
265      commutativity).  So we can have multiple "interpretations"
266      of a statement.  */
267   cand_idx next_interp;
268 
269   /* Index of the first candidate record in a chain for the same
270      statement.  */
271   cand_idx first_interp;
272 
273   /* Index of the basis statement S0, if any, in the candidate vector.  */
274   cand_idx basis;
275 
276   /* First candidate for which this candidate is a basis, if one exists.  */
277   cand_idx dependent;
278 
279   /* Next candidate having the same basis as this one.  */
280   cand_idx sibling;
281 
282   /* If this is a conditional candidate, the CAND_PHI candidate
283      that defines the base SSA name B.  */
284   cand_idx def_phi;
285 
286   /* Savings that can be expected from eliminating dead code if this
287      candidate is replaced.  */
288   int dead_savings;
289 
290   /* For PHI candidates, use a visited flag to keep from processing the
291      same PHI twice from multiple paths.  */
292   int visited;
293 
294   /* We sometimes have to cache a phi basis with a phi candidate to
295      avoid processing it twice.  Valid only if visited==1.  */
296   tree cached_basis;
297 };
298 
299 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
300 typedef const struct slsr_cand_d *const_slsr_cand_t;
301 
302 /* Pointers to candidates are chained together as part of a mapping
303    from base expressions to the candidates that use them.  */
304 
305 struct cand_chain_d
306 {
307   /* Base expression for the chain of candidates:  often, but not
308      always, an SSA name.  */
309   tree base_expr;
310 
311   /* Pointer to a candidate.  */
312   slsr_cand_t cand;
313 
314   /* Chain pointer.  */
315   struct cand_chain_d *next;
316 
317 };
318 
319 typedef struct cand_chain_d cand_chain, *cand_chain_t;
320 typedef const struct cand_chain_d *const_cand_chain_t;
321 
322 /* Information about a unique "increment" associated with candidates
323    having an SSA name for a stride.  An increment is the difference
324    between the index of the candidate and the index of its basis,
325    i.e., (i - i') as discussed in the module commentary.
326 
327    When we are not going to generate address arithmetic we treat
328    increments that differ only in sign as the same, allowing sharing
329    of the cost of initializers.  The absolute value of the increment
330    is stored in the incr_info.  */
331 
332 struct incr_info_d
333 {
334   /* The increment that relates a candidate to its basis.  */
335   widest_int incr;
336 
337   /* How many times the increment occurs in the candidate tree.  */
338   unsigned count;
339 
340   /* Cost of replacing candidates using this increment.  Negative and
341      zero costs indicate replacement should be performed.  */
342   int cost;
343 
344   /* If this increment is profitable but is not -1, 0, or 1, it requires
345      an initializer T_0 = stride * incr to be found or introduced in the
346      nearest common dominator of all candidates.  This field holds T_0
347      for subsequent use.  */
348   tree initializer;
349 
350   /* If the initializer was found to already exist, this is the block
351      where it was found.  */
352   basic_block init_bb;
353 };
354 
355 typedef struct incr_info_d incr_info, *incr_info_t;
356 
357 /* Candidates are maintained in a vector.  If candidate X dominates
358    candidate Y, then X appears before Y in the vector; but the
359    converse does not necessarily hold.  */
360 static vec<slsr_cand_t> cand_vec;
361 
362 enum cost_consts
363 {
364   COST_NEUTRAL = 0,
365   COST_INFINITE = 1000
366 };
367 
368 enum stride_status
369 {
370   UNKNOWN_STRIDE = 0,
371   KNOWN_STRIDE = 1
372 };
373 
374 enum phi_adjust_status
375 {
376   NOT_PHI_ADJUST = 0,
377   PHI_ADJUST = 1
378 };
379 
380 enum count_phis_status
381 {
382   DONT_COUNT_PHIS = 0,
383   COUNT_PHIS = 1
384 };
385 
386 /* Constrain how many PHI nodes we will visit for a conditional
387    candidate (depth and breadth).  */
388 const int MAX_SPREAD = 16;
389 
390 /* Pointer map embodying a mapping from statements to candidates.  */
391 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
392 
393 /* Obstack for candidates.  */
394 static struct obstack cand_obstack;
395 
396 /* Obstack for candidate chains.  */
397 static struct obstack chain_obstack;
398 
399 /* An array INCR_VEC of incr_infos is used during analysis of related
400    candidates having an SSA name for a stride.  INCR_VEC_LEN describes
401    its current length.  MAX_INCR_VEC_LEN is used to avoid costly
402    pathological cases. */
403 static incr_info_t incr_vec;
404 static unsigned incr_vec_len;
405 const int MAX_INCR_VEC_LEN = 16;
406 
407 /* For a chain of candidates with unknown stride, indicates whether or not
408    we must generate pointer arithmetic when replacing statements.  */
409 static bool address_arithmetic_p;
410 
411 /* Forward function declarations.  */
412 static slsr_cand_t base_cand_from_table (tree);
413 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
414 static bool legal_cast_p_1 (tree, tree);
415 
416 /* Produce a pointer to the IDX'th candidate in the candidate vector.  */
417 
418 static slsr_cand_t
419 lookup_cand (cand_idx idx)
420 {
421   return cand_vec[idx - 1];
422 }
423 
424 /* Helper for hashing a candidate chain header.  */
425 
426 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
427 {
428   static inline hashval_t hash (const cand_chain *);
429   static inline bool equal (const cand_chain *, const cand_chain *);
430 };
431 
432 inline hashval_t
433 cand_chain_hasher::hash (const cand_chain *p)
434 {
435   tree base_expr = p->base_expr;
436   return iterative_hash_expr (base_expr, 0);
437 }
438 
439 inline bool
440 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
441 {
442   return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
443 }
444 
445 /* Hash table embodying a mapping from base exprs to chains of candidates.  */
446 static hash_table<cand_chain_hasher> *base_cand_map;
447 
448 /* Pointer map used by tree_to_aff_combination_expand.  */
449 static hash_map<tree, name_expansion *> *name_expansions;
450 /* Pointer map embodying a mapping from bases to alternative bases.  */
451 static hash_map<tree, tree> *alt_base_map;
452 
453 /* Given BASE, use the tree affine combiniation facilities to
454    find the underlying tree expression for BASE, with any
455    immediate offset excluded.
456 
457    N.B. we should eliminate this backtracking with better forward
458    analysis in a future release.  */
459 
460 static tree
461 get_alternative_base (tree base)
462 {
463   tree *result = alt_base_map->get (base);
464 
465   if (result == NULL)
466     {
467       tree expr;
468       aff_tree aff;
469 
470       tree_to_aff_combination_expand (base, TREE_TYPE (base),
471 				      &aff, &name_expansions);
472       aff.offset = 0;
473       expr = aff_combination_to_tree (&aff);
474 
475       gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
476 
477       return expr == base ? NULL : expr;
478     }
479 
480   return *result;
481 }
482 
483 /* Look in the candidate table for a CAND_PHI that defines BASE and
484    return it if found; otherwise return NULL.  */
485 
486 static cand_idx
487 find_phi_def (tree base)
488 {
489   slsr_cand_t c;
490 
491   if (TREE_CODE (base) != SSA_NAME)
492     return 0;
493 
494   c = base_cand_from_table (base);
495 
496   if (!c || c->kind != CAND_PHI
497       || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (c->cand_stmt)))
498     return 0;
499 
500   return c->cand_num;
501 }
502 
503 /* Determine whether all uses of NAME are directly or indirectly
504    used by STMT.  That is, we want to know whether if STMT goes
505    dead, the definition of NAME also goes dead.  */
506 static bool
507 uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
508 {
509   gimple *use_stmt;
510   imm_use_iterator iter;
511   bool retval = true;
512 
513   FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
514     {
515       if (use_stmt == stmt || is_gimple_debug (use_stmt))
516 	continue;
517 
518       if (!is_gimple_assign (use_stmt)
519 	  || !gimple_get_lhs (use_stmt)
520 	  || !is_gimple_reg (gimple_get_lhs (use_stmt))
521 	  || recurse >= 10
522 	  || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt), stmt,
523 				     recurse + 1))
524 	{
525 	  retval = false;
526 	  BREAK_FROM_IMM_USE_STMT (iter);
527 	}
528     }
529 
530   return retval;
531 }
532 
533 /* Helper routine for find_basis_for_candidate.  May be called twice:
534    once for the candidate's base expr, and optionally again either for
535    the candidate's phi definition or for a CAND_REF's alternative base
536    expression.  */
537 
538 static slsr_cand_t
539 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
540 {
541   cand_chain mapping_key;
542   cand_chain_t chain;
543   slsr_cand_t basis = NULL;
544 
545   // Limit potential of N^2 behavior for long candidate chains.
546   int iters = 0;
547   int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
548 
549   mapping_key.base_expr = base_expr;
550   chain = base_cand_map->find (&mapping_key);
551 
552   for (; chain && iters < max_iters; chain = chain->next, ++iters)
553     {
554       slsr_cand_t one_basis = chain->cand;
555 
556       if (one_basis->kind != c->kind
557 	  || one_basis->cand_stmt == c->cand_stmt
558 	  || !operand_equal_p (one_basis->stride, c->stride, 0)
559 	  || !types_compatible_p (one_basis->cand_type, c->cand_type)
560 	  || !types_compatible_p (one_basis->stride_type, c->stride_type)
561 	  || !dominated_by_p (CDI_DOMINATORS,
562 			      gimple_bb (c->cand_stmt),
563 			      gimple_bb (one_basis->cand_stmt)))
564 	continue;
565 
566       tree lhs = gimple_assign_lhs (one_basis->cand_stmt);
567       if (lhs && TREE_CODE (lhs) == SSA_NAME
568 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
569 	continue;
570 
571       if (!basis || basis->cand_num < one_basis->cand_num)
572 	basis = one_basis;
573     }
574 
575   return basis;
576 }
577 
578 /* Use the base expr from candidate C to look for possible candidates
579    that can serve as a basis for C.  Each potential basis must also
580    appear in a block that dominates the candidate statement and have
581    the same stride and type.  If more than one possible basis exists,
582    the one with highest index in the vector is chosen; this will be
583    the most immediately dominating basis.  */
584 
585 static int
586 find_basis_for_candidate (slsr_cand_t c)
587 {
588   slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
589 
590   /* If a candidate doesn't have a basis using its base expression,
591      it may have a basis hidden by one or more intervening phis.  */
592   if (!basis && c->def_phi)
593     {
594       basic_block basis_bb, phi_bb;
595       slsr_cand_t phi_cand = lookup_cand (c->def_phi);
596       basis = find_basis_for_base_expr (c, phi_cand->base_expr);
597 
598       if (basis)
599 	{
600 	  /* A hidden basis must dominate the phi-definition of the
601 	     candidate's base name.  */
602 	  phi_bb = gimple_bb (phi_cand->cand_stmt);
603 	  basis_bb = gimple_bb (basis->cand_stmt);
604 
605 	  if (phi_bb == basis_bb
606 	      || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
607 	    {
608 	      basis = NULL;
609 	      c->basis = 0;
610 	    }
611 
612 	  /* If we found a hidden basis, estimate additional dead-code
613 	     savings if the phi and its feeding statements can be removed.  */
614 	  tree feeding_var = gimple_phi_result (phi_cand->cand_stmt);
615 	  if (basis && uses_consumed_by_stmt (feeding_var, c->cand_stmt))
616 	    c->dead_savings += phi_cand->dead_savings;
617 	}
618     }
619 
620   if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
621     {
622       tree alt_base_expr = get_alternative_base (c->base_expr);
623       if (alt_base_expr)
624 	basis = find_basis_for_base_expr (c, alt_base_expr);
625     }
626 
627   if (basis)
628     {
629       c->sibling = basis->dependent;
630       basis->dependent = c->cand_num;
631       return basis->cand_num;
632     }
633 
634   return 0;
635 }
636 
637 /* Record a mapping from BASE to C, indicating that C may potentially serve
638    as a basis using that base expression.  BASE may be the same as
639    C->BASE_EXPR; alternatively BASE can be a different tree that share the
640    underlining expression of C->BASE_EXPR.  */
641 
642 static void
643 record_potential_basis (slsr_cand_t c, tree base)
644 {
645   cand_chain_t node;
646   cand_chain **slot;
647 
648   gcc_assert (base);
649 
650   node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
651   node->base_expr = base;
652   node->cand = c;
653   node->next = NULL;
654   slot = base_cand_map->find_slot (node, INSERT);
655 
656   if (*slot)
657     {
658       cand_chain_t head = (cand_chain_t) (*slot);
659       node->next = head->next;
660       head->next = node;
661     }
662   else
663     *slot = node;
664 }
665 
666 /* Allocate storage for a new candidate and initialize its fields.
667    Attempt to find a basis for the candidate.
668 
669    For CAND_REF, an alternative base may also be recorded and used
670    to find a basis.  This helps cases where the expression hidden
671    behind BASE (which is usually an SSA_NAME) has immediate offset,
672    e.g.
673 
674      a2[i][j] = 1;
675      a2[i + 20][j] = 2;  */
676 
677 static slsr_cand_t
678 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
679 			   const widest_int &index, tree stride, tree ctype,
680 			   tree stype, unsigned savings)
681 {
682   slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
683 					       sizeof (slsr_cand));
684   c->cand_stmt = gs;
685   c->base_expr = base;
686   c->stride = stride;
687   c->index = index;
688   c->cand_type = ctype;
689   c->stride_type = stype;
690   c->kind = kind;
691   c->cand_num = cand_vec.length () + 1;
692   c->next_interp = 0;
693   c->first_interp = c->cand_num;
694   c->dependent = 0;
695   c->sibling = 0;
696   c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
697   c->dead_savings = savings;
698   c->visited = 0;
699   c->cached_basis = NULL_TREE;
700 
701   cand_vec.safe_push (c);
702 
703   if (kind == CAND_PHI)
704     c->basis = 0;
705   else
706     c->basis = find_basis_for_candidate (c);
707 
708   record_potential_basis (c, base);
709   if (flag_expensive_optimizations && kind == CAND_REF)
710     {
711       tree alt_base = get_alternative_base (base);
712       if (alt_base)
713 	record_potential_basis (c, alt_base);
714     }
715 
716   return c;
717 }
718 
719 /* Determine the target cost of statement GS when compiling according
720    to SPEED.  */
721 
722 static int
723 stmt_cost (gimple *gs, bool speed)
724 {
725   tree lhs, rhs1, rhs2;
726   machine_mode lhs_mode;
727 
728   gcc_assert (is_gimple_assign (gs));
729   lhs = gimple_assign_lhs (gs);
730   rhs1 = gimple_assign_rhs1 (gs);
731   lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
732 
733   switch (gimple_assign_rhs_code (gs))
734     {
735     case MULT_EXPR:
736       rhs2 = gimple_assign_rhs2 (gs);
737 
738       if (tree_fits_shwi_p (rhs2))
739 	return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
740 
741       gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
742       return mul_cost (speed, lhs_mode);
743 
744     case PLUS_EXPR:
745     case POINTER_PLUS_EXPR:
746     case MINUS_EXPR:
747       return add_cost (speed, lhs_mode);
748 
749     case NEGATE_EXPR:
750       return neg_cost (speed, lhs_mode);
751 
752     CASE_CONVERT:
753       return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
754 
755     /* Note that we don't assign costs to copies that in most cases
756        will go away.  */
757     case SSA_NAME:
758       return 0;
759 
760     default:
761       ;
762     }
763 
764   gcc_unreachable ();
765   return 0;
766 }
767 
768 /* Look up the defining statement for BASE_IN and return a pointer
769    to its candidate in the candidate table, if any; otherwise NULL.
770    Only CAND_ADD and CAND_MULT candidates are returned.  */
771 
772 static slsr_cand_t
773 base_cand_from_table (tree base_in)
774 {
775   slsr_cand_t *result;
776 
777   gimple *def = SSA_NAME_DEF_STMT (base_in);
778   if (!def)
779     return (slsr_cand_t) NULL;
780 
781   result = stmt_cand_map->get (def);
782 
783   if (result && (*result)->kind != CAND_REF)
784     return *result;
785 
786   return (slsr_cand_t) NULL;
787 }
788 
789 /* Add an entry to the statement-to-candidate mapping.  */
790 
791 static void
792 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
793 {
794   gcc_assert (!stmt_cand_map->put (gs, c));
795 }
796 
797 /* Given PHI which contains a phi statement, determine whether it
798    satisfies all the requirements of a phi candidate.  If so, create
799    a candidate.  Note that a CAND_PHI never has a basis itself, but
800    is used to help find a basis for subsequent candidates.  */
801 
802 static void
803 slsr_process_phi (gphi *phi, bool speed)
804 {
805   unsigned i;
806   tree arg0_base = NULL_TREE, base_type;
807   slsr_cand_t c;
808   struct loop *cand_loop = gimple_bb (phi)->loop_father;
809   unsigned savings = 0;
810 
811   /* A CAND_PHI requires each of its arguments to have the same
812      derived base name.  (See the module header commentary for a
813      definition of derived base names.)  Furthermore, all feeding
814      definitions must be in the same position in the loop hierarchy
815      as PHI.  */
816 
817   for (i = 0; i < gimple_phi_num_args (phi); i++)
818     {
819       slsr_cand_t arg_cand;
820       tree arg = gimple_phi_arg_def (phi, i);
821       tree derived_base_name = NULL_TREE;
822       gimple *arg_stmt = NULL;
823       basic_block arg_bb = NULL;
824 
825       if (TREE_CODE (arg) != SSA_NAME)
826 	return;
827 
828       arg_cand = base_cand_from_table (arg);
829 
830       if (arg_cand)
831 	{
832 	  while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
833 	    {
834 	      if (!arg_cand->next_interp)
835 		return;
836 
837 	      arg_cand = lookup_cand (arg_cand->next_interp);
838 	    }
839 
840 	  if (!integer_onep (arg_cand->stride))
841 	    return;
842 
843 	  derived_base_name = arg_cand->base_expr;
844 	  arg_stmt = arg_cand->cand_stmt;
845 	  arg_bb = gimple_bb (arg_stmt);
846 
847 	  /* Gather potential dead code savings if the phi statement
848 	     can be removed later on.  */
849 	  if (uses_consumed_by_stmt (arg, phi))
850 	    {
851 	      if (gimple_code (arg_stmt) == GIMPLE_PHI)
852 		savings += arg_cand->dead_savings;
853 	      else
854 		savings += stmt_cost (arg_stmt, speed);
855 	    }
856 	}
857       else if (SSA_NAME_IS_DEFAULT_DEF (arg))
858 	{
859 	  derived_base_name = arg;
860 	  arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
861 	}
862 
863       if (!arg_bb || arg_bb->loop_father != cand_loop)
864 	return;
865 
866       if (i == 0)
867 	arg0_base = derived_base_name;
868       else if (!operand_equal_p (derived_base_name, arg0_base, 0))
869 	return;
870     }
871 
872   /* Create the candidate.  "alloc_cand_and_find_basis" is named
873      misleadingly for this case, as no basis will be sought for a
874      CAND_PHI.  */
875   base_type = TREE_TYPE (arg0_base);
876 
877   c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
878 				 0, integer_one_node, base_type,
879 				 sizetype, savings);
880 
881   /* Add the candidate to the statement-candidate mapping.  */
882   add_cand_for_stmt (phi, c);
883 }
884 
885 /* Given PBASE which is a pointer to tree, look up the defining
886    statement for it and check whether the candidate is in the
887    form of:
888 
889      X = B + (1 * S), S is integer constant
890      X = B + (i * S), S is integer one
891 
892    If so, set PBASE to the candidate's base_expr and return double
893    int (i * S).
894    Otherwise, just return double int zero.  */
895 
896 static widest_int
897 backtrace_base_for_ref (tree *pbase)
898 {
899   tree base_in = *pbase;
900   slsr_cand_t base_cand;
901 
902   STRIP_NOPS (base_in);
903 
904   /* Strip off widening conversion(s) to handle cases where
905      e.g. 'B' is widened from an 'int' in order to calculate
906      a 64-bit address.  */
907   if (CONVERT_EXPR_P (base_in)
908       && legal_cast_p_1 (TREE_TYPE (base_in),
909 			 TREE_TYPE (TREE_OPERAND (base_in, 0))))
910     base_in = get_unwidened (base_in, NULL_TREE);
911 
912   if (TREE_CODE (base_in) != SSA_NAME)
913     return 0;
914 
915   base_cand = base_cand_from_table (base_in);
916 
917   while (base_cand && base_cand->kind != CAND_PHI)
918     {
919       if (base_cand->kind == CAND_ADD
920 	  && base_cand->index == 1
921 	  && TREE_CODE (base_cand->stride) == INTEGER_CST)
922 	{
923 	  /* X = B + (1 * S), S is integer constant.  */
924 	  *pbase = base_cand->base_expr;
925 	  return wi::to_widest (base_cand->stride);
926 	}
927       else if (base_cand->kind == CAND_ADD
928 	       && TREE_CODE (base_cand->stride) == INTEGER_CST
929 	       && integer_onep (base_cand->stride))
930 	{
931 	  /* X = B + (i * S), S is integer one.  */
932 	  *pbase = base_cand->base_expr;
933 	  return base_cand->index;
934 	}
935 
936       if (base_cand->next_interp)
937 	base_cand = lookup_cand (base_cand->next_interp);
938       else
939 	base_cand = NULL;
940     }
941 
942   return 0;
943 }
944 
945 /* Look for the following pattern:
946 
947     *PBASE:    MEM_REF (T1, C1)
948 
949     *POFFSET:  MULT_EXPR (T2, C3)        [C2 is zero]
950                      or
951                MULT_EXPR (PLUS_EXPR (T2, C2), C3)
952                      or
953                MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
954 
955     *PINDEX:   C4 * BITS_PER_UNIT
956 
957    If not present, leave the input values unchanged and return FALSE.
958    Otherwise, modify the input values as follows and return TRUE:
959 
960     *PBASE:    T1
961     *POFFSET:  MULT_EXPR (T2, C3)
962     *PINDEX:   C1 + (C2 * C3) + C4
963 
964    When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
965    will be further restructured to:
966 
967     *PBASE:    T1
968     *POFFSET:  MULT_EXPR (T2', C3)
969     *PINDEX:   C1 + (C2 * C3) + C4 + (C5 * C3)  */
970 
971 static bool
972 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
973 		       tree *ptype)
974 {
975   tree base = *pbase, offset = *poffset;
976   widest_int index = *pindex;
977   tree mult_op0, t1, t2, type;
978   widest_int c1, c2, c3, c4, c5;
979   offset_int mem_offset;
980 
981   if (!base
982       || !offset
983       || TREE_CODE (base) != MEM_REF
984       || !mem_ref_offset (base).is_constant (&mem_offset)
985       || TREE_CODE (offset) != MULT_EXPR
986       || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
987       || wi::umod_floor (index, BITS_PER_UNIT) != 0)
988     return false;
989 
990   t1 = TREE_OPERAND (base, 0);
991   c1 = widest_int::from (mem_offset, SIGNED);
992   type = TREE_TYPE (TREE_OPERAND (base, 1));
993 
994   mult_op0 = TREE_OPERAND (offset, 0);
995   c3 = wi::to_widest (TREE_OPERAND (offset, 1));
996 
997   if (TREE_CODE (mult_op0) == PLUS_EXPR)
998 
999     if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
1000       {
1001 	t2 = TREE_OPERAND (mult_op0, 0);
1002 	c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
1003       }
1004     else
1005       return false;
1006 
1007   else if (TREE_CODE (mult_op0) == MINUS_EXPR)
1008 
1009     if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
1010       {
1011 	t2 = TREE_OPERAND (mult_op0, 0);
1012 	c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
1013       }
1014     else
1015       return false;
1016 
1017   else
1018     {
1019       t2 = mult_op0;
1020       c2 = 0;
1021     }
1022 
1023   c4 = index >> LOG2_BITS_PER_UNIT;
1024   c5 = backtrace_base_for_ref (&t2);
1025 
1026   *pbase = t1;
1027   *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
1028 			  wide_int_to_tree (sizetype, c3));
1029   *pindex = c1 + c2 * c3 + c4 + c5 * c3;
1030   *ptype = type;
1031 
1032   return true;
1033 }
1034 
1035 /* Given GS which contains a data reference, create a CAND_REF entry in
1036    the candidate table and attempt to find a basis.  */
1037 
1038 static void
1039 slsr_process_ref (gimple *gs)
1040 {
1041   tree ref_expr, base, offset, type;
1042   poly_int64 bitsize, bitpos;
1043   machine_mode mode;
1044   int unsignedp, reversep, volatilep;
1045   slsr_cand_t c;
1046 
1047   if (gimple_vdef (gs))
1048     ref_expr = gimple_assign_lhs (gs);
1049   else
1050     ref_expr = gimple_assign_rhs1 (gs);
1051 
1052   if (!handled_component_p (ref_expr)
1053       || TREE_CODE (ref_expr) == BIT_FIELD_REF
1054       || (TREE_CODE (ref_expr) == COMPONENT_REF
1055 	  && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1056     return;
1057 
1058   base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1059 			      &unsignedp, &reversep, &volatilep);
1060   HOST_WIDE_INT cbitpos;
1061   if (reversep || !bitpos.is_constant (&cbitpos))
1062     return;
1063   widest_int index = cbitpos;
1064 
1065   if (!restructure_reference (&base, &offset, &index, &type))
1066     return;
1067 
1068   c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1069 				 type, sizetype, 0);
1070 
1071   /* Add the candidate to the statement-candidate mapping.  */
1072   add_cand_for_stmt (gs, c);
1073 }
1074 
1075 /* Create a candidate entry for a statement GS, where GS multiplies
1076    two SSA names BASE_IN and STRIDE_IN.  Propagate any known information
1077    about the two SSA names into the new candidate.  Return the new
1078    candidate.  */
1079 
1080 static slsr_cand_t
1081 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1082 {
1083   tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1084   tree stype = NULL_TREE;
1085   widest_int index;
1086   unsigned savings = 0;
1087   slsr_cand_t c;
1088   slsr_cand_t base_cand = base_cand_from_table (base_in);
1089 
1090   /* Look at all interpretations of the base candidate, if necessary,
1091      to find information to propagate into this candidate.  */
1092   while (base_cand && !base && base_cand->kind != CAND_PHI)
1093     {
1094 
1095       if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1096 	{
1097 	  /* Y = (B + i') * 1
1098 	     X = Y * Z
1099 	     ================
1100 	     X = (B + i') * Z  */
1101 	  base = base_cand->base_expr;
1102 	  index = base_cand->index;
1103 	  stride = stride_in;
1104 	  ctype = base_cand->cand_type;
1105 	  stype = TREE_TYPE (stride_in);
1106 	  if (has_single_use (base_in))
1107 	    savings = (base_cand->dead_savings
1108 		       + stmt_cost (base_cand->cand_stmt, speed));
1109 	}
1110       else if (base_cand->kind == CAND_ADD
1111 	       && TREE_CODE (base_cand->stride) == INTEGER_CST)
1112 	{
1113 	  /* Y = B + (i' * S), S constant
1114 	     X = Y * Z
1115 	     ============================
1116 	     X = B + ((i' * S) * Z)  */
1117 	  base = base_cand->base_expr;
1118 	  index = base_cand->index * wi::to_widest (base_cand->stride);
1119 	  stride = stride_in;
1120 	  ctype = base_cand->cand_type;
1121 	  stype = TREE_TYPE (stride_in);
1122 	  if (has_single_use (base_in))
1123 	    savings = (base_cand->dead_savings
1124 		       + stmt_cost (base_cand->cand_stmt, speed));
1125 	}
1126 
1127       if (base_cand->next_interp)
1128 	base_cand = lookup_cand (base_cand->next_interp);
1129       else
1130 	base_cand = NULL;
1131     }
1132 
1133   if (!base)
1134     {
1135       /* No interpretations had anything useful to propagate, so
1136 	 produce X = (Y + 0) * Z.  */
1137       base = base_in;
1138       index = 0;
1139       stride = stride_in;
1140       ctype = TREE_TYPE (base_in);
1141       stype = TREE_TYPE (stride_in);
1142     }
1143 
1144   c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1145 				 ctype, stype, savings);
1146   return c;
1147 }
1148 
1149 /* Create a candidate entry for a statement GS, where GS multiplies
1150    SSA name BASE_IN by constant STRIDE_IN.  Propagate any known
1151    information about BASE_IN into the new candidate.  Return the new
1152    candidate.  */
1153 
1154 static slsr_cand_t
1155 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1156 {
1157   tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1158   widest_int index, temp;
1159   unsigned savings = 0;
1160   slsr_cand_t c;
1161   slsr_cand_t base_cand = base_cand_from_table (base_in);
1162 
1163   /* Look at all interpretations of the base candidate, if necessary,
1164      to find information to propagate into this candidate.  */
1165   while (base_cand && !base && base_cand->kind != CAND_PHI)
1166     {
1167       if (base_cand->kind == CAND_MULT
1168 	  && TREE_CODE (base_cand->stride) == INTEGER_CST)
1169 	{
1170 	  /* Y = (B + i') * S, S constant
1171 	     X = Y * c
1172 	     ============================
1173 	     X = (B + i') * (S * c)  */
1174 	  temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1175 	  if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1176 	    {
1177 	      base = base_cand->base_expr;
1178 	      index = base_cand->index;
1179 	      stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1180 	      ctype = base_cand->cand_type;
1181 	      if (has_single_use (base_in))
1182 		savings = (base_cand->dead_savings
1183 			   + stmt_cost (base_cand->cand_stmt, speed));
1184 	    }
1185 	}
1186       else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1187 	{
1188 	  /* Y = B + (i' * 1)
1189 	     X = Y * c
1190 	     ===========================
1191 	     X = (B + i') * c  */
1192 	  base = base_cand->base_expr;
1193 	  index = base_cand->index;
1194 	  stride = stride_in;
1195 	  ctype = base_cand->cand_type;
1196 	  if (has_single_use (base_in))
1197 	    savings = (base_cand->dead_savings
1198 		       + stmt_cost (base_cand->cand_stmt, speed));
1199 	}
1200       else if (base_cand->kind == CAND_ADD
1201 	       && base_cand->index == 1
1202 	       && TREE_CODE (base_cand->stride) == INTEGER_CST)
1203 	{
1204 	  /* Y = B + (1 * S), S constant
1205 	     X = Y * c
1206 	     ===========================
1207 	     X = (B + S) * c  */
1208 	  base = base_cand->base_expr;
1209 	  index = wi::to_widest (base_cand->stride);
1210 	  stride = stride_in;
1211 	  ctype = base_cand->cand_type;
1212 	  if (has_single_use (base_in))
1213 	    savings = (base_cand->dead_savings
1214 		       + stmt_cost (base_cand->cand_stmt, speed));
1215 	}
1216 
1217       if (base_cand->next_interp)
1218 	base_cand = lookup_cand (base_cand->next_interp);
1219       else
1220 	base_cand = NULL;
1221     }
1222 
1223   if (!base)
1224     {
1225       /* No interpretations had anything useful to propagate, so
1226 	 produce X = (Y + 0) * c.  */
1227       base = base_in;
1228       index = 0;
1229       stride = stride_in;
1230       ctype = TREE_TYPE (base_in);
1231     }
1232 
1233   c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1234 				 ctype, sizetype, savings);
1235   return c;
1236 }
1237 
1238 /* Given GS which is a multiply of scalar integers, make an appropriate
1239    entry in the candidate table.  If this is a multiply of two SSA names,
1240    create two CAND_MULT interpretations and attempt to find a basis for
1241    each of them.  Otherwise, create a single CAND_MULT and attempt to
1242    find a basis.  */
1243 
1244 static void
1245 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1246 {
1247   slsr_cand_t c, c2;
1248 
1249   /* If this is a multiply of an SSA name with itself, it is highly
1250      unlikely that we will get a strength reduction opportunity, so
1251      don't record it as a candidate.  This simplifies the logic for
1252      finding a basis, so if this is removed that must be considered.  */
1253   if (rhs1 == rhs2)
1254     return;
1255 
1256   if (TREE_CODE (rhs2) == SSA_NAME)
1257     {
1258       /* Record an interpretation of this statement in the candidate table
1259 	 assuming RHS1 is the base expression and RHS2 is the stride.  */
1260       c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1261 
1262       /* Add the first interpretation to the statement-candidate mapping.  */
1263       add_cand_for_stmt (gs, c);
1264 
1265       /* Record another interpretation of this statement assuming RHS1
1266 	 is the stride and RHS2 is the base expression.  */
1267       c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1268       c->next_interp = c2->cand_num;
1269       c2->first_interp = c->cand_num;
1270     }
1271   else if (TREE_CODE (rhs2) == INTEGER_CST && !integer_zerop (rhs2))
1272     {
1273       /* Record an interpretation for the multiply-immediate.  */
1274       c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1275 
1276       /* Add the interpretation to the statement-candidate mapping.  */
1277       add_cand_for_stmt (gs, c);
1278     }
1279 }
1280 
1281 /* Create a candidate entry for a statement GS, where GS adds two
1282    SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1283    subtracts ADDEND_IN from BASE_IN otherwise.  Propagate any known
1284    information about the two SSA names into the new candidate.
1285    Return the new candidate.  */
1286 
1287 static slsr_cand_t
1288 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1289 		     bool subtract_p, bool speed)
1290 {
1291   tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1292   tree stype = NULL_TREE;
1293   widest_int index;
1294   unsigned savings = 0;
1295   slsr_cand_t c;
1296   slsr_cand_t base_cand = base_cand_from_table (base_in);
1297   slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1298 
1299   /* The most useful transformation is a multiply-immediate feeding
1300      an add or subtract.  Look for that first.  */
1301   while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1302     {
1303       if (addend_cand->kind == CAND_MULT
1304 	  && addend_cand->index == 0
1305 	  && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1306 	{
1307 	  /* Z = (B + 0) * S, S constant
1308 	     X = Y +/- Z
1309 	     ===========================
1310 	     X = Y + ((+/-1 * S) * B)  */
1311 	  base = base_in;
1312 	  index = wi::to_widest (addend_cand->stride);
1313 	  if (subtract_p)
1314 	    index = -index;
1315 	  stride = addend_cand->base_expr;
1316 	  ctype = TREE_TYPE (base_in);
1317 	  stype = addend_cand->cand_type;
1318 	  if (has_single_use (addend_in))
1319 	    savings = (addend_cand->dead_savings
1320 		       + stmt_cost (addend_cand->cand_stmt, speed));
1321 	}
1322 
1323       if (addend_cand->next_interp)
1324 	addend_cand = lookup_cand (addend_cand->next_interp);
1325       else
1326 	addend_cand = NULL;
1327     }
1328 
1329   while (base_cand && !base && base_cand->kind != CAND_PHI)
1330     {
1331       if (base_cand->kind == CAND_ADD
1332 	  && (base_cand->index == 0
1333 	      || operand_equal_p (base_cand->stride,
1334 				  integer_zero_node, 0)))
1335 	{
1336 	  /* Y = B + (i' * S), i' * S = 0
1337 	     X = Y +/- Z
1338 	     ============================
1339 	     X = B + (+/-1 * Z)  */
1340 	  base = base_cand->base_expr;
1341 	  index = subtract_p ? -1 : 1;
1342 	  stride = addend_in;
1343 	  ctype = base_cand->cand_type;
1344 	  stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1345 		   : TREE_TYPE (addend_in));
1346 	  if (has_single_use (base_in))
1347 	    savings = (base_cand->dead_savings
1348 		       + stmt_cost (base_cand->cand_stmt, speed));
1349 	}
1350       else if (subtract_p)
1351 	{
1352 	  slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1353 
1354 	  while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1355 	    {
1356 	      if (subtrahend_cand->kind == CAND_MULT
1357 		  && subtrahend_cand->index == 0
1358 		  && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1359 		{
1360 		  /* Z = (B + 0) * S, S constant
1361 		     X = Y - Z
1362 		     ===========================
1363 		     Value:  X = Y + ((-1 * S) * B)  */
1364 		  base = base_in;
1365 		  index = wi::to_widest (subtrahend_cand->stride);
1366 		  index = -index;
1367 		  stride = subtrahend_cand->base_expr;
1368 		  ctype = TREE_TYPE (base_in);
1369 		  stype = subtrahend_cand->cand_type;
1370 		  if (has_single_use (addend_in))
1371 		    savings = (subtrahend_cand->dead_savings
1372 			       + stmt_cost (subtrahend_cand->cand_stmt, speed));
1373 		}
1374 
1375 	      if (subtrahend_cand->next_interp)
1376 		subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1377 	      else
1378 		subtrahend_cand = NULL;
1379 	    }
1380 	}
1381 
1382       if (base_cand->next_interp)
1383 	base_cand = lookup_cand (base_cand->next_interp);
1384       else
1385 	base_cand = NULL;
1386     }
1387 
1388   if (!base)
1389     {
1390       /* No interpretations had anything useful to propagate, so
1391 	 produce X = Y + (1 * Z).  */
1392       base = base_in;
1393       index = subtract_p ? -1 : 1;
1394       stride = addend_in;
1395       ctype = TREE_TYPE (base_in);
1396       stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1397 	       : TREE_TYPE (addend_in));
1398     }
1399 
1400   c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1401 				 ctype, stype, savings);
1402   return c;
1403 }
1404 
1405 /* Create a candidate entry for a statement GS, where GS adds SSA
1406    name BASE_IN to constant INDEX_IN.  Propagate any known information
1407    about BASE_IN into the new candidate.  Return the new candidate.  */
1408 
1409 static slsr_cand_t
1410 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1411 		     bool speed)
1412 {
1413   enum cand_kind kind = CAND_ADD;
1414   tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1415   tree stype = NULL_TREE;
1416   widest_int index, multiple;
1417   unsigned savings = 0;
1418   slsr_cand_t c;
1419   slsr_cand_t base_cand = base_cand_from_table (base_in);
1420 
1421   while (base_cand && !base && base_cand->kind != CAND_PHI)
1422     {
1423       signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1424 
1425       if (TREE_CODE (base_cand->stride) == INTEGER_CST
1426 	  && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1427 				sign, &multiple))
1428 	{
1429 	  /* Y = (B + i') * S, S constant, c = kS for some integer k
1430 	     X = Y + c
1431 	     ============================
1432 	     X = (B + (i'+ k)) * S
1433 	  OR
1434 	     Y = B + (i' * S), S constant, c = kS for some integer k
1435 	     X = Y + c
1436 	     ============================
1437 	     X = (B + (i'+ k)) * S  */
1438 	  kind = base_cand->kind;
1439 	  base = base_cand->base_expr;
1440 	  index = base_cand->index + multiple;
1441 	  stride = base_cand->stride;
1442 	  ctype = base_cand->cand_type;
1443 	  stype = base_cand->stride_type;
1444 	  if (has_single_use (base_in))
1445 	    savings = (base_cand->dead_savings
1446 		       + stmt_cost (base_cand->cand_stmt, speed));
1447 	}
1448 
1449       if (base_cand->next_interp)
1450 	base_cand = lookup_cand (base_cand->next_interp);
1451       else
1452 	base_cand = NULL;
1453     }
1454 
1455   if (!base)
1456     {
1457       /* No interpretations had anything useful to propagate, so
1458 	 produce X = Y + (c * 1).  */
1459       kind = CAND_ADD;
1460       base = base_in;
1461       index = index_in;
1462       stride = integer_one_node;
1463       ctype = TREE_TYPE (base_in);
1464       stype = sizetype;
1465     }
1466 
1467   c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1468 				 ctype, stype, savings);
1469   return c;
1470 }
1471 
1472 /* Given GS which is an add or subtract of scalar integers or pointers,
1473    make at least one appropriate entry in the candidate table.  */
1474 
1475 static void
1476 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1477 {
1478   bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1479   slsr_cand_t c = NULL, c2;
1480 
1481   if (TREE_CODE (rhs2) == SSA_NAME)
1482     {
1483       /* First record an interpretation assuming RHS1 is the base expression
1484 	 and RHS2 is the stride.  But it doesn't make sense for the
1485 	 stride to be a pointer, so don't record a candidate in that case.  */
1486       if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1487 	{
1488 	  c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1489 
1490 	  /* Add the first interpretation to the statement-candidate
1491 	     mapping.  */
1492 	  add_cand_for_stmt (gs, c);
1493 	}
1494 
1495       /* If the two RHS operands are identical, or this is a subtract,
1496 	 we're done.  */
1497       if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1498 	return;
1499 
1500       /* Otherwise, record another interpretation assuming RHS2 is the
1501 	 base expression and RHS1 is the stride, again provided that the
1502 	 stride is not a pointer.  */
1503       if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1504 	{
1505 	  c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1506 	  if (c)
1507 	    {
1508 	      c->next_interp = c2->cand_num;
1509 	      c2->first_interp = c->cand_num;
1510 	    }
1511 	  else
1512 	    add_cand_for_stmt (gs, c2);
1513 	}
1514     }
1515   else if (TREE_CODE (rhs2) == INTEGER_CST)
1516     {
1517       /* Record an interpretation for the add-immediate.  */
1518       widest_int index = wi::to_widest (rhs2);
1519       if (subtract_p)
1520 	index = -index;
1521 
1522       c = create_add_imm_cand (gs, rhs1, index, speed);
1523 
1524       /* Add the interpretation to the statement-candidate mapping.  */
1525       add_cand_for_stmt (gs, c);
1526     }
1527 }
1528 
1529 /* Given GS which is a negate of a scalar integer, make an appropriate
1530    entry in the candidate table.  A negate is equivalent to a multiply
1531    by -1.  */
1532 
1533 static void
1534 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1535 {
1536   /* Record a CAND_MULT interpretation for the multiply by -1.  */
1537   slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1538 
1539   /* Add the interpretation to the statement-candidate mapping.  */
1540   add_cand_for_stmt (gs, c);
1541 }
1542 
1543 /* Help function for legal_cast_p, operating on two trees.  Checks
1544    whether it's allowable to cast from RHS to LHS.  See legal_cast_p
1545    for more details.  */
1546 
1547 static bool
1548 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1549 {
1550   unsigned lhs_size, rhs_size;
1551   bool lhs_wraps, rhs_wraps;
1552 
1553   lhs_size = TYPE_PRECISION (lhs_type);
1554   rhs_size = TYPE_PRECISION (rhs_type);
1555   lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1556   rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1557 
1558   if (lhs_size < rhs_size
1559       || (rhs_wraps && !lhs_wraps)
1560       || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1561     return false;
1562 
1563   return true;
1564 }
1565 
1566 /* Return TRUE if GS is a statement that defines an SSA name from
1567    a conversion and is legal for us to combine with an add and multiply
1568    in the candidate table.  For example, suppose we have:
1569 
1570      A = B + i;
1571      C = (type) A;
1572      D = C * S;
1573 
1574    Without the type-cast, we would create a CAND_MULT for D with base B,
1575    index i, and stride S.  We want to record this candidate only if it
1576    is equivalent to apply the type cast following the multiply:
1577 
1578      A = B + i;
1579      E = A * S;
1580      D = (type) E;
1581 
1582    We will record the type with the candidate for D.  This allows us
1583    to use a similar previous candidate as a basis.  If we have earlier seen
1584 
1585      A' = B + i';
1586      C' = (type) A';
1587      D' = C' * S;
1588 
1589    we can replace D with
1590 
1591      D = D' + (i - i') * S;
1592 
1593    But if moving the type-cast would change semantics, we mustn't do this.
1594 
1595    This is legitimate for casts from a non-wrapping integral type to
1596    any integral type of the same or larger size.  It is not legitimate
1597    to convert a wrapping type to a non-wrapping type, or to a wrapping
1598    type of a different size.  I.e., with a wrapping type, we must
1599    assume that the addition B + i could wrap, in which case performing
1600    the multiply before or after one of the "illegal" type casts will
1601    have different semantics.  */
1602 
1603 static bool
1604 legal_cast_p (gimple *gs, tree rhs)
1605 {
1606   if (!is_gimple_assign (gs)
1607       || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1608     return false;
1609 
1610   return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
1611 }
1612 
1613 /* Given GS which is a cast to a scalar integer type, determine whether
1614    the cast is legal for strength reduction.  If so, make at least one
1615    appropriate entry in the candidate table.  */
1616 
1617 static void
1618 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1619 {
1620   tree lhs, ctype;
1621   slsr_cand_t base_cand, c = NULL, c2;
1622   unsigned savings = 0;
1623 
1624   if (!legal_cast_p (gs, rhs1))
1625     return;
1626 
1627   lhs = gimple_assign_lhs (gs);
1628   base_cand = base_cand_from_table (rhs1);
1629   ctype = TREE_TYPE (lhs);
1630 
1631   if (base_cand && base_cand->kind != CAND_PHI)
1632     {
1633       slsr_cand_t first_cand = NULL;
1634 
1635       while (base_cand)
1636 	{
1637 	  /* Propagate all data from the base candidate except the type,
1638 	     which comes from the cast, and the base candidate's cast,
1639 	     which is no longer applicable.  */
1640 	  if (has_single_use (rhs1))
1641 	    savings = (base_cand->dead_savings
1642 		       + stmt_cost (base_cand->cand_stmt, speed));
1643 
1644 	  c = alloc_cand_and_find_basis (base_cand->kind, gs,
1645 					 base_cand->base_expr,
1646 					 base_cand->index, base_cand->stride,
1647 					 ctype, base_cand->stride_type,
1648 					 savings);
1649 	  if (!first_cand)
1650 	    first_cand = c;
1651 
1652 	  if (first_cand != c)
1653 	    c->first_interp = first_cand->cand_num;
1654 
1655 	  if (base_cand->next_interp)
1656 	    base_cand = lookup_cand (base_cand->next_interp);
1657 	  else
1658 	    base_cand = NULL;
1659 	}
1660     }
1661   else
1662     {
1663       /* If nothing is known about the RHS, create fresh CAND_ADD and
1664 	 CAND_MULT interpretations:
1665 
1666 	 X = Y + (0 * 1)
1667 	 X = (Y + 0) * 1
1668 
1669 	 The first of these is somewhat arbitrary, but the choice of
1670 	 1 for the stride simplifies the logic for propagating casts
1671 	 into their uses.  */
1672       c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1673 				     integer_one_node, ctype, sizetype, 0);
1674       c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1675 				      integer_one_node, ctype, sizetype, 0);
1676       c->next_interp = c2->cand_num;
1677       c2->first_interp = c->cand_num;
1678     }
1679 
1680   /* Add the first (or only) interpretation to the statement-candidate
1681      mapping.  */
1682   add_cand_for_stmt (gs, c);
1683 }
1684 
1685 /* Given GS which is a copy of a scalar integer type, make at least one
1686    appropriate entry in the candidate table.
1687 
1688    This interface is included for completeness, but is unnecessary
1689    if this pass immediately follows a pass that performs copy
1690    propagation, such as DOM.  */
1691 
1692 static void
1693 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1694 {
1695   slsr_cand_t base_cand, c = NULL, c2;
1696   unsigned savings = 0;
1697 
1698   base_cand = base_cand_from_table (rhs1);
1699 
1700   if (base_cand && base_cand->kind != CAND_PHI)
1701     {
1702       slsr_cand_t first_cand = NULL;
1703 
1704       while (base_cand)
1705 	{
1706 	  /* Propagate all data from the base candidate.  */
1707 	  if (has_single_use (rhs1))
1708 	    savings = (base_cand->dead_savings
1709 		       + stmt_cost (base_cand->cand_stmt, speed));
1710 
1711 	  c = alloc_cand_and_find_basis (base_cand->kind, gs,
1712 					 base_cand->base_expr,
1713 					 base_cand->index, base_cand->stride,
1714 					 base_cand->cand_type,
1715 					 base_cand->stride_type, savings);
1716 	  if (!first_cand)
1717 	    first_cand = c;
1718 
1719 	  if (first_cand != c)
1720 	    c->first_interp = first_cand->cand_num;
1721 
1722 	  if (base_cand->next_interp)
1723 	    base_cand = lookup_cand (base_cand->next_interp);
1724 	  else
1725 	    base_cand = NULL;
1726 	}
1727     }
1728   else
1729     {
1730       /* If nothing is known about the RHS, create fresh CAND_ADD and
1731 	 CAND_MULT interpretations:
1732 
1733 	 X = Y + (0 * 1)
1734 	 X = (Y + 0) * 1
1735 
1736 	 The first of these is somewhat arbitrary, but the choice of
1737 	 1 for the stride simplifies the logic for propagating casts
1738 	 into their uses.  */
1739       c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1740 				     integer_one_node, TREE_TYPE (rhs1),
1741 				     sizetype, 0);
1742       c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1743 				      integer_one_node, TREE_TYPE (rhs1),
1744 				      sizetype, 0);
1745       c->next_interp = c2->cand_num;
1746       c2->first_interp = c->cand_num;
1747     }
1748 
1749   /* Add the first (or only) interpretation to the statement-candidate
1750      mapping.  */
1751   add_cand_for_stmt (gs, c);
1752 }
1753 
1754 class find_candidates_dom_walker : public dom_walker
1755 {
1756 public:
1757   find_candidates_dom_walker (cdi_direction direction)
1758     : dom_walker (direction) {}
1759   virtual edge before_dom_children (basic_block);
1760 };
1761 
1762 /* Find strength-reduction candidates in block BB.  */
1763 
1764 edge
1765 find_candidates_dom_walker::before_dom_children (basic_block bb)
1766 {
1767   bool speed = optimize_bb_for_speed_p (bb);
1768 
1769   for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1770        gsi_next (&gsi))
1771     slsr_process_phi (gsi.phi (), speed);
1772 
1773   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1774        gsi_next (&gsi))
1775     {
1776       gimple *gs = gsi_stmt (gsi);
1777 
1778       if (stmt_could_throw_p (gs))
1779 	continue;
1780 
1781       if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1782 	slsr_process_ref (gs);
1783 
1784       else if (is_gimple_assign (gs)
1785 	       && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))
1786 		   || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))))
1787 	{
1788 	  tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1789 
1790 	  switch (gimple_assign_rhs_code (gs))
1791 	    {
1792 	    case MULT_EXPR:
1793 	    case PLUS_EXPR:
1794 	      rhs1 = gimple_assign_rhs1 (gs);
1795 	      rhs2 = gimple_assign_rhs2 (gs);
1796 	      /* Should never happen, but currently some buggy situations
1797 		 in earlier phases put constants in rhs1.  */
1798 	      if (TREE_CODE (rhs1) != SSA_NAME)
1799 		continue;
1800 	      break;
1801 
1802 	    /* Possible future opportunity: rhs1 of a ptr+ can be
1803 	       an ADDR_EXPR.  */
1804 	    case POINTER_PLUS_EXPR:
1805 	    case MINUS_EXPR:
1806 	      rhs2 = gimple_assign_rhs2 (gs);
1807 	      gcc_fallthrough ();
1808 
1809 	    CASE_CONVERT:
1810 	    case SSA_NAME:
1811 	    case NEGATE_EXPR:
1812 	      rhs1 = gimple_assign_rhs1 (gs);
1813 	      if (TREE_CODE (rhs1) != SSA_NAME)
1814 		continue;
1815 	      break;
1816 
1817 	    default:
1818 	      ;
1819 	    }
1820 
1821 	  switch (gimple_assign_rhs_code (gs))
1822 	    {
1823 	    case MULT_EXPR:
1824 	      slsr_process_mul (gs, rhs1, rhs2, speed);
1825 	      break;
1826 
1827 	    case PLUS_EXPR:
1828 	    case POINTER_PLUS_EXPR:
1829 	    case MINUS_EXPR:
1830 	      slsr_process_add (gs, rhs1, rhs2, speed);
1831 	      break;
1832 
1833 	    case NEGATE_EXPR:
1834 	      slsr_process_neg (gs, rhs1, speed);
1835 	      break;
1836 
1837 	    CASE_CONVERT:
1838 	      slsr_process_cast (gs, rhs1, speed);
1839 	      break;
1840 
1841 	    case SSA_NAME:
1842 	      slsr_process_copy (gs, rhs1, speed);
1843 	      break;
1844 
1845 	    default:
1846 	      ;
1847 	    }
1848 	}
1849     }
1850   return NULL;
1851 }
1852 
1853 /* Dump a candidate for debug.  */
1854 
1855 static void
1856 dump_candidate (slsr_cand_t c)
1857 {
1858   fprintf (dump_file, "%3d  [%d] ", c->cand_num,
1859 	   gimple_bb (c->cand_stmt)->index);
1860   print_gimple_stmt (dump_file, c->cand_stmt, 0);
1861   switch (c->kind)
1862     {
1863     case CAND_MULT:
1864       fputs ("     MULT : (", dump_file);
1865       print_generic_expr (dump_file, c->base_expr);
1866       fputs (" + ", dump_file);
1867       print_decs (c->index, dump_file);
1868       fputs (") * ", dump_file);
1869       if (TREE_CODE (c->stride) != INTEGER_CST
1870 	  && c->stride_type != TREE_TYPE (c->stride))
1871 	{
1872 	  fputs ("(", dump_file);
1873 	  print_generic_expr (dump_file, c->stride_type);
1874 	  fputs (")", dump_file);
1875 	}
1876       print_generic_expr (dump_file, c->stride);
1877       fputs (" : ", dump_file);
1878       break;
1879     case CAND_ADD:
1880       fputs ("     ADD  : ", dump_file);
1881       print_generic_expr (dump_file, c->base_expr);
1882       fputs (" + (", dump_file);
1883       print_decs (c->index, dump_file);
1884       fputs (" * ", dump_file);
1885       if (TREE_CODE (c->stride) != INTEGER_CST
1886 	  && c->stride_type != TREE_TYPE (c->stride))
1887 	{
1888 	  fputs ("(", dump_file);
1889 	  print_generic_expr (dump_file, c->stride_type);
1890 	  fputs (")", dump_file);
1891 	}
1892       print_generic_expr (dump_file, c->stride);
1893       fputs (") : ", dump_file);
1894       break;
1895     case CAND_REF:
1896       fputs ("     REF  : ", dump_file);
1897       print_generic_expr (dump_file, c->base_expr);
1898       fputs (" + (", dump_file);
1899       print_generic_expr (dump_file, c->stride);
1900       fputs (") + ", dump_file);
1901       print_decs (c->index, dump_file);
1902       fputs (" : ", dump_file);
1903       break;
1904     case CAND_PHI:
1905       fputs ("     PHI  : ", dump_file);
1906       print_generic_expr (dump_file, c->base_expr);
1907       fputs (" + (unknown * ", dump_file);
1908       print_generic_expr (dump_file, c->stride);
1909       fputs (") : ", dump_file);
1910       break;
1911     default:
1912       gcc_unreachable ();
1913     }
1914   print_generic_expr (dump_file, c->cand_type);
1915   fprintf (dump_file, "\n     basis: %d  dependent: %d  sibling: %d\n",
1916 	   c->basis, c->dependent, c->sibling);
1917   fprintf (dump_file,
1918 	   "     next-interp: %d  first-interp: %d  dead-savings: %d\n",
1919 	   c->next_interp, c->first_interp, c->dead_savings);
1920   if (c->def_phi)
1921     fprintf (dump_file, "     phi:  %d\n", c->def_phi);
1922   fputs ("\n", dump_file);
1923 }
1924 
1925 /* Dump the candidate vector for debug.  */
1926 
1927 static void
1928 dump_cand_vec (void)
1929 {
1930   unsigned i;
1931   slsr_cand_t c;
1932 
1933   fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1934 
1935   FOR_EACH_VEC_ELT (cand_vec, i, c)
1936     dump_candidate (c);
1937 }
1938 
1939 /* Callback used to dump the candidate chains hash table.  */
1940 
1941 int
1942 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1943 {
1944   const_cand_chain_t chain = *slot;
1945   cand_chain_t p;
1946 
1947   print_generic_expr (dump_file, chain->base_expr);
1948   fprintf (dump_file, " -> %d", chain->cand->cand_num);
1949 
1950   for (p = chain->next; p; p = p->next)
1951     fprintf (dump_file, " -> %d", p->cand->cand_num);
1952 
1953   fputs ("\n", dump_file);
1954   return 1;
1955 }
1956 
1957 /* Dump the candidate chains.  */
1958 
1959 static void
1960 dump_cand_chains (void)
1961 {
1962   fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1963   base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1964     (NULL);
1965   fputs ("\n", dump_file);
1966 }
1967 
1968 /* Dump the increment vector for debug.  */
1969 
1970 static void
1971 dump_incr_vec (void)
1972 {
1973   if (dump_file && (dump_flags & TDF_DETAILS))
1974     {
1975       unsigned i;
1976 
1977       fprintf (dump_file, "\nIncrement vector:\n\n");
1978 
1979       for (i = 0; i < incr_vec_len; i++)
1980 	{
1981 	  fprintf (dump_file, "%3d  increment:   ", i);
1982 	  print_decs (incr_vec[i].incr, dump_file);
1983 	  fprintf (dump_file, "\n     count:       %d", incr_vec[i].count);
1984 	  fprintf (dump_file, "\n     cost:        %d", incr_vec[i].cost);
1985 	  fputs ("\n     initializer: ", dump_file);
1986 	  print_generic_expr (dump_file, incr_vec[i].initializer);
1987 	  fputs ("\n\n", dump_file);
1988 	}
1989     }
1990 }
1991 
1992 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1993    data reference.  */
1994 
1995 static void
1996 replace_ref (tree *expr, slsr_cand_t c)
1997 {
1998   tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1999   unsigned HOST_WIDE_INT misalign;
2000   unsigned align;
2001 
2002   /* Ensure the memory reference carries the minimum alignment
2003      requirement for the data type.  See PR58041.  */
2004   get_object_alignment_1 (*expr, &align, &misalign);
2005   if (misalign != 0)
2006     align = least_bit_hwi (misalign);
2007   if (align < TYPE_ALIGN (acc_type))
2008     acc_type = build_aligned_type (acc_type, align);
2009 
2010   add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
2011 			  c->base_expr, c->stride);
2012   mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
2013 			 wide_int_to_tree (c->cand_type, c->index));
2014 
2015   /* Gimplify the base addressing expression for the new MEM_REF tree.  */
2016   gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2017   TREE_OPERAND (mem_ref, 0)
2018     = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
2019 				/*simple_p=*/true, NULL,
2020 				/*before=*/true, GSI_SAME_STMT);
2021   copy_ref_info (mem_ref, *expr);
2022   *expr = mem_ref;
2023   update_stmt (c->cand_stmt);
2024 }
2025 
2026 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
2027    dependent of candidate C with an equivalent strength-reduced data
2028    reference.  */
2029 
2030 static void
2031 replace_refs (slsr_cand_t c)
2032 {
2033   if (dump_file && (dump_flags & TDF_DETAILS))
2034     {
2035       fputs ("Replacing reference: ", dump_file);
2036       print_gimple_stmt (dump_file, c->cand_stmt, 0);
2037     }
2038 
2039   if (gimple_vdef (c->cand_stmt))
2040     {
2041       tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
2042       replace_ref (lhs, c);
2043     }
2044   else
2045     {
2046       tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
2047       replace_ref (rhs, c);
2048     }
2049 
2050   if (dump_file && (dump_flags & TDF_DETAILS))
2051     {
2052       fputs ("With: ", dump_file);
2053       print_gimple_stmt (dump_file, c->cand_stmt, 0);
2054       fputs ("\n", dump_file);
2055     }
2056 
2057   if (c->sibling)
2058     replace_refs (lookup_cand (c->sibling));
2059 
2060   if (c->dependent)
2061     replace_refs (lookup_cand (c->dependent));
2062 }
2063 
2064 /* Return TRUE if candidate C is dependent upon a PHI.  */
2065 
2066 static bool
2067 phi_dependent_cand_p (slsr_cand_t c)
2068 {
2069   /* A candidate is not necessarily dependent upon a PHI just because
2070      it has a phi definition for its base name.  It may have a basis
2071      that relies upon the same phi definition, in which case the PHI
2072      is irrelevant to this candidate.  */
2073   return (c->def_phi
2074 	  && c->basis
2075 	  && lookup_cand (c->basis)->def_phi != c->def_phi);
2076 }
2077 
2078 /* Calculate the increment required for candidate C relative to
2079    its basis.  */
2080 
2081 static widest_int
2082 cand_increment (slsr_cand_t c)
2083 {
2084   slsr_cand_t basis;
2085 
2086   /* If the candidate doesn't have a basis, just return its own
2087      index.  This is useful in record_increments to help us find
2088      an existing initializer.  Also, if the candidate's basis is
2089      hidden by a phi, then its own index will be the increment
2090      from the newly introduced phi basis.  */
2091   if (!c->basis || phi_dependent_cand_p (c))
2092     return c->index;
2093 
2094   basis = lookup_cand (c->basis);
2095   gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2096   return c->index - basis->index;
2097 }
2098 
2099 /* Calculate the increment required for candidate C relative to
2100    its basis.  If we aren't going to generate pointer arithmetic
2101    for this candidate, return the absolute value of that increment
2102    instead.  */
2103 
2104 static inline widest_int
2105 cand_abs_increment (slsr_cand_t c)
2106 {
2107   widest_int increment = cand_increment (c);
2108 
2109   if (!address_arithmetic_p && wi::neg_p (increment))
2110     increment = -increment;
2111 
2112   return increment;
2113 }
2114 
2115 /* Return TRUE iff candidate C has already been replaced under
2116    another interpretation.  */
2117 
2118 static inline bool
2119 cand_already_replaced (slsr_cand_t c)
2120 {
2121   return (gimple_bb (c->cand_stmt) == 0);
2122 }
2123 
2124 /* Common logic used by replace_unconditional_candidate and
2125    replace_conditional_candidate.  */
2126 
2127 static void
2128 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2129 {
2130   tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2131   enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2132 
2133   /* It is not useful to replace casts, copies, negates, or adds of
2134      an SSA name and a constant.  */
2135   if (cand_code == SSA_NAME
2136       || CONVERT_EXPR_CODE_P (cand_code)
2137       || cand_code == PLUS_EXPR
2138       || cand_code == POINTER_PLUS_EXPR
2139       || cand_code == MINUS_EXPR
2140       || cand_code == NEGATE_EXPR)
2141     return;
2142 
2143   enum tree_code code = PLUS_EXPR;
2144   tree bump_tree;
2145   gimple *stmt_to_print = NULL;
2146 
2147   if (wi::neg_p (bump))
2148     {
2149       code = MINUS_EXPR;
2150       bump = -bump;
2151     }
2152 
2153   /* It is possible that the resulting bump doesn't fit in target_type.
2154      Abandon the replacement in this case.  This does not affect
2155      siblings or dependents of C.  */
2156   if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
2157 		       TYPE_SIGN (target_type)))
2158     return;
2159 
2160   bump_tree = wide_int_to_tree (target_type, bump);
2161 
2162   /* If the basis name and the candidate's LHS have incompatible types,
2163      introduce a cast.  */
2164   if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2165     basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2166 
2167   if (dump_file && (dump_flags & TDF_DETAILS))
2168     {
2169       fputs ("Replacing: ", dump_file);
2170       print_gimple_stmt (dump_file, c->cand_stmt, 0);
2171     }
2172 
2173   if (bump == 0)
2174     {
2175       tree lhs = gimple_assign_lhs (c->cand_stmt);
2176       gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2177       gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2178       slsr_cand_t cc = lookup_cand (c->first_interp);
2179       gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2180       gsi_replace (&gsi, copy_stmt, false);
2181       while (cc)
2182 	{
2183 	  cc->cand_stmt = copy_stmt;
2184 	  cc = cc->next_interp ? lookup_cand (cc->next_interp) : NULL;
2185 	}
2186       if (dump_file && (dump_flags & TDF_DETAILS))
2187 	stmt_to_print = copy_stmt;
2188     }
2189   else
2190     {
2191       tree rhs1, rhs2;
2192       if (cand_code != NEGATE_EXPR) {
2193 	rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2194 	rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2195       }
2196       if (cand_code != NEGATE_EXPR
2197 	  && ((operand_equal_p (rhs1, basis_name, 0)
2198 	       && operand_equal_p (rhs2, bump_tree, 0))
2199 	      || (operand_equal_p (rhs1, bump_tree, 0)
2200 		  && operand_equal_p (rhs2, basis_name, 0))))
2201 	{
2202 	  if (dump_file && (dump_flags & TDF_DETAILS))
2203 	    {
2204 	      fputs ("(duplicate, not actually replacing)", dump_file);
2205 	      stmt_to_print = c->cand_stmt;
2206 	    }
2207 	}
2208       else
2209 	{
2210 	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2211 	  slsr_cand_t cc = lookup_cand (c->first_interp);
2212 	  gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
2213 	  update_stmt (gsi_stmt (gsi));
2214 	  while (cc)
2215 	    {
2216 	      cc->cand_stmt = gsi_stmt (gsi);
2217 	      cc = cc->next_interp ? lookup_cand (cc->next_interp) : NULL;
2218 	    }
2219 	  if (dump_file && (dump_flags & TDF_DETAILS))
2220 	    stmt_to_print = gsi_stmt (gsi);
2221 	}
2222     }
2223 
2224   if (dump_file && (dump_flags & TDF_DETAILS))
2225     {
2226       fputs ("With: ", dump_file);
2227       print_gimple_stmt (dump_file, stmt_to_print, 0);
2228       fputs ("\n", dump_file);
2229     }
2230 }
2231 
2232 /* Replace candidate C with an add or subtract.   Note that we only
2233    operate on CAND_MULTs with known strides, so we will never generate
2234    a POINTER_PLUS_EXPR.  Each candidate X = (B + i) * S is replaced by
2235    X = Y + ((i - i') * S), as described in the module commentary.  The
2236    folded value ((i - i') * S) is referred to here as the "bump."  */
2237 
2238 static void
2239 replace_unconditional_candidate (slsr_cand_t c)
2240 {
2241   slsr_cand_t basis;
2242 
2243   if (cand_already_replaced (c))
2244     return;
2245 
2246   basis = lookup_cand (c->basis);
2247   widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2248 
2249   replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2250 }
2251 
2252 /* Return the index in the increment vector of the given INCREMENT,
2253    or -1 if not found.  The latter can occur if more than
2254    MAX_INCR_VEC_LEN increments have been found.  */
2255 
2256 static inline int
2257 incr_vec_index (const widest_int &increment)
2258 {
2259   unsigned i;
2260 
2261   for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2262     ;
2263 
2264   if (i < incr_vec_len)
2265     return i;
2266   else
2267     return -1;
2268 }
2269 
2270 /* Create a new statement along edge E to add BASIS_NAME to the product
2271    of INCREMENT and the stride of candidate C.  Create and return a new
2272    SSA name from *VAR to be used as the LHS of the new statement.
2273    KNOWN_STRIDE is true iff C's stride is a constant.  */
2274 
2275 static tree
2276 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2277 			     widest_int increment, edge e, location_t loc,
2278 			     bool known_stride)
2279 {
2280   tree lhs, basis_type;
2281   gassign *new_stmt, *cast_stmt = NULL;
2282 
2283   /* If the add candidate along this incoming edge has the same
2284      index as C's hidden basis, the hidden basis represents this
2285      edge correctly.  */
2286   if (increment == 0)
2287     return basis_name;
2288 
2289   basis_type = TREE_TYPE (basis_name);
2290   lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2291 
2292   /* Occasionally people convert integers to pointers without a
2293      cast, leading us into trouble if we aren't careful.  */
2294   enum tree_code plus_code
2295     = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2296 
2297   if (known_stride)
2298     {
2299       tree bump_tree;
2300       enum tree_code code = plus_code;
2301       widest_int bump = increment * wi::to_widest (c->stride);
2302       if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2303 	{
2304 	  code = MINUS_EXPR;
2305 	  bump = -bump;
2306 	}
2307 
2308       tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2309       bump_tree = wide_int_to_tree (stride_type, bump);
2310       new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2311     }
2312   else
2313     {
2314       int i;
2315       bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2316       i = incr_vec_index (negate_incr ? -increment : increment);
2317       gcc_assert (i >= 0);
2318 
2319       if (incr_vec[i].initializer)
2320 	{
2321 	  enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2322 	  new_stmt = gimple_build_assign (lhs, code, basis_name,
2323 					  incr_vec[i].initializer);
2324 	}
2325       else {
2326 	tree stride;
2327 
2328 	if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
2329 	  {
2330 	    tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
2331 						   "slsr");
2332 	    cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
2333 					     c->stride);
2334 	    stride = cast_stride;
2335 	  }
2336 	else
2337 	  stride = c->stride;
2338 
2339 	if (increment == 1)
2340 	  new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
2341 	else if (increment == -1)
2342 	  new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
2343 	else
2344 	  gcc_unreachable ();
2345       }
2346     }
2347 
2348   if (cast_stmt)
2349     {
2350       gimple_set_location (cast_stmt, loc);
2351       gsi_insert_on_edge (e, cast_stmt);
2352     }
2353 
2354   gimple_set_location (new_stmt, loc);
2355   gsi_insert_on_edge (e, new_stmt);
2356 
2357   if (dump_file && (dump_flags & TDF_DETAILS))
2358     {
2359       if (cast_stmt)
2360 	{
2361 	  fprintf (dump_file, "Inserting cast on edge %d->%d: ",
2362 		   e->src->index, e->dest->index);
2363 	  print_gimple_stmt (dump_file, cast_stmt, 0);
2364 	}
2365       fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
2366 	       e->dest->index);
2367       print_gimple_stmt (dump_file, new_stmt, 0);
2368     }
2369 
2370   return lhs;
2371 }
2372 
2373 /* Clear the visited field for a tree of PHI candidates.  */
2374 
2375 static void
2376 clear_visited (gphi *phi)
2377 {
2378   unsigned i;
2379   slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2380 
2381   if (phi_cand->visited)
2382     {
2383       phi_cand->visited = 0;
2384 
2385       for (i = 0; i < gimple_phi_num_args (phi); i++)
2386 	{
2387 	  tree arg = gimple_phi_arg_def (phi, i);
2388 	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2389 	  if (gimple_code (arg_def) == GIMPLE_PHI)
2390 	    clear_visited (as_a <gphi *> (arg_def));
2391 	}
2392     }
2393 }
2394 
2395 /* Recursive helper function for create_phi_basis.  */
2396 
2397 static tree
2398 create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
2399 		    location_t loc, bool known_stride)
2400 {
2401   int i;
2402   tree name, phi_arg;
2403   gphi *phi;
2404   slsr_cand_t basis = lookup_cand (c->basis);
2405   int nargs = gimple_phi_num_args (from_phi);
2406   basic_block phi_bb = gimple_bb (from_phi);
2407   slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2408   auto_vec<tree> phi_args (nargs);
2409 
2410   if (phi_cand->visited)
2411     return phi_cand->cached_basis;
2412   phi_cand->visited = 1;
2413 
2414   /* Process each argument of the existing phi that represents
2415      conditionally-executed add candidates.  */
2416   for (i = 0; i < nargs; i++)
2417     {
2418       edge e = (*phi_bb->preds)[i];
2419       tree arg = gimple_phi_arg_def (from_phi, i);
2420       tree feeding_def;
2421 
2422       /* If the phi argument is the base name of the CAND_PHI, then
2423 	 this incoming arc should use the hidden basis.  */
2424       if (operand_equal_p (arg, phi_cand->base_expr, 0))
2425 	if (basis->index == 0)
2426 	  feeding_def = gimple_assign_lhs (basis->cand_stmt);
2427 	else
2428 	  {
2429 	    widest_int incr = -basis->index;
2430 	    feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2431 						       e, loc, known_stride);
2432 	  }
2433       else
2434 	{
2435 	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2436 
2437 	  /* If there is another phi along this incoming edge, we must
2438 	     process it in the same fashion to ensure that all basis
2439 	     adjustments are made along its incoming edges.  */
2440 	  if (gimple_code (arg_def) == GIMPLE_PHI)
2441 	    feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
2442 					      loc, known_stride);
2443 	  else
2444 	    {
2445 	      slsr_cand_t arg_cand = base_cand_from_table (arg);
2446 	      widest_int diff = arg_cand->index - basis->index;
2447 	      feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2448 							 e, loc, known_stride);
2449 	    }
2450 	}
2451 
2452       /* Because of recursion, we need to save the arguments in a vector
2453 	 so we can create the PHI statement all at once.  Otherwise the
2454 	 storage for the half-created PHI can be reclaimed.  */
2455       phi_args.safe_push (feeding_def);
2456     }
2457 
2458   /* Create the new phi basis.  */
2459   name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2460   phi = create_phi_node (name, phi_bb);
2461   SSA_NAME_DEF_STMT (name) = phi;
2462 
2463   FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2464     {
2465       edge e = (*phi_bb->preds)[i];
2466       add_phi_arg (phi, phi_arg, e, loc);
2467     }
2468 
2469   update_stmt (phi);
2470 
2471   if (dump_file && (dump_flags & TDF_DETAILS))
2472     {
2473       fputs ("Introducing new phi basis: ", dump_file);
2474       print_gimple_stmt (dump_file, phi, 0);
2475     }
2476 
2477   phi_cand->cached_basis = name;
2478   return name;
2479 }
2480 
2481 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2482    is hidden by the phi node FROM_PHI, create a new phi node in the same
2483    block as FROM_PHI.  The new phi is suitable for use as a basis by C,
2484    with its phi arguments representing conditional adjustments to the
2485    hidden basis along conditional incoming paths.  Those adjustments are
2486    made by creating add statements (and sometimes recursively creating
2487    phis) along those incoming paths.  LOC is the location to attach to
2488    the introduced statements.  KNOWN_STRIDE is true iff C's stride is a
2489    constant.  */
2490 
2491 static tree
2492 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2493 		  location_t loc, bool known_stride)
2494 {
2495   tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
2496 				    known_stride);
2497   gcc_assert (retval);
2498   clear_visited (as_a <gphi *> (from_phi));
2499   return retval;
2500 }
2501 
2502 /* Given a candidate C whose basis is hidden by at least one intervening
2503    phi, introduce a matching number of new phis to represent its basis
2504    adjusted by conditional increments along possible incoming paths.  Then
2505    replace C as though it were an unconditional candidate, using the new
2506    basis.  */
2507 
2508 static void
2509 replace_conditional_candidate (slsr_cand_t c)
2510 {
2511   tree basis_name, name;
2512   slsr_cand_t basis;
2513   location_t loc;
2514 
2515   /* Look up the LHS SSA name from C's basis.  This will be the
2516      RHS1 of the adds we will introduce to create new phi arguments.  */
2517   basis = lookup_cand (c->basis);
2518   basis_name = gimple_assign_lhs (basis->cand_stmt);
2519 
2520   /* Create a new phi statement which will represent C's true basis
2521      after the transformation is complete.  */
2522   loc = gimple_location (c->cand_stmt);
2523   name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2524 			   basis_name, loc, KNOWN_STRIDE);
2525 
2526   /* Replace C with an add of the new basis phi and a constant.  */
2527   widest_int bump = c->index * wi::to_widest (c->stride);
2528 
2529   replace_mult_candidate (c, name, bump);
2530 }
2531 
2532 /* Recursive helper function for phi_add_costs.  SPREAD is a measure of
2533    how many PHI nodes we have visited at this point in the tree walk.  */
2534 
2535 static int
2536 phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
2537 {
2538   unsigned i;
2539   int cost = 0;
2540   slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2541 
2542   if (phi_cand->visited)
2543     return 0;
2544 
2545   phi_cand->visited = 1;
2546   (*spread)++;
2547 
2548   /* If we work our way back to a phi that isn't dominated by the hidden
2549      basis, this isn't a candidate for replacement.  Indicate this by
2550      returning an unreasonably high cost.  It's not easy to detect
2551      these situations when determining the basis, so we defer the
2552      decision until now.  */
2553   basic_block phi_bb = gimple_bb (phi);
2554   slsr_cand_t basis = lookup_cand (c->basis);
2555   basic_block basis_bb = gimple_bb (basis->cand_stmt);
2556 
2557   if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2558     return COST_INFINITE;
2559 
2560   for (i = 0; i < gimple_phi_num_args (phi); i++)
2561     {
2562       tree arg = gimple_phi_arg_def (phi, i);
2563 
2564       if (arg != phi_cand->base_expr)
2565 	{
2566 	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2567 
2568 	  if (gimple_code (arg_def) == GIMPLE_PHI)
2569 	    {
2570 	      cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);
2571 
2572 	      if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
2573 		return COST_INFINITE;
2574 	    }
2575 	  else
2576 	    {
2577 	      slsr_cand_t arg_cand = base_cand_from_table (arg);
2578 
2579 	      if (arg_cand->index != c->index)
2580 		cost += one_add_cost;
2581 	    }
2582 	}
2583     }
2584 
2585   return cost;
2586 }
2587 
2588 /* Compute the expected costs of inserting basis adjustments for
2589    candidate C with phi-definition PHI.  The cost of inserting
2590    one adjustment is given by ONE_ADD_COST.  If PHI has arguments
2591    which are themselves phi results, recursively calculate costs
2592    for those phis as well.  */
2593 
2594 static int
2595 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2596 {
2597   int spread = 0;
2598   int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
2599   clear_visited (as_a <gphi *> (phi));
2600   return retval;
2601 }
2602 /* For candidate C, each sibling of candidate C, and each dependent of
2603    candidate C, determine whether the candidate is dependent upon a
2604    phi that hides its basis.  If not, replace the candidate unconditionally.
2605    Otherwise, determine whether the cost of introducing compensation code
2606    for the candidate is offset by the gains from strength reduction.  If
2607    so, replace the candidate and introduce the compensation code.  */
2608 
2609 static void
2610 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2611 {
2612   if (phi_dependent_cand_p (c))
2613     {
2614       /* A multiply candidate with a stride of 1 is just an artifice
2615 	 of a copy or cast; there is no value in replacing it.  */
2616       if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
2617 	{
2618 	  /* A candidate dependent upon a phi will replace a multiply by
2619 	     a constant with an add, and will insert at most one add for
2620 	     each phi argument.  Add these costs with the potential dead-code
2621 	     savings to determine profitability.  */
2622 	  bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2623 	  int mult_savings = stmt_cost (c->cand_stmt, speed);
2624 	  gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2625 	  tree phi_result = gimple_phi_result (phi);
2626 	  int one_add_cost = add_cost (speed,
2627 				       TYPE_MODE (TREE_TYPE (phi_result)));
2628 	  int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2629 	  int cost = add_costs - mult_savings - c->dead_savings;
2630 
2631 	  if (dump_file && (dump_flags & TDF_DETAILS))
2632 	    {
2633 	      fprintf (dump_file, "  Conditional candidate %d:\n", c->cand_num);
2634 	      fprintf (dump_file, "    add_costs = %d\n", add_costs);
2635 	      fprintf (dump_file, "    mult_savings = %d\n", mult_savings);
2636 	      fprintf (dump_file, "    dead_savings = %d\n", c->dead_savings);
2637 	      fprintf (dump_file, "    cost = %d\n", cost);
2638 	      if (cost <= COST_NEUTRAL)
2639 		fputs ("  Replacing...\n", dump_file);
2640 	      else
2641 		fputs ("  Not replaced.\n", dump_file);
2642 	    }
2643 
2644 	  if (cost <= COST_NEUTRAL)
2645 	    replace_conditional_candidate (c);
2646 	}
2647     }
2648   else
2649     replace_unconditional_candidate (c);
2650 
2651   if (c->sibling)
2652     replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2653 
2654   if (c->dependent)
2655     replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2656 }
2657 
2658 /* Count the number of candidates in the tree rooted at C that have
2659    not already been replaced under other interpretations.  */
2660 
2661 static int
2662 count_candidates (slsr_cand_t c)
2663 {
2664   unsigned count = cand_already_replaced (c) ? 0 : 1;
2665 
2666   if (c->sibling)
2667     count += count_candidates (lookup_cand (c->sibling));
2668 
2669   if (c->dependent)
2670     count += count_candidates (lookup_cand (c->dependent));
2671 
2672   return count;
2673 }
2674 
2675 /* Increase the count of INCREMENT by one in the increment vector.
2676    INCREMENT is associated with candidate C.  If INCREMENT is to be
2677    conditionally executed as part of a conditional candidate replacement,
2678    IS_PHI_ADJUST is true, otherwise false.  If an initializer
2679    T_0 = stride * I is provided by a candidate that dominates all
2680    candidates with the same increment, also record T_0 for subsequent use.  */
2681 
2682 static void
2683 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2684 {
2685   bool found = false;
2686   unsigned i;
2687 
2688   /* Treat increments that differ only in sign as identical so as to
2689      share initializers, unless we are generating pointer arithmetic.  */
2690   if (!address_arithmetic_p && wi::neg_p (increment))
2691     increment = -increment;
2692 
2693   for (i = 0; i < incr_vec_len; i++)
2694     {
2695       if (incr_vec[i].incr == increment)
2696 	{
2697 	  incr_vec[i].count++;
2698 	  found = true;
2699 
2700 	  /* If we previously recorded an initializer that doesn't
2701 	     dominate this candidate, it's not going to be useful to
2702 	     us after all.  */
2703 	  if (incr_vec[i].initializer
2704 	      && !dominated_by_p (CDI_DOMINATORS,
2705 				  gimple_bb (c->cand_stmt),
2706 				  incr_vec[i].init_bb))
2707 	    {
2708 	      incr_vec[i].initializer = NULL_TREE;
2709 	      incr_vec[i].init_bb = NULL;
2710 	    }
2711 
2712 	  break;
2713 	}
2714     }
2715 
2716   if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2717     {
2718       /* The first time we see an increment, create the entry for it.
2719 	 If this is the root candidate which doesn't have a basis, set
2720 	 the count to zero.  We're only processing it so it can possibly
2721 	 provide an initializer for other candidates.  */
2722       incr_vec[incr_vec_len].incr = increment;
2723       incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2724       incr_vec[incr_vec_len].cost = COST_INFINITE;
2725 
2726       /* Optimistically record the first occurrence of this increment
2727 	 as providing an initializer (if it does); we will revise this
2728 	 opinion later if it doesn't dominate all other occurrences.
2729          Exception:  increments of 0, 1 never need initializers;
2730 	 and phi adjustments don't ever provide initializers.  */
2731       if (c->kind == CAND_ADD
2732 	  && !is_phi_adjust
2733 	  && c->index == increment
2734 	  && (increment > 1 || increment < 0)
2735 	  && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2736 	      || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2737 	{
2738 	  tree t0 = NULL_TREE;
2739 	  tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2740 	  tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2741 	  if (operand_equal_p (rhs1, c->base_expr, 0))
2742 	    t0 = rhs2;
2743 	  else if (operand_equal_p (rhs2, c->base_expr, 0))
2744 	    t0 = rhs1;
2745 	  if (t0
2746 	      && SSA_NAME_DEF_STMT (t0)
2747 	      && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2748 	    {
2749 	      incr_vec[incr_vec_len].initializer = t0;
2750 	      incr_vec[incr_vec_len++].init_bb
2751 		= gimple_bb (SSA_NAME_DEF_STMT (t0));
2752 	    }
2753 	  else
2754 	    {
2755 	      incr_vec[incr_vec_len].initializer = NULL_TREE;
2756 	      incr_vec[incr_vec_len++].init_bb = NULL;
2757 	    }
2758 	}
2759       else
2760 	{
2761 	  incr_vec[incr_vec_len].initializer = NULL_TREE;
2762 	  incr_vec[incr_vec_len++].init_bb = NULL;
2763 	}
2764     }
2765 }
2766 
2767 /* Recursive helper function for record_phi_increments.  */
2768 
2769 static void
2770 record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
2771 {
2772   unsigned i;
2773   slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2774 
2775   if (phi_cand->visited)
2776     return;
2777   phi_cand->visited = 1;
2778 
2779   for (i = 0; i < gimple_phi_num_args (phi); i++)
2780     {
2781       tree arg = gimple_phi_arg_def (phi, i);
2782       gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2783 
2784       if (gimple_code (arg_def) == GIMPLE_PHI)
2785 	record_phi_increments_1 (basis, arg_def);
2786       else
2787 	{
2788 	  widest_int diff;
2789 
2790 	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
2791 	    {
2792 	      diff = -basis->index;
2793 	      record_increment (phi_cand, diff, PHI_ADJUST);
2794 	    }
2795 	  else
2796 	    {
2797 	      slsr_cand_t arg_cand = base_cand_from_table (arg);
2798 	      diff = arg_cand->index - basis->index;
2799 	      record_increment (arg_cand, diff, PHI_ADJUST);
2800 	    }
2801 	}
2802     }
2803 }
2804 
2805 /* Given phi statement PHI that hides a candidate from its BASIS, find
2806    the increments along each incoming arc (recursively handling additional
2807    phis that may be present) and record them.  These increments are the
2808    difference in index between the index-adjusting statements and the
2809    index of the basis.  */
2810 
2811 static void
2812 record_phi_increments (slsr_cand_t basis, gimple *phi)
2813 {
2814   record_phi_increments_1 (basis, phi);
2815   clear_visited (as_a <gphi *> (phi));
2816 }
2817 
2818 /* Determine how many times each unique increment occurs in the set
2819    of candidates rooted at C's parent, recording the data in the
2820    increment vector.  For each unique increment I, if an initializer
2821    T_0 = stride * I is provided by a candidate that dominates all
2822    candidates with the same increment, also record T_0 for subsequent
2823    use.  */
2824 
2825 static void
2826 record_increments (slsr_cand_t c)
2827 {
2828   if (!cand_already_replaced (c))
2829     {
2830       if (!phi_dependent_cand_p (c))
2831 	record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2832       else
2833 	{
2834 	  /* A candidate with a basis hidden by a phi will have one
2835 	     increment for its relationship to the index represented by
2836 	     the phi, and potentially additional increments along each
2837 	     incoming edge.  For the root of the dependency tree (which
2838 	     has no basis), process just the initial index in case it has
2839 	     an initializer that can be used by subsequent candidates.  */
2840 	  record_increment (c, c->index, NOT_PHI_ADJUST);
2841 
2842 	  if (c->basis)
2843 	    record_phi_increments (lookup_cand (c->basis),
2844 				   lookup_cand (c->def_phi)->cand_stmt);
2845 	}
2846     }
2847 
2848   if (c->sibling)
2849     record_increments (lookup_cand (c->sibling));
2850 
2851   if (c->dependent)
2852     record_increments (lookup_cand (c->dependent));
2853 }
2854 
2855 /* Recursive helper function for phi_incr_cost.  */
2856 
2857 static int
2858 phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
2859 		 int *savings)
2860 {
2861   unsigned i;
2862   int cost = 0;
2863   slsr_cand_t basis = lookup_cand (c->basis);
2864   slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2865 
2866   if (phi_cand->visited)
2867     return 0;
2868   phi_cand->visited = 1;
2869 
2870   for (i = 0; i < gimple_phi_num_args (phi); i++)
2871     {
2872       tree arg = gimple_phi_arg_def (phi, i);
2873       gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2874 
2875       if (gimple_code (arg_def) == GIMPLE_PHI)
2876 	{
2877 	  int feeding_savings = 0;
2878 	  tree feeding_var = gimple_phi_result (arg_def);
2879 	  cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
2880 	  if (uses_consumed_by_stmt (feeding_var, phi))
2881 	    *savings += feeding_savings;
2882 	}
2883       else
2884 	{
2885 	  widest_int diff;
2886 	  slsr_cand_t arg_cand;
2887 
2888 	  /* When the PHI argument is just a pass-through to the base
2889 	     expression of the hidden basis, the difference is zero minus
2890 	     the index of the basis.  There is no potential savings by
2891 	     eliminating a statement in this case.  */
2892 	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
2893 	    {
2894 	      arg_cand = (slsr_cand_t)NULL;
2895 	      diff = -basis->index;
2896 	    }
2897 	  else
2898 	    {
2899 	      arg_cand = base_cand_from_table (arg);
2900 	      diff = arg_cand->index - basis->index;
2901 	    }
2902 
2903 	  if (incr == diff)
2904 	    {
2905 	      tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2906 	      cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2907 	      if (arg_cand)
2908 		{
2909 		  tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2910 		  if (uses_consumed_by_stmt (lhs, phi))
2911 		    *savings += stmt_cost (arg_cand->cand_stmt, true);
2912 		}
2913 	    }
2914 	}
2915     }
2916 
2917   return cost;
2918 }
2919 
2920 /* Add up and return the costs of introducing add statements that
2921    require the increment INCR on behalf of candidate C and phi
2922    statement PHI.  Accumulate into *SAVINGS the potential savings
2923    from removing existing statements that feed PHI and have no other
2924    uses.  */
2925 
2926 static int
2927 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2928 	       int *savings)
2929 {
2930   int retval = phi_incr_cost_1 (c, incr, phi, savings);
2931   clear_visited (as_a <gphi *> (phi));
2932   return retval;
2933 }
2934 
2935 /* Return the first candidate in the tree rooted at C that has not
2936    already been replaced, favoring siblings over dependents.  */
2937 
2938 static slsr_cand_t
2939 unreplaced_cand_in_tree (slsr_cand_t c)
2940 {
2941   if (!cand_already_replaced (c))
2942     return c;
2943 
2944   if (c->sibling)
2945     {
2946       slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2947       if (sib)
2948 	return sib;
2949     }
2950 
2951   if (c->dependent)
2952     {
2953       slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2954       if (dep)
2955 	return dep;
2956     }
2957 
2958   return NULL;
2959 }
2960 
2961 /* Return TRUE if the candidates in the tree rooted at C should be
2962    optimized for speed, else FALSE.  We estimate this based on the block
2963    containing the most dominant candidate in the tree that has not yet
2964    been replaced.  */
2965 
2966 static bool
2967 optimize_cands_for_speed_p (slsr_cand_t c)
2968 {
2969   slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2970   gcc_assert (c2);
2971   return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2972 }
2973 
2974 /* Add COST_IN to the lowest cost of any dependent path starting at
2975    candidate C or any of its siblings, counting only candidates along
2976    such paths with increment INCR.  Assume that replacing a candidate
2977    reduces cost by REPL_SAVINGS.  Also account for savings from any
2978    statements that would go dead.  If COUNT_PHIS is true, include
2979    costs of introducing feeding statements for conditional candidates.  */
2980 
2981 static int
2982 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2983 		  const widest_int &incr, bool count_phis)
2984 {
2985   int local_cost, sib_cost, savings = 0;
2986   widest_int cand_incr = cand_abs_increment (c);
2987 
2988   if (cand_already_replaced (c))
2989     local_cost = cost_in;
2990   else if (incr == cand_incr)
2991     local_cost = cost_in - repl_savings - c->dead_savings;
2992   else
2993     local_cost = cost_in - c->dead_savings;
2994 
2995   if (count_phis
2996       && phi_dependent_cand_p (c)
2997       && !cand_already_replaced (c))
2998     {
2999       gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3000       local_cost += phi_incr_cost (c, incr, phi, &savings);
3001 
3002       if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
3003 	local_cost -= savings;
3004     }
3005 
3006   if (c->dependent)
3007     local_cost = lowest_cost_path (local_cost, repl_savings,
3008 				   lookup_cand (c->dependent), incr,
3009 				   count_phis);
3010 
3011   if (c->sibling)
3012     {
3013       sib_cost = lowest_cost_path (cost_in, repl_savings,
3014 				   lookup_cand (c->sibling), incr,
3015 				   count_phis);
3016       local_cost = MIN (local_cost, sib_cost);
3017     }
3018 
3019   return local_cost;
3020 }
3021 
3022 /* Compute the total savings that would accrue from all replacements
3023    in the candidate tree rooted at C, counting only candidates with
3024    increment INCR.  Assume that replacing a candidate reduces cost
3025    by REPL_SAVINGS.  Also account for savings from statements that
3026    would go dead.  */
3027 
3028 static int
3029 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
3030 	       bool count_phis)
3031 {
3032   int savings = 0;
3033   widest_int cand_incr = cand_abs_increment (c);
3034 
3035   if (incr == cand_incr && !cand_already_replaced (c))
3036     savings += repl_savings + c->dead_savings;
3037 
3038   if (count_phis
3039       && phi_dependent_cand_p (c)
3040       && !cand_already_replaced (c))
3041     {
3042       int phi_savings = 0;
3043       gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3044       savings -= phi_incr_cost (c, incr, phi, &phi_savings);
3045 
3046       if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
3047 	savings += phi_savings;
3048     }
3049 
3050   if (c->dependent)
3051     savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
3052 			      count_phis);
3053 
3054   if (c->sibling)
3055     savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
3056 			      count_phis);
3057 
3058   return savings;
3059 }
3060 
3061 /* Use target-specific costs to determine and record which increments
3062    in the current candidate tree are profitable to replace, assuming
3063    MODE and SPEED.  FIRST_DEP is the first dependent of the root of
3064    the candidate tree.
3065 
3066    One slight limitation here is that we don't account for the possible
3067    introduction of casts in some cases.  See replace_one_candidate for
3068    the cases where these are introduced.  This should probably be cleaned
3069    up sometime.  */
3070 
3071 static void
3072 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
3073 {
3074   unsigned i;
3075 
3076   for (i = 0; i < incr_vec_len; i++)
3077     {
3078       HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
3079 
3080       /* If somehow this increment is bigger than a HWI, we won't
3081 	 be optimizing candidates that use it.  And if the increment
3082 	 has a count of zero, nothing will be done with it.  */
3083       if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
3084 	incr_vec[i].cost = COST_INFINITE;
3085 
3086       /* Increments of 0, 1, and -1 are always profitable to replace,
3087 	 because they always replace a multiply or add with an add or
3088 	 copy, and may cause one or more existing instructions to go
3089 	 dead.  Exception:  -1 can't be assumed to be profitable for
3090 	 pointer addition.  */
3091       else if (incr == 0
3092 	       || incr == 1
3093 	       || (incr == -1
3094 		   && !POINTER_TYPE_P (first_dep->cand_type)))
3095 	incr_vec[i].cost = COST_NEUTRAL;
3096 
3097       /* If we need to add an initializer, give up if a cast from the
3098 	 candidate's type to its stride's type can lose precision.
3099 	 Note that this already takes into account that the stride may
3100 	 have been cast to a wider type, in which case this test won't
3101 	 fire.  Example:
3102 
3103            short int _1;
3104 	   _2 = (int) _1;
3105 	   _3 = _2 * 10;
3106 	   _4 = x + _3;    ADD: x + (10 * (int)_1) : int
3107 	   _5 = _2 * 15;
3108 	   _6 = x + _5;    ADD: x + (15 * (int)_1) : int
3109 
3110 	 Although the stride was a short int initially, the stride
3111 	 used in the analysis has been widened to an int, and such
3112 	 widening will be done in the initializer as well.  */
3113       else if (!incr_vec[i].initializer
3114 	       && TREE_CODE (first_dep->stride) != INTEGER_CST
3115 	       && !legal_cast_p_1 (first_dep->stride_type,
3116 				   TREE_TYPE (gimple_assign_lhs
3117 					      (first_dep->cand_stmt))))
3118 	incr_vec[i].cost = COST_INFINITE;
3119 
3120       /* If we need to add an initializer, make sure we don't introduce
3121 	 a multiply by a pointer type, which can happen in certain cast
3122 	 scenarios.  */
3123       else if (!incr_vec[i].initializer
3124 	       && TREE_CODE (first_dep->stride) != INTEGER_CST
3125 	       && POINTER_TYPE_P (first_dep->stride_type))
3126 	incr_vec[i].cost = COST_INFINITE;
3127 
3128       /* For any other increment, if this is a multiply candidate, we
3129 	 must introduce a temporary T and initialize it with
3130 	 T_0 = stride * increment.  When optimizing for speed, walk the
3131 	 candidate tree to calculate the best cost reduction along any
3132 	 path; if it offsets the fixed cost of inserting the initializer,
3133 	 replacing the increment is profitable.  When optimizing for
3134          size, instead calculate the total cost reduction from replacing
3135 	 all candidates with this increment.  */
3136       else if (first_dep->kind == CAND_MULT)
3137 	{
3138 	  int cost = mult_by_coeff_cost (incr, mode, speed);
3139 	  int repl_savings;
3140 
3141 	  if (tree_fits_shwi_p (first_dep->stride))
3142 	    {
3143 	      HOST_WIDE_INT hwi_stride = tree_to_shwi (first_dep->stride);
3144 	      repl_savings = mult_by_coeff_cost (hwi_stride, mode, speed);
3145 	    }
3146 	  else
3147 	    repl_savings = mul_cost (speed, mode);
3148 	  repl_savings -= add_cost (speed, mode);
3149 
3150 	  if (speed)
3151 	    cost = lowest_cost_path (cost, repl_savings, first_dep,
3152 				     incr_vec[i].incr, COUNT_PHIS);
3153 	  else
3154 	    cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
3155 				   COUNT_PHIS);
3156 
3157 	  incr_vec[i].cost = cost;
3158 	}
3159 
3160       /* If this is an add candidate, the initializer may already
3161 	 exist, so only calculate the cost of the initializer if it
3162 	 doesn't.  We are replacing one add with another here, so the
3163 	 known replacement savings is zero.  We will account for removal
3164 	 of dead instructions in lowest_cost_path or total_savings.  */
3165       else
3166 	{
3167 	  int cost = 0;
3168 	  if (!incr_vec[i].initializer)
3169 	    cost = mult_by_coeff_cost (incr, mode, speed);
3170 
3171 	  if (speed)
3172 	    cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
3173 				     DONT_COUNT_PHIS);
3174 	  else
3175 	    cost -= total_savings (0, first_dep, incr_vec[i].incr,
3176 				   DONT_COUNT_PHIS);
3177 
3178 	  incr_vec[i].cost = cost;
3179 	}
3180     }
3181 }
3182 
3183 /* Return the nearest common dominator of BB1 and BB2.  If the blocks
3184    are identical, return the earlier of C1 and C2 in *WHERE.  Otherwise,
3185    if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
3186    return C2 in *WHERE; and if the NCD matches neither, return NULL in
3187    *WHERE.  Note: It is possible for one of C1 and C2 to be NULL.  */
3188 
3189 static basic_block
3190 ncd_for_two_cands (basic_block bb1, basic_block bb2,
3191 		   slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
3192 {
3193   basic_block ncd;
3194 
3195   if (!bb1)
3196     {
3197       *where = c2;
3198       return bb2;
3199     }
3200 
3201   if (!bb2)
3202     {
3203       *where = c1;
3204       return bb1;
3205     }
3206 
3207   ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3208 
3209   /* If both candidates are in the same block, the earlier
3210      candidate wins.  */
3211   if (bb1 == ncd && bb2 == ncd)
3212     {
3213       if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3214 	*where = c2;
3215       else
3216 	*where = c1;
3217     }
3218 
3219   /* Otherwise, if one of them produced a candidate in the
3220      dominator, that one wins.  */
3221   else if (bb1 == ncd)
3222     *where = c1;
3223 
3224   else if (bb2 == ncd)
3225     *where = c2;
3226 
3227   /* If neither matches the dominator, neither wins.  */
3228   else
3229     *where = NULL;
3230 
3231   return ncd;
3232 }
3233 
3234 /* Consider all candidates that feed PHI.  Find the nearest common
3235    dominator of those candidates requiring the given increment INCR.
3236    Further find and return the nearest common dominator of this result
3237    with block NCD.  If the returned block contains one or more of the
3238    candidates, return the earliest candidate in the block in *WHERE.  */
3239 
3240 static basic_block
3241 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3242 	      basic_block ncd, slsr_cand_t *where)
3243 {
3244   unsigned i;
3245   slsr_cand_t basis = lookup_cand (c->basis);
3246   slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3247 
3248   for (i = 0; i < gimple_phi_num_args (phi); i++)
3249     {
3250       tree arg = gimple_phi_arg_def (phi, i);
3251       gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3252 
3253       if (gimple_code (arg_def) == GIMPLE_PHI)
3254 	ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd, where);
3255       else
3256 	{
3257 	  widest_int diff;
3258 
3259 	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
3260 	    diff = -basis->index;
3261 	  else
3262 	    {
3263 	      slsr_cand_t arg_cand = base_cand_from_table (arg);
3264 	      diff = arg_cand->index - basis->index;
3265 	    }
3266 
3267 	  basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3268 
3269 	  if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3270 	    ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3271 	}
3272     }
3273 
3274   return ncd;
3275 }
3276 
3277 /* Consider the candidate C together with any candidates that feed
3278    C's phi dependence (if any).  Find and return the nearest common
3279    dominator of those candidates requiring the given increment INCR.
3280    If the returned block contains one or more of the candidates,
3281    return the earliest candidate in the block in *WHERE.  */
3282 
3283 static basic_block
3284 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3285 {
3286   basic_block ncd = NULL;
3287 
3288   if (cand_abs_increment (c) == incr)
3289     {
3290       ncd = gimple_bb (c->cand_stmt);
3291       *where = c;
3292     }
3293 
3294   if (phi_dependent_cand_p (c))
3295     ncd = ncd_with_phi (c, incr,
3296 			as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3297 			ncd, where);
3298 
3299   return ncd;
3300 }
3301 
3302 /* Consider all candidates in the tree rooted at C for which INCR
3303    represents the required increment of C relative to its basis.
3304    Find and return the basic block that most nearly dominates all
3305    such candidates.  If the returned block contains one or more of
3306    the candidates, return the earliest candidate in the block in
3307    *WHERE.  */
3308 
3309 static basic_block
3310 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3311 				    slsr_cand_t *where)
3312 {
3313   basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3314   slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3315 
3316   /* First find the NCD of all siblings and dependents.  */
3317   if (c->sibling)
3318     sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3319 						  incr, &sib_where);
3320   if (c->dependent)
3321     dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3322 						  incr, &dep_where);
3323   if (!sib_ncd && !dep_ncd)
3324     {
3325       new_where = NULL;
3326       ncd = NULL;
3327     }
3328   else if (sib_ncd && !dep_ncd)
3329     {
3330       new_where = sib_where;
3331       ncd = sib_ncd;
3332     }
3333   else if (dep_ncd && !sib_ncd)
3334     {
3335       new_where = dep_where;
3336       ncd = dep_ncd;
3337     }
3338   else
3339     ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3340 			     dep_where, &new_where);
3341 
3342   /* If the candidate's increment doesn't match the one we're interested
3343      in (and nor do any increments for feeding defs of a phi-dependence),
3344      then the result depends only on siblings and dependents.  */
3345   this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3346 
3347   if (!this_ncd || cand_already_replaced (c))
3348     {
3349       *where = new_where;
3350       return ncd;
3351     }
3352 
3353   /* Otherwise, compare this candidate with the result from all siblings
3354      and dependents.  */
3355   ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3356 
3357   return ncd;
3358 }
3359 
3360 /* Return TRUE if the increment indexed by INDEX is profitable to replace.  */
3361 
3362 static inline bool
3363 profitable_increment_p (unsigned index)
3364 {
3365   return (incr_vec[index].cost <= COST_NEUTRAL);
3366 }
3367 
3368 /* For each profitable increment in the increment vector not equal to
3369    0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3370    dominator of all statements in the candidate chain rooted at C
3371    that require that increment, and insert an initializer
3372    T_0 = stride * increment at that location.  Record T_0 with the
3373    increment record.  */
3374 
3375 static void
3376 insert_initializers (slsr_cand_t c)
3377 {
3378   unsigned i;
3379 
3380   for (i = 0; i < incr_vec_len; i++)
3381     {
3382       basic_block bb;
3383       slsr_cand_t where = NULL;
3384       gassign *init_stmt;
3385       gassign *cast_stmt = NULL;
3386       tree new_name, incr_tree, init_stride;
3387       widest_int incr = incr_vec[i].incr;
3388 
3389       if (!profitable_increment_p (i)
3390 	  || incr == 1
3391 	  || (incr == -1
3392 	      && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
3393 	  || incr == 0)
3394 	continue;
3395 
3396       /* We may have already identified an existing initializer that
3397 	 will suffice.  */
3398       if (incr_vec[i].initializer)
3399 	{
3400 	  if (dump_file && (dump_flags & TDF_DETAILS))
3401 	    {
3402 	      fputs ("Using existing initializer: ", dump_file);
3403 	      print_gimple_stmt (dump_file,
3404 				 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3405 				 0, 0);
3406 	    }
3407 	  continue;
3408 	}
3409 
3410       /* Find the block that most closely dominates all candidates
3411 	 with this increment.  If there is at least one candidate in
3412 	 that block, the earliest one will be returned in WHERE.  */
3413       bb = nearest_common_dominator_for_cands (c, incr, &where);
3414 
3415       /* If the NCD is not dominated by the block containing the
3416 	 definition of the stride, we can't legally insert a
3417 	 single initializer.  Mark the increment as unprofitable
3418 	 so we don't make any replacements.  FIXME: Multiple
3419 	 initializers could be placed with more analysis.  */
3420       gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
3421       basic_block stride_bb = gimple_bb (stride_def);
3422 
3423       if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
3424 	{
3425 	  if (dump_file && (dump_flags & TDF_DETAILS))
3426 	    fprintf (dump_file,
3427 		     "Initializer #%d cannot be legally placed\n", i);
3428 	  incr_vec[i].cost = COST_INFINITE;
3429 	  continue;
3430 	}
3431 
3432       /* If the nominal stride has a different type than the recorded
3433 	 stride type, build a cast from the nominal stride to that type.  */
3434       if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
3435 	{
3436 	  init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3437 	  cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
3438 	}
3439       else
3440 	init_stride = c->stride;
3441 
3442       /* Create a new SSA name to hold the initializer's value.  */
3443       new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3444       incr_vec[i].initializer = new_name;
3445 
3446       /* Create the initializer and insert it in the latest possible
3447 	 dominating position.  */
3448       incr_tree = wide_int_to_tree (c->stride_type, incr);
3449       init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3450 				       init_stride, incr_tree);
3451       if (where)
3452 	{
3453 	  gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3454 	  location_t loc = gimple_location (where->cand_stmt);
3455 
3456 	  if (cast_stmt)
3457 	    {
3458 	      gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3459 	      gimple_set_location (cast_stmt, loc);
3460 	    }
3461 
3462 	  gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3463 	  gimple_set_location (init_stmt, loc);
3464 	}
3465       else
3466 	{
3467 	  gimple_stmt_iterator gsi = gsi_last_bb (bb);
3468 	  gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3469 	  location_t loc = gimple_location (basis_stmt);
3470 
3471 	  if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
3472 	    {
3473 	      if (cast_stmt)
3474 		{
3475 		  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3476 		  gimple_set_location (cast_stmt, loc);
3477 		}
3478 	      gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3479 	    }
3480 	  else
3481 	    {
3482 	      if (cast_stmt)
3483 		{
3484 		  gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
3485 		  gimple_set_location (cast_stmt, loc);
3486 		}
3487 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
3488 	    }
3489 
3490 	  gimple_set_location (init_stmt, gimple_location (basis_stmt));
3491 	}
3492 
3493       if (dump_file && (dump_flags & TDF_DETAILS))
3494 	{
3495 	  if (cast_stmt)
3496 	    {
3497 	      fputs ("Inserting stride cast: ", dump_file);
3498 	      print_gimple_stmt (dump_file, cast_stmt, 0);
3499 	    }
3500 	  fputs ("Inserting initializer: ", dump_file);
3501 	  print_gimple_stmt (dump_file, init_stmt, 0);
3502 	}
3503     }
3504 }
3505 
3506 /* Recursive helper function for all_phi_incrs_profitable.  */
3507 
3508 static bool
3509 all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
3510 {
3511   unsigned i;
3512   slsr_cand_t basis = lookup_cand (c->basis);
3513   slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3514 
3515   if (phi_cand->visited)
3516     return true;
3517 
3518   phi_cand->visited = 1;
3519   (*spread)++;
3520 
3521   /* If the basis doesn't dominate the PHI (including when the PHI is
3522      in the same block as the basis), we won't be able to create a PHI
3523      using the basis here.  */
3524   basic_block basis_bb = gimple_bb (basis->cand_stmt);
3525   basic_block phi_bb = gimple_bb (phi);
3526 
3527   if (phi_bb == basis_bb
3528       || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
3529     return false;
3530 
3531   for (i = 0; i < gimple_phi_num_args (phi); i++)
3532     {
3533       /* If the PHI arg resides in a block not dominated by the basis,
3534 	 we won't be able to create a PHI using the basis here.  */
3535       basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;
3536 
3537       if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
3538 	return false;
3539 
3540       tree arg = gimple_phi_arg_def (phi, i);
3541       gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3542 
3543       if (gimple_code (arg_def) == GIMPLE_PHI)
3544 	{
3545 	  if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def), spread)
3546 	      || *spread > MAX_SPREAD)
3547 	    return false;
3548 	}
3549       else
3550 	{
3551 	  int j;
3552 	  widest_int increment;
3553 
3554 	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
3555 	    increment = -basis->index;
3556 	  else
3557 	    {
3558 	      slsr_cand_t arg_cand = base_cand_from_table (arg);
3559 	      increment = arg_cand->index - basis->index;
3560 	    }
3561 
3562 	  if (!address_arithmetic_p && wi::neg_p (increment))
3563 	    increment = -increment;
3564 
3565 	  j = incr_vec_index (increment);
3566 
3567 	  if (dump_file && (dump_flags & TDF_DETAILS))
3568 	    {
3569 	      fprintf (dump_file, "  Conditional candidate %d, phi: ",
3570 		       c->cand_num);
3571 	      print_gimple_stmt (dump_file, phi, 0);
3572 	      fputs ("    increment: ", dump_file);
3573 	      print_decs (increment, dump_file);
3574 	      if (j < 0)
3575 		fprintf (dump_file,
3576 			 "\n  Not replaced; incr_vec overflow.\n");
3577 	      else {
3578 		fprintf (dump_file, "\n    cost: %d\n", incr_vec[j].cost);
3579 		if (profitable_increment_p (j))
3580 		  fputs ("  Replacing...\n", dump_file);
3581 		else
3582 		  fputs ("  Not replaced.\n", dump_file);
3583 	      }
3584 	    }
3585 
3586 	  if (j < 0 || !profitable_increment_p (j))
3587 	    return false;
3588 	}
3589     }
3590 
3591   return true;
3592 }
3593 
3594 /* Return TRUE iff all required increments for candidates feeding PHI
3595    are profitable (and legal!) to replace on behalf of candidate C.  */
3596 
3597 static bool
3598 all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
3599 {
3600   int spread = 0;
3601   bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
3602   clear_visited (phi);
3603   return retval;
3604 }
3605 
3606 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3607    type TO_TYPE, and insert it in front of the statement represented
3608    by candidate C.  Use *NEW_VAR to create the new SSA name.  Return
3609    the new SSA name.  */
3610 
3611 static tree
3612 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3613 {
3614   tree cast_lhs;
3615   gassign *cast_stmt;
3616   gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3617 
3618   cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3619   cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3620   gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3621   gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3622 
3623   if (dump_file && (dump_flags & TDF_DETAILS))
3624     {
3625       fputs ("  Inserting: ", dump_file);
3626       print_gimple_stmt (dump_file, cast_stmt, 0);
3627     }
3628 
3629   return cast_lhs;
3630 }
3631 
3632 /* Replace the RHS of the statement represented by candidate C with
3633    NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3634    leave C unchanged or just interchange its operands.  The original
3635    operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3636    If the replacement was made and we are doing a details dump,
3637    return the revised statement, else NULL.  */
3638 
3639 static gimple *
3640 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3641 			enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3642 			slsr_cand_t c)
3643 {
3644   if (new_code != old_code
3645       || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3646 	   || !operand_equal_p (new_rhs2, old_rhs2, 0))
3647 	  && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3648 	      || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3649     {
3650       gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3651       slsr_cand_t cc = lookup_cand (c->first_interp);
3652       gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3653       update_stmt (gsi_stmt (gsi));
3654       while (cc)
3655 	{
3656 	  cc->cand_stmt = gsi_stmt (gsi);
3657 	  cc = cc->next_interp ? lookup_cand (cc->next_interp) : NULL;
3658 	}
3659 
3660       if (dump_file && (dump_flags & TDF_DETAILS))
3661 	return gsi_stmt (gsi);
3662     }
3663 
3664   else if (dump_file && (dump_flags & TDF_DETAILS))
3665     fputs ("  (duplicate, not actually replacing)\n", dump_file);
3666 
3667   return NULL;
3668 }
3669 
3670 /* Strength-reduce the statement represented by candidate C by replacing
3671    it with an equivalent addition or subtraction.  I is the index into
3672    the increment vector identifying C's increment.  NEW_VAR is used to
3673    create a new SSA name if a cast needs to be introduced.  BASIS_NAME
3674    is the rhs1 to use in creating the add/subtract.  */
3675 
3676 static void
3677 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3678 {
3679   gimple *stmt_to_print = NULL;
3680   tree orig_rhs1, orig_rhs2;
3681   tree rhs2;
3682   enum tree_code orig_code, repl_code;
3683   widest_int cand_incr;
3684 
3685   orig_code = gimple_assign_rhs_code (c->cand_stmt);
3686   orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3687   orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3688   cand_incr = cand_increment (c);
3689 
3690   /* If orig_rhs2 is NULL, we have already replaced this in situ with
3691      a copy statement under another interpretation.  */
3692   if (!orig_rhs2)
3693     return;
3694 
3695   if (dump_file && (dump_flags & TDF_DETAILS))
3696     {
3697       fputs ("Replacing: ", dump_file);
3698       print_gimple_stmt (dump_file, c->cand_stmt, 0);
3699       stmt_to_print = c->cand_stmt;
3700     }
3701 
3702   if (address_arithmetic_p)
3703     repl_code = POINTER_PLUS_EXPR;
3704   else
3705     repl_code = PLUS_EXPR;
3706 
3707   /* If the increment has an initializer T_0, replace the candidate
3708      statement with an add of the basis name and the initializer.  */
3709   if (incr_vec[i].initializer)
3710     {
3711       tree init_type = TREE_TYPE (incr_vec[i].initializer);
3712       tree orig_type = TREE_TYPE (orig_rhs2);
3713 
3714       if (types_compatible_p (orig_type, init_type))
3715 	rhs2 = incr_vec[i].initializer;
3716       else
3717 	rhs2 = introduce_cast_before_cand (c, orig_type,
3718 					   incr_vec[i].initializer);
3719 
3720       if (incr_vec[i].incr != cand_incr)
3721 	{
3722 	  gcc_assert (repl_code == PLUS_EXPR);
3723 	  repl_code = MINUS_EXPR;
3724 	}
3725 
3726       stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3727 					      orig_code, orig_rhs1, orig_rhs2,
3728 					      c);
3729     }
3730 
3731   /* Otherwise, the increment is one of -1, 0, and 1.  Replace
3732      with a subtract of the stride from the basis name, a copy
3733      from the basis name, or an add of the stride to the basis
3734      name, respectively.  It may be necessary to introduce a
3735      cast (or reuse an existing cast).  */
3736   else if (cand_incr == 1)
3737     {
3738       tree stride_type = TREE_TYPE (c->stride);
3739       tree orig_type = TREE_TYPE (orig_rhs2);
3740 
3741       if (types_compatible_p (orig_type, stride_type))
3742 	rhs2 = c->stride;
3743       else
3744 	rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3745 
3746       stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3747 					      orig_code, orig_rhs1, orig_rhs2,
3748 					      c);
3749     }
3750 
3751   else if (cand_incr == -1)
3752     {
3753       tree stride_type = TREE_TYPE (c->stride);
3754       tree orig_type = TREE_TYPE (orig_rhs2);
3755       gcc_assert (repl_code != POINTER_PLUS_EXPR);
3756 
3757       if (types_compatible_p (orig_type, stride_type))
3758 	rhs2 = c->stride;
3759       else
3760 	rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3761 
3762       if (orig_code != MINUS_EXPR
3763 	  || !operand_equal_p (basis_name, orig_rhs1, 0)
3764 	  || !operand_equal_p (rhs2, orig_rhs2, 0))
3765 	{
3766 	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3767 	  slsr_cand_t cc = lookup_cand (c->first_interp);
3768 	  gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3769 	  update_stmt (gsi_stmt (gsi));
3770 	  while (cc)
3771 	    {
3772 	      cc->cand_stmt = gsi_stmt (gsi);
3773 	      cc = cc->next_interp ? lookup_cand (cc->next_interp) : NULL;
3774 	    }
3775 
3776 	  if (dump_file && (dump_flags & TDF_DETAILS))
3777 	    stmt_to_print = gsi_stmt (gsi);
3778 	}
3779       else if (dump_file && (dump_flags & TDF_DETAILS))
3780 	fputs ("  (duplicate, not actually replacing)\n", dump_file);
3781     }
3782 
3783   else if (cand_incr == 0)
3784     {
3785       tree lhs = gimple_assign_lhs (c->cand_stmt);
3786       tree lhs_type = TREE_TYPE (lhs);
3787       tree basis_type = TREE_TYPE (basis_name);
3788 
3789       if (types_compatible_p (lhs_type, basis_type))
3790 	{
3791 	  gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3792 	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3793 	  slsr_cand_t cc = lookup_cand (c->first_interp);
3794 	  gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3795 	  gsi_replace (&gsi, copy_stmt, false);
3796 	  while (cc)
3797 	    {
3798 	      cc->cand_stmt = copy_stmt;
3799 	      cc = cc->next_interp ? lookup_cand (cc->next_interp) : NULL;
3800 	    }
3801 
3802 	  if (dump_file && (dump_flags & TDF_DETAILS))
3803 	    stmt_to_print = copy_stmt;
3804 	}
3805       else
3806 	{
3807 	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3808 	  gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3809 	  slsr_cand_t cc = lookup_cand (c->first_interp);
3810 	  gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3811 	  gsi_replace (&gsi, cast_stmt, false);
3812 	  while (cc)
3813 	    {
3814 	      cc->cand_stmt = cast_stmt;
3815 	      cc = cc->next_interp ? lookup_cand (cc->next_interp) : NULL;
3816 	    }
3817 
3818 	  if (dump_file && (dump_flags & TDF_DETAILS))
3819 	    stmt_to_print = cast_stmt;
3820 	}
3821     }
3822   else
3823     gcc_unreachable ();
3824 
3825   if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3826     {
3827       fputs ("With: ", dump_file);
3828       print_gimple_stmt (dump_file, stmt_to_print, 0);
3829       fputs ("\n", dump_file);
3830     }
3831 }
3832 
3833 /* For each candidate in the tree rooted at C, replace it with
3834    an increment if such has been shown to be profitable.  */
3835 
3836 static void
3837 replace_profitable_candidates (slsr_cand_t c)
3838 {
3839   if (!cand_already_replaced (c))
3840     {
3841       widest_int increment = cand_abs_increment (c);
3842       enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3843       int i;
3844 
3845       i = incr_vec_index (increment);
3846 
3847       /* Only process profitable increments.  Nothing useful can be done
3848 	 to a cast or copy.  */
3849       if (i >= 0
3850 	  && profitable_increment_p (i)
3851 	  && orig_code != SSA_NAME
3852 	  && !CONVERT_EXPR_CODE_P (orig_code))
3853 	{
3854 	  if (phi_dependent_cand_p (c))
3855 	    {
3856 	      gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);
3857 
3858 	      if (all_phi_incrs_profitable (c, phi))
3859 		{
3860 		  /* Look up the LHS SSA name from C's basis.  This will be
3861 		     the RHS1 of the adds we will introduce to create new
3862 		     phi arguments.  */
3863 		  slsr_cand_t basis = lookup_cand (c->basis);
3864 		  tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3865 
3866 		  /* Create a new phi statement that will represent C's true
3867 		     basis after the transformation is complete.  */
3868 		  location_t loc = gimple_location (c->cand_stmt);
3869 		  tree name = create_phi_basis (c, phi, basis_name,
3870 						loc, UNKNOWN_STRIDE);
3871 
3872 		  /* Replace C with an add of the new basis phi and the
3873 		     increment.  */
3874 		  replace_one_candidate (c, i, name);
3875 		}
3876 	    }
3877 	  else
3878 	    {
3879 	      slsr_cand_t basis = lookup_cand (c->basis);
3880 	      tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3881 	      replace_one_candidate (c, i, basis_name);
3882 	    }
3883 	}
3884     }
3885 
3886   if (c->sibling)
3887     replace_profitable_candidates (lookup_cand (c->sibling));
3888 
3889   if (c->dependent)
3890     replace_profitable_candidates (lookup_cand (c->dependent));
3891 }
3892 
3893 /* Analyze costs of related candidates in the candidate vector,
3894    and make beneficial replacements.  */
3895 
3896 static void
3897 analyze_candidates_and_replace (void)
3898 {
3899   unsigned i;
3900   slsr_cand_t c;
3901 
3902   /* Each candidate that has a null basis and a non-null
3903      dependent is the root of a tree of related statements.
3904      Analyze each tree to determine a subset of those
3905      statements that can be replaced with maximum benefit.  */
3906   FOR_EACH_VEC_ELT (cand_vec, i, c)
3907     {
3908       slsr_cand_t first_dep;
3909 
3910       if (c->basis != 0 || c->dependent == 0)
3911 	continue;
3912 
3913       if (dump_file && (dump_flags & TDF_DETAILS))
3914 	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3915 		 c->cand_num);
3916 
3917       first_dep = lookup_cand (c->dependent);
3918 
3919       /* If this is a chain of CAND_REFs, unconditionally replace
3920 	 each of them with a strength-reduced data reference.  */
3921       if (c->kind == CAND_REF)
3922 	replace_refs (c);
3923 
3924       /* If the common stride of all related candidates is a known
3925 	 constant, each candidate without a phi-dependence can be
3926 	 profitably replaced.  Each replaces a multiply by a single
3927 	 add, with the possibility that a feeding add also goes dead.
3928 	 A candidate with a phi-dependence is replaced only if the
3929 	 compensation code it requires is offset by the strength
3930 	 reduction savings.  */
3931       else if (TREE_CODE (c->stride) == INTEGER_CST)
3932 	replace_uncond_cands_and_profitable_phis (first_dep);
3933 
3934       /* When the stride is an SSA name, it may still be profitable
3935 	 to replace some or all of the dependent candidates, depending
3936 	 on whether the introduced increments can be reused, or are
3937 	 less expensive to calculate than the replaced statements.  */
3938       else
3939 	{
3940 	  machine_mode mode;
3941 	  bool speed;
3942 
3943 	  /* Determine whether we'll be generating pointer arithmetic
3944 	     when replacing candidates.  */
3945 	  address_arithmetic_p = (c->kind == CAND_ADD
3946 				  && POINTER_TYPE_P (c->cand_type));
3947 
3948 	  /* If all candidates have already been replaced under other
3949 	     interpretations, nothing remains to be done.  */
3950 	  if (!count_candidates (c))
3951 	    continue;
3952 
3953 	  /* Construct an array of increments for this candidate chain.  */
3954 	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3955 	  incr_vec_len = 0;
3956 	  record_increments (c);
3957 
3958 	  /* Determine which increments are profitable to replace.  */
3959 	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3960 	  speed = optimize_cands_for_speed_p (c);
3961 	  analyze_increments (first_dep, mode, speed);
3962 
3963 	  /* Insert initializers of the form T_0 = stride * increment
3964 	     for use in profitable replacements.  */
3965 	  insert_initializers (first_dep);
3966 	  dump_incr_vec ();
3967 
3968 	  /* Perform the replacements.  */
3969 	  replace_profitable_candidates (first_dep);
3970 	  free (incr_vec);
3971 	}
3972     }
3973 
3974   /* For conditional candidates, we may have uncommitted insertions
3975      on edges to clean up.  */
3976   gsi_commit_edge_inserts ();
3977 }
3978 
3979 namespace {
3980 
3981 const pass_data pass_data_strength_reduction =
3982 {
3983   GIMPLE_PASS, /* type */
3984   "slsr", /* name */
3985   OPTGROUP_NONE, /* optinfo_flags */
3986   TV_GIMPLE_SLSR, /* tv_id */
3987   ( PROP_cfg | PROP_ssa ), /* properties_required */
3988   0, /* properties_provided */
3989   0, /* properties_destroyed */
3990   0, /* todo_flags_start */
3991   0, /* todo_flags_finish */
3992 };
3993 
3994 class pass_strength_reduction : public gimple_opt_pass
3995 {
3996 public:
3997   pass_strength_reduction (gcc::context *ctxt)
3998     : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3999   {}
4000 
4001   /* opt_pass methods: */
4002   virtual bool gate (function *) { return flag_tree_slsr; }
4003   virtual unsigned int execute (function *);
4004 
4005 }; // class pass_strength_reduction
4006 
4007 unsigned
4008 pass_strength_reduction::execute (function *fun)
4009 {
4010   /* Create the obstack where candidates will reside.  */
4011   gcc_obstack_init (&cand_obstack);
4012 
4013   /* Allocate the candidate vector.  */
4014   cand_vec.create (128);
4015 
4016   /* Allocate the mapping from statements to candidate indices.  */
4017   stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
4018 
4019   /* Create the obstack where candidate chains will reside.  */
4020   gcc_obstack_init (&chain_obstack);
4021 
4022   /* Allocate the mapping from base expressions to candidate chains.  */
4023   base_cand_map = new hash_table<cand_chain_hasher> (500);
4024 
4025   /* Allocate the mapping from bases to alternative bases.  */
4026   alt_base_map = new hash_map<tree, tree>;
4027 
4028   /* Initialize the loop optimizer.  We need to detect flow across
4029      back edges, and this gives us dominator information as well.  */
4030   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
4031 
4032   /* Walk the CFG in predominator order looking for strength reduction
4033      candidates.  */
4034   find_candidates_dom_walker (CDI_DOMINATORS)
4035     .walk (fun->cfg->x_entry_block_ptr);
4036 
4037   if (dump_file && (dump_flags & TDF_DETAILS))
4038     {
4039       dump_cand_vec ();
4040       dump_cand_chains ();
4041     }
4042 
4043   delete alt_base_map;
4044   free_affine_expand_cache (&name_expansions);
4045 
4046   /* Analyze costs and make appropriate replacements.  */
4047   analyze_candidates_and_replace ();
4048 
4049   loop_optimizer_finalize ();
4050   delete base_cand_map;
4051   base_cand_map = NULL;
4052   obstack_free (&chain_obstack, NULL);
4053   delete stmt_cand_map;
4054   cand_vec.release ();
4055   obstack_free (&cand_obstack, NULL);
4056 
4057   return 0;
4058 }
4059 
4060 } // anon namespace
4061 
4062 gimple_opt_pass *
4063 make_pass_strength_reduction (gcc::context *ctxt)
4064 {
4065   return new pass_strength_reduction (ctxt);
4066 }
4067