/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "alias.h"
#include "toplev.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "rtl-iter.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two values don't match causes the entries to be
   ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
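
/* As an illustrative sketch (hypothetical pseudo-registers and quantity
   numbers, not taken from any real compilation), the scan might proceed
   like this:

	(set (reg:SI 100) (mem:SI (reg:SI 99)))  ; reg 100 gets a fresh
						 ; quantity, say qty 5
	(set (reg:SI 101) (reg:SI 100))		 ; a copy: REG_QTY (101)
						 ; becomes 5 as well
	(set (reg:SI 100) (const_int 0))	 ; reg 100 gets a new qty,
						 ; say 6, whose const_rtx
						 ; is (const_int 0)

   After the third insn, reg 101 still has qty 5, so it still holds the
   value loaded from memory, while reg 100 no longer does.  */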

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
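
/* For example (a sketch with made-up quantity numbers): after passing a
   comparison that is known to be true, such as (lt (reg:SI 100)
   (const_int 10)) with reg 100 in qty 5, qty_table[5] would hold
   comparison_code == LT, comparison_const == (const_int 10) and
   comparison_qty == -1, the latter because the comparison is not
   against a register.  */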

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;

/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the
   beginning of each run of cse_extended_basic_block.  The timestamp
   field of a cse_reg_info entry matches the value of this variable if
   and only if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; if so, we must rerun jump after CSE
   to add the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  The case where X is a pseudo
   register is special-cased (hard registers may require `do_not_record'
   to be set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
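
/* For instance (made-up numbers): if pseudos 100 and 101 currently share
   quantity 7, then HASH ((reg:SI 100), SImode) and
   HASH ((reg:SI 101), SImode) both evaluate to
   (((unsigned) REG << 7) + 7) & HASH_MASK, so equivalent pseudos land in
   the same bucket even though their register numbers differ.  */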

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P (N)						\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X, MODE)							\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
#define COST_IN(X, MODE, OUTER, OPNO)					\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, poly_uint64,
					machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   machine_mode *,
					   machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
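
/* For example, fixed_base_plus_p accepts (reg fp) as well as nested sums
   such as (plus (plus (reg fp) (const_int 8)) (const_int -4)), but
   rejects (plus (reg fp) (reg:SI 100)) because the second operand is not
   a CONST_INT.  */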

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
DEBUG_FUNCTION void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (!CHEAP_REGNO (regno))
	    {
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		    return MAX_COST;
		  cost += 2;
		}
	      else
		cost += 1;
	    }
	}
    }
  return cost;
}
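
/* For instance, approx_reg_cost on (plus:SI (reg:SI 100) (reg:SI 101))
   returns 2, one for each pseudo; if reg 101 were the frame pointer
   instead, the estimate would drop to 1, since frame registers satisfy
   CHEAP_REGNO and contribute nothing.  */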

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
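
/* Two worked cases (illustrative numbers only): preferable (4, 1, 7, 0)
   returns -3, so the rtx with the lower rtx_cost wins even though its
   register cost is higher; preferable (4, 2, 4, 1) returns 1, so with
   equal rtx_costs the expression with lower register pressure wins.  */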

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno)
{
  scalar_int_mode int_mode, inner_mode;
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && is_int_mode (mode, &int_mode)
	   && is_int_mode (GET_MODE (SUBREG_REG (x)), &inner_mode)
	   && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, inner_mode))
	  ? 0
	  : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2);
}
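
/* For example, the lowpart SUBREG of a DImode pseudo taken in SImode is
   given cost 0 on targets where truncating DImode to SImode is a no-op,
   since accessing the low part needs no instruction; any other
   non-register expression costs twice its rtx_cost.  */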


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
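
/* For instance, growing an empty table for NREGS == 100 yields
   new_size == 128 (start at 64 and double until NREGS is covered);
   once the table has already reached 2048 entries, further growth
   allocates exactly NREGS entries instead.  */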

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;
	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn_cc0 = 0;
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
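
/* A hedged sketch of the resulting ordering: a chain for one quantity
   keeps fixed hard registers at the head, followed by pseudos
   (preferring those live beyond the current extended basic block), with
   non-fixed hard registers kept at the tail, so `first_reg' is always
   the most desirable replacement.  */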

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}


/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
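
/* Worked example (with a hypothetical targetm.const_anchor of 256): for
   CST == (const_int 1000), *LOWER_BASE = 768 with *LOWER_OFFS = 232 and
   *UPPER_BASE = 1024 with *UPPER_OFFS = -24, since 768 + 232 and
   1024 + -24 both equal 1000.  If CST is itself a multiple of the
   anchor, the function returns false and no anchors are used.  */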

/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression, so it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
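
/* Continuing the example above: if (reg:SI 100) is known to hold 1000,
   this records the equivalences
	768  == (plus (reg:SI 100) (const_int -232))
	1024 == (plus (reg:SI 100) (const_int 24))
   in the hash table.  */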

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful, or NULL_RTX
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
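
/* Continuing the example once more: asking for (const_int 1016) while
   (reg:SI 100) is known to hold 1000 computes the anchors 768 (offset
   248) and 1024 (offset -8); offsetting either recorded register
   expression yields (plus (reg:SI 100) (const_int 16)), i.e. 1016,
   which can then be used in place of the constant.  */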

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and reg_cost is the
1527    cost of registers in X.  It is inserted at the proper position to
1528    keep the class in the order cheapest first.
1529 
1530    MODE is the machine-mode of X, or if X is an integer constant
1531    with VOIDmode then MODE is the mode with which X will be used.
1532 
1533    For elements of equal cheapness, the most recent one
1534    goes in front, except that the first element in the list
1535    remains first unless a cheaper element is added.  The order of
1536    pseudo-registers does not matter, as canon_reg will be called to
1537    find the cheapest when a register is retrieved from the table.
1538 
1539    The in_memory field in the hash table element is set to 0.
1540    The caller must set it nonzero if appropriate.
1541 
1542    You should call insert_regs (X, CLASSP, MODIFY) before calling here;
1543    if insert_regs returns a nonzero value, you must then recompute
1544    the hash code of X before calling here.
1545 
1546    If necessary, update the table showing constant values of quantities.  */
1547 
1548 static struct table_elt *
1549 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1550 		   machine_mode mode, int cost, int reg_cost)
1551 {
1552   struct table_elt *elt;
1553 
1554   /* If X is a register and we haven't made a quantity for it,
1555      something is wrong.  */
1556   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1557 
1558   /* If X is a hard register, show it is being put in the table.  */
1559   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1560     add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1561 
1562   /* Put an element for X into the right hash bucket.  */
1563 
1564   elt = free_element_chain;
1565   if (elt)
1566     free_element_chain = elt->next_same_hash;
1567   else
1568     elt = XNEW (struct table_elt);
1569 
1570   elt->exp = x;
1571   elt->canon_exp = NULL_RTX;
1572   elt->cost = cost;
1573   elt->regcost = reg_cost;
1574   elt->next_same_value = 0;
1575   elt->prev_same_value = 0;
1576   elt->next_same_hash = table[hash];
1577   elt->prev_same_hash = 0;
1578   elt->related_value = 0;
1579   elt->in_memory = 0;
1580   elt->mode = mode;
1581   elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1582 
1583   if (table[hash])
1584     table[hash]->prev_same_hash = elt;
1585   table[hash] = elt;
1586 
1587   /* Put it into the proper value-class.  */
1588   if (classp)
1589     {
1590       classp = classp->first_same_value;
1591       if (CHEAPER (elt, classp))
1592 	/* Insert at the head of the class.  */
1593 	{
1594 	  struct table_elt *p;
1595 	  elt->next_same_value = classp;
1596 	  classp->prev_same_value = elt;
1597 	  elt->first_same_value = elt;
1598 
1599 	  for (p = classp; p; p = p->next_same_value)
1600 	    p->first_same_value = elt;
1601 	}
1602       else
1603 	{
1604 	  /* Insert not at head of the class.  */
1605 	  /* Put it after the last element cheaper than X.  */
1606 	  struct table_elt *p, *next;
1607 
1608 	  for (p = classp;
1609 	       (next = p->next_same_value) && CHEAPER (next, elt);
1610 	       p = next)
1611 	    ;
1612 
1613 	  /* Put it after P and before NEXT.  */
1614 	  elt->next_same_value = next;
1615 	  if (next)
1616 	    next->prev_same_value = elt;
1617 
1618 	  elt->prev_same_value = p;
1619 	  p->next_same_value = elt;
1620 	  elt->first_same_value = classp;
1621 	}
1622     }
1623   else
1624     elt->first_same_value = elt;
1625 
1626   /* If this is a constant being set equivalent to a register or a register
1627      being set equivalent to a constant, note the constant equivalence.
1628 
1629      If this is a constant, it cannot be equivalent to a different constant,
1630      and a constant is the only thing that can be cheaper than a register.  So
1631      we know the register is the head of the class (before the constant was
1632      inserted).
1633 
1634      If this is a register that is not already known equivalent to a
1635      constant, we must check the entire class.
1636 
1637      If this is a register that is already known equivalent to a constant,
1638      update the qty's `const_insn' to show that `this_insn' is the latest
1639      insn making that quantity equivalent to the constant.  */
1640 
1641   if (elt->is_const && classp && REG_P (classp->exp)
1642       && !REG_P (x))
1643     {
1644       int exp_q = REG_QTY (REGNO (classp->exp));
1645       struct qty_table_elem *exp_ent = &qty_table[exp_q];
1646 
1647       exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1648       exp_ent->const_insn = this_insn;
1649     }
1650 
1651   else if (REG_P (x)
1652 	   && classp
1653 	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1654 	   && ! elt->is_const)
1655     {
1656       struct table_elt *p;
1657 
1658       for (p = classp; p != 0; p = p->next_same_value)
1659 	{
1660 	  if (p->is_const && !REG_P (p->exp))
1661 	    {
1662 	      int x_q = REG_QTY (REGNO (x));
1663 	      struct qty_table_elem *x_ent = &qty_table[x_q];
1664 
1665 	      x_ent->const_rtx
1666 		= gen_lowpart (GET_MODE (x), p->exp);
1667 	      x_ent->const_insn = this_insn;
1668 	      break;
1669 	    }
1670 	}
1671     }
1672 
1673   else if (REG_P (x)
1674 	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1675 	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1676     qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1677 
1678   /* If this is a constant with symbolic value,
1679      and it has a term with an explicit integer value,
1680      link it up with related expressions.  */
1681   if (GET_CODE (x) == CONST)
1682     {
1683       rtx subexp = get_related_value (x);
1684       unsigned subhash;
1685       struct table_elt *subelt, *subelt_prev;
1686 
1687       if (subexp != 0)
1688 	{
1689 	  /* Get the integer-free subexpression in the hash table.  */
1690 	  subhash = SAFE_HASH (subexp, mode);
1691 	  subelt = lookup (subexp, subhash, mode);
1692 	  if (subelt == 0)
1693 	    subelt = insert (subexp, NULL, subhash, mode);
1694 	  /* Initialize SUBELT's circular chain if it has none.  */
1695 	  if (subelt->related_value == 0)
1696 	    subelt->related_value = subelt;
1697 	  /* Find the element in the circular chain that precedes SUBELT.  */
1698 	  subelt_prev = subelt;
1699 	  while (subelt_prev->related_value != subelt)
1700 	    subelt_prev = subelt_prev->related_value;
1701 	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1702 	     This way the element that follows SUBELT is the oldest one.  */
1703 	  elt->related_value = subelt_prev->related_value;
1704 	  subelt_prev->related_value = elt;
1705 	}
1706     }
1707 
1708   return elt;
1709 }
1710 
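/* A stand-alone sketch of the class-insertion logic above, assuming a
   made-up node type: keep a doubly linked list sorted cheapest-first,
   let a new cheapest element become the head (updating every member's
   FIRST pointer, as insert_with_costs does for first_same_value), and
   otherwise place the new element before any member of equal cost so
   the most recent tie lands in front.  */

struct cost_node
{
  int cost;
  struct cost_node *next, *prev;
  struct cost_node *first;	/* Head of this node's class.  */
};

static struct cost_node *
class_insert (struct cost_node *head, struct cost_node *elt)
{
  struct cost_node *p, *next;

  elt->next = elt->prev = 0;
  if (head == 0)
    {
      elt->first = elt;
      return elt;
    }
  if (elt->cost < head->cost)
    {
      /* ELT becomes the new head; every member's FIRST must follow.  */
      elt->next = head;
      head->prev = elt;
      for (p = elt; p; p = p->next)
	p->first = elt;
      return elt;
    }
  /* Skip members strictly cheaper than ELT so that, among equal
     costs, the newest element goes first (the head excepted).  */
  for (p = head; (next = p->next) && next->cost < elt->cost; p = next)
    ;
  elt->next = next;
  if (next)
    next->prev = elt;
  elt->prev = p;
  p->next = elt;
  elt->first = head;
  return head;
}
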
1711 /* Wrap insert_with_costs by passing the default costs.  */
1712 
1713 static struct table_elt *
1714 insert (rtx x, struct table_elt *classp, unsigned int hash,
1715 	machine_mode mode)
1716 {
1717   return insert_with_costs (x, classp, hash, mode,
1718 			    COST (x, mode), approx_reg_cost (x));
1719 }
1720 
1721 
1722 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1723    CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1724    the two classes equivalent.
1725 
1726    CLASS1 will be the surviving class; CLASS2 should not be used after this
1727    call.
1728 
1729    Any invalid entries in CLASS2 will not be copied.  */
1730 
1731 static void
1732 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1733 {
1734   struct table_elt *elt, *next, *new_elt;
1735 
1736   /* Ensure we start with the head of the classes.  */
1737   class1 = class1->first_same_value;
1738   class2 = class2->first_same_value;
1739 
1740   /* If they were already equal, forget it.  */
1741   if (class1 == class2)
1742     return;
1743 
1744   for (elt = class2; elt; elt = next)
1745     {
1746       unsigned int hash;
1747       rtx exp = elt->exp;
1748       machine_mode mode = elt->mode;
1749 
1750       next = elt->next_same_value;
1751 
1752       /* Remove old entry, make a new one in CLASS1's class.
1753 	 Don't do this for invalid entries as we cannot find their
1754 	 hash code (it also isn't necessary).  */
1755       if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1756 	{
1757 	  bool need_rehash = false;
1758 
1759 	  hash_arg_in_memory = 0;
1760 	  hash = HASH (exp, mode);
1761 
1762 	  if (REG_P (exp))
1763 	    {
1764 	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1765 	      delete_reg_equiv (REGNO (exp));
1766 	    }
1767 
1768 	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1769 	    remove_pseudo_from_table (exp, hash);
1770 	  else
1771 	    remove_from_table (elt, hash);
1772 
1773 	  if (insert_regs (exp, class1, 0) || need_rehash)
1774 	    {
1775 	      rehash_using_reg (exp);
1776 	      hash = HASH (exp, mode);
1777 	    }
1778 	  new_elt = insert (exp, class1, hash, mode);
1779 	  new_elt->in_memory = hash_arg_in_memory;
1780 	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
1781 	    new_elt->cost = MAX_COST;
1782 	}
1783     }
1784 }
1785 
1786 /* Flush the entire hash table.  */
1787 
1788 static void
1789 flush_hash_table (void)
1790 {
1791   int i;
1792   struct table_elt *p;
1793 
1794   for (i = 0; i < HASH_SIZE; i++)
1795     for (p = table[i]; p; p = table[i])
1796       {
1797 	/* Note that invalidate can remove elements
1798 	   after P in the current hash chain.  */
1799 	if (REG_P (p->exp))
1800 	  invalidate (p->exp, VOIDmode);
1801 	else
1802 	  remove_from_table (p, i);
1803       }
1804 }
1805 
1806 /* Check whether an anti dependence exists between X and EXP.  MODE and
1807    ADDR are as for canon_anti_dependence.  */
1808 
1809 static bool
1810 check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
1811 {
1812   subrtx_iterator::array_type array;
1813   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1814     {
1815       const_rtx x = *iter;
1816       if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
1817 	return true;
1818     }
1819   return false;
1820 }
1821 
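/* FOR_EACH_SUBRTX above visits X and every nested operand without
   recursion.  Here is the same idea in miniature on an invented binary
   expression type, using an explicit stack; the fixed 64-entry bound
   is an assumption of the sketch (rtl-iter.h grows its worklist
   instead).  */

struct expr_node
{
  int is_mem;			/* Stands in for MEM_P.  */
  struct expr_node *op[2];	/* Sub-expressions, possibly 0.  */
};

static int
any_mem_subexpr (struct expr_node *x)
{
  struct expr_node *stack[64];
  int depth = 0;

  stack[depth++] = x;
  while (depth > 0)
    {
      struct expr_node *e = stack[--depth];
      int i;

      if (e == 0)
	continue;
      if (e->is_mem)
	return 1;
      for (i = 0; i < 2; i++)
	if (e->op[i] && depth < 64)
	  stack[depth++] = e->op[i];
    }
  return 0;
}
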
1822 /* Remove from the hash table, or mark as invalid, all expressions whose
1823    values could be altered by storing in X.  X is a register, a subreg, or
1824    a memory reference with nonvarying address (because, when a memory
1825    reference with a varying address is stored in, all memory references are
1826    removed by invalidate_memory so specific invalidation is superfluous).
1827    FULL_MODE, if not VOIDmode, indicates that this much should be
1828    invalidated instead of just the amount indicated by the mode of X.  This
1829    is only used for bitfield stores into memory.
1830 
1831    A nonvarying address may be just a register or just a symbol reference,
1832    or it may be either of those plus a numeric offset.  */
1833 
1834 static void
1835 invalidate (rtx x, machine_mode full_mode)
1836 {
1837   int i;
1838   struct table_elt *p;
1839   rtx addr;
1840 
1841   switch (GET_CODE (x))
1842     {
1843     case REG:
1844       {
1845 	/* If X is a register, dependencies on its contents are recorded
1846 	   through the qty number mechanism.  Just change the qty number of
1847 	   the register, mark it as invalid for expressions that refer to it,
1848 	   and remove the register itself.  */
1849 	unsigned int regno = REGNO (x);
1850 	unsigned int hash = HASH (x, GET_MODE (x));
1851 
1852 	/* Remove REGNO from any quantity list it might be on and indicate
1853 	   that its value might have changed.  If it is a pseudo, remove its
1854 	   entry from the hash table.
1855 
1856 	   For a hard register, we do the first two actions above for any
1857 	   additional hard registers corresponding to X.  Then, if any of these
1858 	   registers are in the table, we must remove any REG entries that
1859 	   overlap these registers.  */
1860 
1861 	delete_reg_equiv (regno);
1862 	REG_TICK (regno)++;
1863 	SUBREG_TICKED (regno) = -1;
1864 
1865 	if (regno >= FIRST_PSEUDO_REGISTER)
1866 	  remove_pseudo_from_table (x, hash);
1867 	else
1868 	  {
1869 	    HOST_WIDE_INT in_table
1870 	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1871 	    unsigned int endregno = END_REGNO (x);
1872 	    unsigned int tregno, tendregno, rn;
1873 	    struct table_elt *p, *next;
1874 
1875 	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1876 
1877 	    for (rn = regno + 1; rn < endregno; rn++)
1878 	      {
1879 		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1880 		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1881 		delete_reg_equiv (rn);
1882 		REG_TICK (rn)++;
1883 		SUBREG_TICKED (rn) = -1;
1884 	      }
1885 
1886 	    if (in_table)
1887 	      for (hash = 0; hash < HASH_SIZE; hash++)
1888 		for (p = table[hash]; p; p = next)
1889 		  {
1890 		    next = p->next_same_hash;
1891 
1892 		    if (!REG_P (p->exp)
1893 			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1894 		      continue;
1895 
1896 		    tregno = REGNO (p->exp);
1897 		    tendregno = END_REGNO (p->exp);
1898 		    if (tendregno > regno && tregno < endregno)
1899 		      remove_from_table (p, hash);
1900 		  }
1901 	  }
1902       }
1903       return;
1904 
1905     case SUBREG:
1906       invalidate (SUBREG_REG (x), VOIDmode);
1907       return;
1908 
1909     case PARALLEL:
1910       for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1911 	invalidate (XVECEXP (x, 0, i), VOIDmode);
1912       return;
1913 
1914     case EXPR_LIST:
1915       /* This is part of a disjoint return value; extract the location in
1916 	 question ignoring the offset.  */
1917       invalidate (XEXP (x, 0), VOIDmode);
1918       return;
1919 
1920     case MEM:
1921       addr = canon_rtx (get_addr (XEXP (x, 0)));
1922       /* Calculate the canonical version of X here so that
1923 	 true_dependence doesn't generate new RTL for X on each call.  */
1924       x = canon_rtx (x);
1925 
1926       /* Remove all hash table elements that refer to overlapping pieces of
1927 	 memory.  */
1928       if (full_mode == VOIDmode)
1929 	full_mode = GET_MODE (x);
1930 
1931       for (i = 0; i < HASH_SIZE; i++)
1932 	{
1933 	  struct table_elt *next;
1934 
1935 	  for (p = table[i]; p; p = next)
1936 	    {
1937 	      next = p->next_same_hash;
1938 	      if (p->in_memory)
1939 		{
1940 		  /* Just canonicalize the expression once;
1941 		     otherwise each time we call invalidate
1942 		     true_dependence will canonicalize the
1943 		     expression again.  */
1944 		  if (!p->canon_exp)
1945 		    p->canon_exp = canon_rtx (p->exp);
1946 		  if (check_dependence (p->canon_exp, x, full_mode, addr))
1947 		    remove_from_table (p, i);
1948 		}
1949 	    }
1950 	}
1951       return;
1952 
1953     default:
1954       gcc_unreachable ();
1955     }
1956 }
1957 
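/* The hard-register case above removes an entry when the register
   ranges [REGNO, ENDREGNO) and [TREGNO, TENDREGNO) intersect.  The
   test `tendregno > regno && tregno < endregno' is the usual overlap
   check for half-open intervals, sketched here on its own: two ranges
   overlap iff each one starts before the other ends.  For instance, a
   two-word value in hard regs 2..3 overlaps a one-word value in hard
   reg 3, since 4 > 3 and 3 < 4.  */

static int
hard_reg_ranges_overlap_p (unsigned int lo1, unsigned int hi1,
			   unsigned int lo2, unsigned int hi2)
{
  return hi1 > lo2 && lo1 < hi2;
}
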
1958 /* Invalidate DEST.  Used when DEST is not going to be added
1959    into the hash table for some reason, e.g. do_not_record
1960    flagged on it.  */
1961 
1962 static void
1963 invalidate_dest (rtx dest)
1964 {
1965   if (REG_P (dest)
1966       || GET_CODE (dest) == SUBREG
1967       || MEM_P (dest))
1968     invalidate (dest, VOIDmode);
1969   else if (GET_CODE (dest) == STRICT_LOW_PART
1970 	   || GET_CODE (dest) == ZERO_EXTRACT)
1971     invalidate (XEXP (dest, 0), GET_MODE (dest));
1972 }
1973 
1974 /* Remove all expressions that refer to register REGNO,
1975    since they are already invalid, and we are about to
1976    mark that register valid again and don't want the old
1977    expressions to reappear as valid.  */
1978 
1979 static void
1980 remove_invalid_refs (unsigned int regno)
1981 {
1982   unsigned int i;
1983   struct table_elt *p, *next;
1984 
1985   for (i = 0; i < HASH_SIZE; i++)
1986     for (p = table[i]; p; p = next)
1987       {
1988 	next = p->next_same_hash;
1989 	if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
1990 	  remove_from_table (p, i);
1991       }
1992 }
1993 
1994 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1995    and mode MODE.  */
1996 static void
1997 remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset,
1998 			    machine_mode mode)
1999 {
2000   unsigned int i;
2001   struct table_elt *p, *next;
2002 
2003   for (i = 0; i < HASH_SIZE; i++)
2004     for (p = table[i]; p; p = next)
2005       {
2006 	rtx exp = p->exp;
2007 	next = p->next_same_hash;
2008 
2009 	if (!REG_P (exp)
2010 	    && (GET_CODE (exp) != SUBREG
2011 		|| !REG_P (SUBREG_REG (exp))
2012 		|| REGNO (SUBREG_REG (exp)) != regno
2013 		|| ranges_maybe_overlap_p (SUBREG_BYTE (exp),
2014 					   GET_MODE_SIZE (GET_MODE (exp)),
2015 					   offset, GET_MODE_SIZE (mode)))
2016 	    && refers_to_regno_p (regno, p->exp))
2017 	  remove_from_table (p, i);
2018       }
2019 }
2020 
2021 /* Recompute the hash codes of any valid entries in the hash table that
2022    reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2023 
2024    This is called when we make a jump equivalence.  */
2025 
2026 static void
2027 rehash_using_reg (rtx x)
2028 {
2029   unsigned int i;
2030   struct table_elt *p, *next;
2031   unsigned hash;
2032 
2033   if (GET_CODE (x) == SUBREG)
2034     x = SUBREG_REG (x);
2035 
2036   /* If X is not a register or if the register is known not to be in any
2037      valid entries in the table, we have no work to do.  */
2038 
2039   if (!REG_P (x)
2040       || REG_IN_TABLE (REGNO (x)) < 0
2041       || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2042     return;
2043 
2044   /* Scan all hash chains looking for valid entries that mention X.
2045      If we find one and it is in the wrong hash chain, move it.  */
2046 
2047   for (i = 0; i < HASH_SIZE; i++)
2048     for (p = table[i]; p; p = next)
2049       {
2050 	next = p->next_same_hash;
2051 	if (reg_mentioned_p (x, p->exp)
2052 	    && exp_equiv_p (p->exp, p->exp, 1, false)
2053 	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2054 	  {
2055 	    if (p->next_same_hash)
2056 	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2057 
2058 	    if (p->prev_same_hash)
2059 	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2060 	    else
2061 	      table[i] = p->next_same_hash;
2062 
2063 	    p->next_same_hash = table[hash];
2064 	    p->prev_same_hash = 0;
2065 	    if (table[hash])
2066 	      table[hash]->prev_same_hash = p;
2067 	    table[hash] = p;
2068 	  }
2069       }
2070 }
2071 
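/* A minimal stand-alone model of the splice performed above: move a
   node out of one doubly linked bucket chain and push it onto the
   front of another.  The bucket array, node type and names are
   invented for the sketch.  */

#define SKETCH_NBUCKETS 32

struct bucket_node
{
  struct bucket_node *next, *prev;
};

static struct bucket_node *sketch_table[SKETCH_NBUCKETS];

static void
move_to_bucket (struct bucket_node *elt, unsigned int from, unsigned int to)
{
  /* Unlink ELT from its old chain, as rehash_using_reg does.  */
  if (elt->next)
    elt->next->prev = elt->prev;
  if (elt->prev)
    elt->prev->next = elt->next;
  else
    sketch_table[from] = elt->next;

  /* Push ELT onto the front of the new chain.  */
  elt->next = sketch_table[to];
  elt->prev = 0;
  if (sketch_table[to])
    sketch_table[to]->prev = elt;
  sketch_table[to] = elt;
}
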
2072 /* Remove from the hash table any expression that is a call-clobbered
2073    register.  Also update their TICK values.  */
2074 
2075 static void
2076 invalidate_for_call (void)
2077 {
2078   unsigned int regno, endregno;
2079   unsigned int i;
2080   unsigned hash;
2081   struct table_elt *p, *next;
2082   int in_table = 0;
2083   hard_reg_set_iterator hrsi;
2084 
2085   /* Go through all the hard registers.  For each that is clobbered in
2086      a CALL_INSN, remove the register from quantity chains and update
2087      reg_tick if defined.  Also see if any of these registers is currently
2088      in the table.  */
2089   EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
2090     {
2091       delete_reg_equiv (regno);
2092       if (REG_TICK (regno) >= 0)
2093 	{
2094 	  REG_TICK (regno)++;
2095 	  SUBREG_TICKED (regno) = -1;
2096 	}
2097       in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2098     }
2099 
2100   /* In the case where we have no call-clobbered hard registers in the
2101      table, we are done.  Otherwise, scan the table and remove any
2102      entry that overlaps a call-clobbered register.  */
2103 
2104   if (in_table)
2105     for (hash = 0; hash < HASH_SIZE; hash++)
2106       for (p = table[hash]; p; p = next)
2107 	{
2108 	  next = p->next_same_hash;
2109 
2110 	  if (!REG_P (p->exp)
2111 	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2112 	    continue;
2113 
2114 	  regno = REGNO (p->exp);
2115 	  endregno = END_REGNO (p->exp);
2116 
2117 	  for (i = regno; i < endregno; i++)
2118 	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2119 	      {
2120 		remove_from_table (p, hash);
2121 		break;
2122 	      }
2123 	}
2124 }
2125 
2126 /* Given an expression X of type CONST,
2127    and ELT which is its table entry (or 0 if it
2128    is not in the hash table),
2129    return an alternate expression for X as a register plus integer.
2130    If none can be found, return 0.  */
2131 
2132 static rtx
2133 use_related_value (rtx x, struct table_elt *elt)
2134 {
2135   struct table_elt *relt = 0;
2136   struct table_elt *p, *q;
2137   HOST_WIDE_INT offset;
2138 
2139   /* First, is there anything related known?
2140      If we have a table element, we can tell from that.
2141      Otherwise, we must look it up.  */
2142 
2143   if (elt != 0 && elt->related_value != 0)
2144     relt = elt;
2145   else if (elt == 0 && GET_CODE (x) == CONST)
2146     {
2147       rtx subexp = get_related_value (x);
2148       if (subexp != 0)
2149 	relt = lookup (subexp,
2150 		       SAFE_HASH (subexp, GET_MODE (subexp)),
2151 		       GET_MODE (subexp));
2152     }
2153 
2154   if (relt == 0)
2155     return 0;
2156 
2157   /* Search all related table entries for one that has an
2158      equivalent register.  */
2159 
2160   p = relt;
2161   while (1)
2162     {
2163       /* This loop is strange in that it is executed in two different cases.
2164 	 The first is when X is already in the table.  Then it is searching
2165 	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2166 	 X is not in the table.  Then RELT points to a class for the related
2167 	 value.
2168 
2169 	 Ensure that, whatever case we are in, we ignore classes that have
2170 	 the same value as X.  */
2171 
2172       if (rtx_equal_p (x, p->exp))
2173 	q = 0;
2174       else
2175 	for (q = p->first_same_value; q; q = q->next_same_value)
2176 	  if (REG_P (q->exp))
2177 	    break;
2178 
2179       if (q)
2180 	break;
2181 
2182       p = p->related_value;
2183 
2184       /* We went all the way around, so there is nothing to be found.
2185 	 Alternatively, perhaps RELT was in the table for some other reason
2186 	 and it has no related values recorded.  */
2187       if (p == relt || p == 0)
2188 	break;
2189     }
2190 
2191   if (q == 0)
2192     return 0;
2193 
2194   offset = (get_integer_term (x) - get_integer_term (p->exp));
2195   /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2196   return plus_constant (q->mode, q->exp, offset);
2197 }
2198 
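/* The search above walks a circular related-value ring and stops when
   it either finds what it wants or arrives back where it started.
   The same loop shape on an invented node type, where WANTED stands
   in for "this class has a register equivalent":  */

struct rel_node
{
  int wanted;
  struct rel_node *related;
};

static struct rel_node *
ring_find (struct rel_node *start)
{
  struct rel_node *p = start;

  if (p == 0)
    return 0;
  do
    {
      if (p->wanted)
	return p;
      p = p->related;
    }
  while (p != start && p != 0);	/* Full circle, or a broken ring.  */

  return 0;
}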
2199 
2200 /* Hash a string.  Just add its bytes up.  */
2201 static inline unsigned
2202 hash_rtx_string (const char *ps)
2203 {
2204   unsigned hash = 0;
2205   const unsigned char *p = (const unsigned char *) ps;
2206 
2207   if (p)
2208     while (*p)
2209       hash += *p++;
2210 
2211   return hash;
2212 }
2213 
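/* Because hash_rtx_string merely sums bytes, it is insensitive to
   character order: "ab" and "ba" collide.  That is acceptable here,
   where the result is only folded into a larger hash.  A quick
   illustrative check (not called by the pass):  */

static void
hash_rtx_string_example (void)
{
  /* Both strings sum to 'a' + 'b' == 195.  */
  gcc_assert (hash_rtx_string ("ab") == hash_rtx_string ("ba"));
}
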
2214 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2215    When the callback returns true, we continue with the new rtx.  */
2216 
2217 unsigned
2218 hash_rtx_cb (const_rtx x, machine_mode mode,
2219              int *do_not_record_p, int *hash_arg_in_memory_p,
2220              bool have_reg_qty, hash_rtx_callback_function cb)
2221 {
2222   int i, j;
2223   unsigned hash = 0;
2224   enum rtx_code code;
2225   const char *fmt;
2226   machine_mode newmode;
2227   rtx newx;
2228 
2229   /* Used to turn recursion into iteration.  We can't rely on GCC's
2230      tail-recursion elimination since we need to keep accumulating values
2231      in HASH.  */
2232  repeat:
2233   if (x == 0)
2234     return hash;
2235 
2236   /* Invoke the callback first.  */
2237   if (cb != NULL
2238       && ((*cb) (x, mode, &newx, &newmode)))
2239     {
2240       hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2241                            hash_arg_in_memory_p, have_reg_qty, cb);
2242       return hash;
2243     }
2244 
2245   code = GET_CODE (x);
2246   switch (code)
2247     {
2248     case REG:
2249       {
2250 	unsigned int regno = REGNO (x);
2251 
2252 	if (do_not_record_p && !reload_completed)
2253 	  {
2254 	    /* On some machines, we can't record any non-fixed hard register,
2255 	       because extending its life will cause reload problems.  We
2256 	       consider ap, fp, sp, gp to be fixed for this purpose.
2257 
2258 	       We also consider CCmode registers to be fixed for this purpose;
2259 	       failure to do so leads to a failure to simplify conditionals
2260 	       of the form 0<100.
2261 
2262 	       On all machines, we can't record any global registers.
2263 	       Nor should we record any register that is in a small
2264 	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
2265 	    bool record;
2266 
2267 	    if (regno >= FIRST_PSEUDO_REGISTER)
2268 	      record = true;
2269 	    else if (x == frame_pointer_rtx
2270 		     || x == hard_frame_pointer_rtx
2271 		     || x == arg_pointer_rtx
2272 		     || x == stack_pointer_rtx
2273 		     || x == pic_offset_table_rtx)
2274 	      record = true;
2275 	    else if (global_regs[regno])
2276 	      record = false;
2277 	    else if (fixed_regs[regno])
2278 	      record = true;
2279 	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2280 	      record = true;
2281 	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2282 	      record = false;
2283 	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2284 	      record = false;
2285 	    else
2286 	      record = true;
2287 
2288 	    if (!record)
2289 	      {
2290 		*do_not_record_p = 1;
2291 		return 0;
2292 	      }
2293 	  }
2294 
2295 	hash += ((unsigned int) REG << 7);
2296         hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2297 	return hash;
2298       }
2299 
2300     /* We handle SUBREG of a REG specially because the underlying
2301        reg changes its hash value with every value change; we don't
2302        want to have to forget unrelated subregs when one subreg changes.  */
2303     case SUBREG:
2304       {
2305 	if (REG_P (SUBREG_REG (x)))
2306 	  {
2307 	    hash += (((unsigned int) SUBREG << 7)
2308 		     + REGNO (SUBREG_REG (x))
2309 		     + (constant_lower_bound (SUBREG_BYTE (x))
2310 			/ UNITS_PER_WORD));
2311 	    return hash;
2312 	  }
2313 	break;
2314       }
2315 
2316     case CONST_INT:
2317       hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2318                + (unsigned int) INTVAL (x));
2319       return hash;
2320 
2321     case CONST_WIDE_INT:
2322       for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
2323 	hash += CONST_WIDE_INT_ELT (x, i);
2324       return hash;
2325 
2326     case CONST_POLY_INT:
2327       {
2328 	inchash::hash h;
2329 	h.add_int (hash);
2330 	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2331 	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
2332 	return h.end ();
2333       }
2334 
2335     case CONST_DOUBLE:
2336       /* This is like the general case, except that it only counts
2337 	 the integers representing the constant.  */
2338       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2339       if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
2340 	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2341 		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2342       else
2343 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2344       return hash;
2345 
2346     case CONST_FIXED:
2347       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2348       hash += fixed_hash (CONST_FIXED_VALUE (x));
2349       return hash;
2350 
2351     case CONST_VECTOR:
2352       {
2353 	int units;
2354 	rtx elt;
2355 
2356 	units = const_vector_encoded_nelts (x);
2357 
2358 	for (i = 0; i < units; ++i)
2359 	  {
2360 	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
2361 	    hash += hash_rtx_cb (elt, GET_MODE (elt),
2362                                  do_not_record_p, hash_arg_in_memory_p,
2363                                  have_reg_qty, cb);
2364 	  }
2365 
2366 	return hash;
2367       }
2368 
2369       /* Assume there is only one rtx object for any given label.  */
2370     case LABEL_REF:
2371       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2372 	 differences and differences between each stage's debugging dumps.  */
2373       hash += (((unsigned int) LABEL_REF << 7)
2374 	       + CODE_LABEL_NUMBER (label_ref_label (x)));
2375       return hash;
2376 
2377     case SYMBOL_REF:
2378       {
2379 	/* Don't hash on the symbol's address to avoid bootstrap differences.
2380 	   Different hash values may cause expressions to be recorded in
2381 	   different orders and thus different registers to be used in the
2382 	   final assembler.  This also avoids differences in the dump files
2383 	   between various stages.  */
2384 	unsigned int h = 0;
2385 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2386 
2387 	while (*p)
2388 	  h += (h << 7) + *p++; /* ??? revisit */
2389 
2390 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2391 	return hash;
2392       }
2393 
2394     case MEM:
2395       /* We don't record if marked volatile or if BLKmode since we don't
2396 	 know the size of the move.  */
2397       if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2398 	{
2399 	  *do_not_record_p = 1;
2400 	  return 0;
2401 	}
2402       if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2403 	*hash_arg_in_memory_p = 1;
2404 
2405       /* Now that we have already found this special case,
2406 	 might as well speed it up as much as possible.  */
2407       hash += (unsigned) MEM;
2408       x = XEXP (x, 0);
2409       goto repeat;
2410 
2411     case USE:
2412       /* A USE that mentions non-volatile memory needs special
2413 	 handling since the MEM may be BLKmode which normally
2414 	 prevents an entry from being made.  Pure calls are
2415 	 marked by a USE which mentions BLKmode memory.
2416 	 See calls.c:emit_call_1.  */
2417       if (MEM_P (XEXP (x, 0))
2418 	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2419 	{
2420 	  hash += (unsigned) USE;
2421 	  x = XEXP (x, 0);
2422 
2423 	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2424 	    *hash_arg_in_memory_p = 1;
2425 
2426 	  /* Now that we have already found this special case,
2427 	     might as well speed it up as much as possible.  */
2428 	  hash += (unsigned) MEM;
2429 	  x = XEXP (x, 0);
2430 	  goto repeat;
2431 	}
2432       break;
2433 
2434     case PRE_DEC:
2435     case PRE_INC:
2436     case POST_DEC:
2437     case POST_INC:
2438     case PRE_MODIFY:
2439     case POST_MODIFY:
2440     case PC:
2441     case CC0:
2442     case CALL:
2443     case UNSPEC_VOLATILE:
2444       if (do_not_record_p)
2445 	{
2446 	  *do_not_record_p = 1;
2447 	  return 0;
2448 	}
2449       else
2450 	return hash;
2451 
2452     case ASM_OPERANDS:
2453       if (do_not_record_p && MEM_VOLATILE_P (x))
2454 	{
2455 	  *do_not_record_p = 1;
2456 	  return 0;
2457 	}
2458       else
2459 	{
2460 	  /* We don't want to take the filename and line into account.  */
2461 	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2462 	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2463 	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2464 	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2465 
2466 	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2467 	    {
2468 	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2469 		{
2470 		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2471                                         GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2472                                         do_not_record_p, hash_arg_in_memory_p,
2473                                         have_reg_qty, cb)
2474 			   + hash_rtx_string
2475                            (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2476 		}
2477 
2478 	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2479 	      x = ASM_OPERANDS_INPUT (x, 0);
2480 	      mode = GET_MODE (x);
2481 	      goto repeat;
2482 	    }
2483 
2484 	  return hash;
2485 	}
2486       break;
2487 
2488     default:
2489       break;
2490     }
2491 
2492   i = GET_RTX_LENGTH (code) - 1;
2493   hash += (unsigned) code + (unsigned) GET_MODE (x);
2494   fmt = GET_RTX_FORMAT (code);
2495   for (; i >= 0; i--)
2496     {
2497       switch (fmt[i])
2498 	{
2499 	case 'e':
2500 	  /* If we are about to do the last recursive call
2501 	     needed at this level, change it into iteration.
2502 	     This function is called often enough to be worth it.  */
2503 	  if (i == 0)
2504 	    {
2505 	      x = XEXP (x, i);
2506 	      goto repeat;
2507 	    }
2508 
2509 	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2510                                hash_arg_in_memory_p,
2511                                have_reg_qty, cb);
2512 	  break;
2513 
2514 	case 'E':
2515 	  for (j = 0; j < XVECLEN (x, i); j++)
2516 	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2517                                  hash_arg_in_memory_p,
2518                                  have_reg_qty, cb);
2519 	  break;
2520 
2521 	case 's':
2522 	  hash += hash_rtx_string (XSTR (x, i));
2523 	  break;
2524 
2525 	case 'i':
2526 	  hash += (unsigned int) XINT (x, i);
2527 	  break;
2528 
2529 	case 'p':
2530 	  hash += constant_lower_bound (SUBREG_BYTE (x));
2531 	  break;
2532 
2533 	case '0': case 't':
2534 	  /* Unused.  */
2535 	  break;
2536 
2537 	default:
2538 	  gcc_unreachable ();
2539 	}
2540     }
2541 
2542   return hash;
2543 }
2544 
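/* The `repeat' label above is manual tail-call elimination: instead of
   recursing on the last operand, hash_rtx_cb reassigns X and jumps
   back, keeping the running total in HASH.  The same pattern in
   miniature, on an invented list type:  */

struct num_node
{
  int value;
  struct num_node *next;
};

static int
sum_list (struct num_node *x)
{
  int acc = 0;

 repeat:
  if (x == 0)
    return acc;
  acc += x->value;
  /* What would be the recursive call becomes a jump.  */
  x = x->next;
  goto repeat;
}
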
2545 /* Hash an rtx.  We are careful to make sure the value is never negative.
2546    Equivalent registers hash identically.
2547    MODE is used in hashing for CONST_INTs only;
2548    otherwise the mode of X is used.
2549 
2550    Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2551 
2552    If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2553    a MEM rtx which does not have the MEM_READONLY_P flag set.
2554 
2555    Note that cse_insn knows that the hash code of a MEM expression
2556    is just (int) MEM plus the hash code of the address.  */
2557 
2558 unsigned
2559 hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
2560 	  int *hash_arg_in_memory_p, bool have_reg_qty)
2561 {
2562   return hash_rtx_cb (x, mode, do_not_record_p,
2563                       hash_arg_in_memory_p, have_reg_qty, NULL);
2564 }
2565 
2566 /* Hash an rtx X for cse via hash_rtx.
2567    Stores 1 in do_not_record if any subexpression is volatile.
2568    Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2569    does not have the MEM_READONLY_P flag set.  */
2570 
2571 static inline unsigned
2572 canon_hash (rtx x, machine_mode mode)
2573 {
2574   return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2575 }
2576 
2577 /* Like canon_hash but with no side effects, i.e. do_not_record
2578    and hash_arg_in_memory are not changed.  */
2579 
2580 static inline unsigned
2581 safe_hash (rtx x, machine_mode mode)
2582 {
2583   int dummy_do_not_record;
2584   return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2585 }
2586 
2587 /* Return 1 iff X and Y would canonicalize into the same thing,
2588    without actually constructing the canonicalization of either one.
2589    If VALIDATE is nonzero,
2590    we assume X is an expression being processed from the rtl
2591    and Y was found in the hash table.  We check register refs
2592    in Y for being marked as valid.
2593 
2594    If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2595 
2596 int
2597 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2598 {
2599   int i, j;
2600   enum rtx_code code;
2601   const char *fmt;
2602 
2603   /* Note: it is incorrect to assume an expression is equivalent to itself
2604      if VALIDATE is nonzero.  */
2605   if (x == y && !validate)
2606     return 1;
2607 
2608   if (x == 0 || y == 0)
2609     return x == y;
2610 
2611   code = GET_CODE (x);
2612   if (code != GET_CODE (y))
2613     return 0;
2614 
2615   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2616   if (GET_MODE (x) != GET_MODE (y))
2617     return 0;
2618 
2619   /* MEMs referring to different address space are not equivalent.  */
2620   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2621     return 0;
2622 
2623   switch (code)
2624     {
2625     case PC:
2626     case CC0:
2627     CASE_CONST_UNIQUE:
2628       return x == y;
2629 
2630     case LABEL_REF:
2631       return label_ref_label (x) == label_ref_label (y);
2632 
2633     case SYMBOL_REF:
2634       return XSTR (x, 0) == XSTR (y, 0);
2635 
2636     case REG:
2637       if (for_gcse)
2638 	return REGNO (x) == REGNO (y);
2639       else
2640 	{
2641 	  unsigned int regno = REGNO (y);
2642 	  unsigned int i;
2643 	  unsigned int endregno = END_REGNO (y);
2644 
2645 	  /* If the quantities are not the same, the expressions are not
2646 	     equivalent.  If they are and we are not to validate, they
2647 	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2648 
2649 	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2650 	    return 0;
2651 
2652 	  if (! validate)
2653 	    return 1;
2654 
2655 	  for (i = regno; i < endregno; i++)
2656 	    if (REG_IN_TABLE (i) != REG_TICK (i))
2657 	      return 0;
2658 
2659 	  return 1;
2660 	}
2661 
2662     case MEM:
2663       if (for_gcse)
2664 	{
2665 	  /* A volatile mem should not be considered equivalent to any
2666 	     other.  */
2667 	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2668 	    return 0;
2669 
2670 	  /* Can't merge two expressions in different alias sets, since we
2671 	     can decide that the expression is transparent in a block when
2672 	     it isn't, due to it being set with a different alias set.
2673 
2674 	     Also, can't merge two expressions with different MEM_ATTRS.
2675 	     They could e.g. be two different entities allocated into the
2676 	     same space on the stack (see e.g. PR25130).  In that case, the
2677 	     MEM addresses can be the same, even though the two MEMs are
2678 	     absolutely not equivalent.
2679 
2680 	     But because really all MEM attributes should be the same for
2681 	     equivalent MEMs, we just use the invariant that MEMs that have
2682 	     the same attributes share the same mem_attrs data structure.  */
2683 	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
2684 	    return 0;
2685 
2686 	  /* If we are handling exceptions, we cannot consider two expressions
2687 	     with different trapping status as equivalent, because simple_mem
2688 	     might accept one and reject the other.  */
2689 	  if (cfun->can_throw_non_call_exceptions
2690 	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
2691 	    return 0;
2692 	}
2693       break;
2694 
2695     /*  For commutative operations, check both orders.  */
2696     case PLUS:
2697     case MULT:
2698     case AND:
2699     case IOR:
2700     case XOR:
2701     case NE:
2702     case EQ:
2703       return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2704 			     validate, for_gcse)
2705 	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2706 				validate, for_gcse))
2707 	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2708 				validate, for_gcse)
2709 		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2710 				   validate, for_gcse)));
2711 
2712     case ASM_OPERANDS:
2713       /* We don't use the generic code below because we want to
2714 	 disregard filename and line numbers.  */
2715 
2716       /* A volatile asm isn't equivalent to any other.  */
2717       if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2718 	return 0;
2719 
2720       if (GET_MODE (x) != GET_MODE (y)
2721 	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2722 	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2723 		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2724 	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2725 	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2726 	return 0;
2727 
2728       if (ASM_OPERANDS_INPUT_LENGTH (x))
2729 	{
2730 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2731 	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2732 			       ASM_OPERANDS_INPUT (y, i),
2733 			       validate, for_gcse)
2734 		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2735 			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2736 	      return 0;
2737 	}
2738 
2739       return 1;
2740 
2741     default:
2742       break;
2743     }
2744 
2745   /* Compare the elements.  If any pair of corresponding elements
2746      fails to match, return 0 for the whole thing.  */
2747 
2748   fmt = GET_RTX_FORMAT (code);
2749   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2750     {
2751       switch (fmt[i])
2752 	{
2753 	case 'e':
2754 	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2755 			      validate, for_gcse))
2756 	    return 0;
2757 	  break;
2758 
2759 	case 'E':
2760 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2761 	    return 0;
2762 	  for (j = 0; j < XVECLEN (x, i); j++)
2763 	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2764 				validate, for_gcse))
2765 	      return 0;
2766 	  break;
2767 
2768 	case 's':
2769 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2770 	    return 0;
2771 	  break;
2772 
2773 	case 'i':
2774 	  if (XINT (x, i) != XINT (y, i))
2775 	    return 0;
2776 	  break;
2777 
2778 	case 'w':
2779 	  if (XWINT (x, i) != XWINT (y, i))
2780 	    return 0;
2781 	  break;
2782 
2783 	case 'p':
2784 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2785 	    return 0;
2786 	  break;
2787 
2788 	case '0':
2789 	case 't':
2790 	  break;
2791 
2792 	default:
2793 	  gcc_unreachable ();
2794 	}
2795     }
2796 
2797   return 1;
2798 }
2799 
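/* The commutative cases above accept either operand order.  Sketched
   on an invented leaf-only expression type, the check is simply the
   disjunction of the straight and the swapped comparison:  */

struct comm_expr
{
  int code;			/* Stands in for an rtx code.  */
  int op0, op1;			/* Leaf operands, for brevity.  */
};

static int
comm_expr_equiv_p (const struct comm_expr *x, const struct comm_expr *y)
{
  if (x->code != y->code)
    return 0;
  return ((x->op0 == y->op0 && x->op1 == y->op1)
	  || (x->op0 == y->op1 && x->op1 == y->op0));
}
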
2800 /* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2801    the result if necessary.  INSN is as for canon_reg.  */
2802 
2803 static void
2804 validate_canon_reg (rtx *xloc, rtx_insn *insn)
2805 {
2806   if (*xloc)
2807     {
2808       rtx new_rtx = canon_reg (*xloc, insn);
2809 
2810       /* If replacing pseudo with hard reg or vice versa, ensure the
2811          insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2812       gcc_assert (insn && new_rtx);
2813       validate_change (insn, xloc, new_rtx, 1);
2814     }
2815 }
2816 
2817 /* Canonicalize an expression:
2818    replace each register reference inside it
2819    with the "oldest" equivalent register.
2820 
2821    If INSN is nonzero validate_change is used to ensure that INSN remains valid
2822    after we make our substitution.  The calls are made with IN_GROUP nonzero
2823    so apply_change_group must be called upon the outermost return from this
2824    function (unless INSN is zero).  The result of apply_change_group can
2825    generally be discarded since the changes we are making are optional.  */
2826 
2827 static rtx
2828 canon_reg (rtx x, rtx_insn *insn)
2829 {
2830   int i;
2831   enum rtx_code code;
2832   const char *fmt;
2833 
2834   if (x == 0)
2835     return x;
2836 
2837   code = GET_CODE (x);
2838   switch (code)
2839     {
2840     case PC:
2841     case CC0:
2842     case CONST:
2843     CASE_CONST_ANY:
2844     case SYMBOL_REF:
2845     case LABEL_REF:
2846     case ADDR_VEC:
2847     case ADDR_DIFF_VEC:
2848       return x;
2849 
2850     case REG:
2851       {
2852 	int first;
2853 	int q;
2854 	struct qty_table_elem *ent;
2855 
2856 	/* Never replace a hard reg, because hard regs can appear
2857 	   in more than one machine mode, and we must preserve the mode
2858 	   of each occurrence.  Also, some hard regs appear in
2859 	   MEMs that are shared and mustn't be altered.  Don't try to
2860 	   replace any reg that maps to a reg of class NO_REGS.  */
2861 	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2862 	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2863 	  return x;
2864 
2865 	q = REG_QTY (REGNO (x));
2866 	ent = &qty_table[q];
2867 	first = ent->first_reg;
2868 	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2869 		: REGNO_REG_CLASS (first) == NO_REGS ? x
2870 		: gen_rtx_REG (ent->mode, first));
2871       }
2872 
2873     default:
2874       break;
2875     }
2876 
2877   fmt = GET_RTX_FORMAT (code);
2878   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2879     {
2880       int j;
2881 
2882       if (fmt[i] == 'e')
2883 	validate_canon_reg (&XEXP (x, i), insn);
2884       else if (fmt[i] == 'E')
2885 	for (j = 0; j < XVECLEN (x, i); j++)
2886 	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2887     }
2888 
2889   return x;
2890 }
2891 
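/* The REG case of canon_reg reduces to two table lookups: register to
   quantity, then quantity to its oldest ("first") register.  A toy
   model with invented arrays; after processing a copy r5 = r3, both
   registers share a quantity whose first register is 3, so
   canon_reg_sketch (5) == 3.  */

#define SKETCH_NREGS 16

static int sketch_reg_qty[SKETCH_NREGS];	/* Register -> quantity.  */
static int sketch_qty_first_reg[SKETCH_NREGS];	/* Quantity -> oldest reg.  */

static int
canon_reg_sketch (int regno)
{
  return sketch_qty_first_reg[sketch_reg_qty[regno]];
}
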
2892 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2893    operation (EQ, NE, GT, etc.), follow it back through the hash table to
2894    find what values are being compared.
2895 
2896    *PARG1 and *PARG2 are updated to contain the rtx representing the values
2897    actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2898    was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2899    compared to produce cc0.
2900 
2901    The return value is the comparison operator: either CODE itself or
2902    the code corresponding to the inverse of the comparison.  */
2903 
2904 static enum rtx_code
2905 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2906 		      machine_mode *pmode1, machine_mode *pmode2)
2907 {
2908   rtx arg1, arg2;
2909   hash_set<rtx> *visited = NULL;
2910   /* Set nonzero when we find something of interest.  */
2911   rtx x = NULL;
2912 
2913   arg1 = *parg1, arg2 = *parg2;
2914 
2915   /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2916 
2917   while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2918     {
2919       int reverse_code = 0;
2920       struct table_elt *p = 0;
2921 
2922       /* Remember state from previous iteration.  */
2923       if (x)
2924 	{
2925 	  if (!visited)
2926 	    visited = new hash_set<rtx>;
2927 	  visited->add (x);
2928 	  x = 0;
2929 	}
2930 
2931       /* If arg1 is a COMPARE, extract the comparison arguments from it.
2932 	 On machines with CC0, this is the only case that can occur, since
2933 	 fold_rtx will return the COMPARE or item being compared with zero
2934 	 when given CC0.  */
2935 
2936       if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2937 	x = arg1;
2938 
2939       /* If ARG1 is a comparison operator and CODE is testing for
2940 	 STORE_FLAG_VALUE, get the inner arguments.  */
2941 
2942       else if (COMPARISON_P (arg1))
2943 	{
2944 #ifdef FLOAT_STORE_FLAG_VALUE
2945 	  REAL_VALUE_TYPE fsfv;
2946 #endif
2947 
2948 	  if (code == NE
2949 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2950 		  && code == LT && STORE_FLAG_VALUE == -1)
2951 #ifdef FLOAT_STORE_FLAG_VALUE
2952 	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2953 		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2954 		      REAL_VALUE_NEGATIVE (fsfv)))
2955 #endif
2956 	      )
2957 	    x = arg1;
2958 	  else if (code == EQ
2959 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2960 		       && code == GE && STORE_FLAG_VALUE == -1)
2961 #ifdef FLOAT_STORE_FLAG_VALUE
2962 		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2963 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2964 			   REAL_VALUE_NEGATIVE (fsfv)))
2965 #endif
2966 		   )
2967 	    x = arg1, reverse_code = 1;
2968 	}
2969 
2970       /* ??? We could also check for
2971 
2972 	 (ne (and (eq (...) (const_int 1))) (const_int 0))
2973 
2974 	 and related forms, but let's wait until we see them occurring.  */
2975 
2976       if (x == 0)
2977 	/* Look up ARG1 in the hash table and see if it has an equivalence
2978 	   that lets us see what is being compared.  */
2979 	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2980       if (p)
2981 	{
2982 	  p = p->first_same_value;
2983 
2984 	  /* If what we compare is already known to be constant, that is as
2985 	     good as it gets.
2986 	     We need to break the loop in this case, because otherwise we
2987 	     can have an infinite loop when looking at a reg that is known
2988 	     to be a constant which is the same as a comparison of a reg
2989 	     against zero which appears later in the insn stream, which in
2990 	     turn is constant and the same as the comparison of the first reg
2991 	     against zero...  */
2992 	  if (p->is_const)
2993 	    break;
2994 	}
2995 
2996       for (; p; p = p->next_same_value)
2997 	{
2998 	  machine_mode inner_mode = GET_MODE (p->exp);
2999 #ifdef FLOAT_STORE_FLAG_VALUE
3000 	  REAL_VALUE_TYPE fsfv;
3001 #endif
3002 
3003 	  /* If the entry isn't valid, skip it.  */
3004 	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3005 	    continue;
3006 
3007 	  /* If it's a comparison we've used before, skip it.  */
3008 	  if (visited && visited->contains (p->exp))
3009 	    continue;
3010 
3011 	  if (GET_CODE (p->exp) == COMPARE
3012 	      /* Another possibility is that this machine has a compare insn
3013 		 that includes the comparison code.  In that case, ARG1 would
3014 		 be equivalent to a comparison operation that would set ARG1 to
3015 		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3016 		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3017 		 we must reverse ORIG_CODE.  On machines with a negative value
3018 		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3019 	      || ((code == NE
3020 		   || (code == LT
3021 		       && val_signbit_known_set_p (inner_mode,
3022 						   STORE_FLAG_VALUE))
3023 #ifdef FLOAT_STORE_FLAG_VALUE
3024 		   || (code == LT
3025 		       && SCALAR_FLOAT_MODE_P (inner_mode)
3026 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3027 			   REAL_VALUE_NEGATIVE (fsfv)))
3028 #endif
3029 		   )
3030 		  && COMPARISON_P (p->exp)))
3031 	    {
3032 	      x = p->exp;
3033 	      break;
3034 	    }
3035 	  else if ((code == EQ
3036 		    || (code == GE
3037 			&& val_signbit_known_set_p (inner_mode,
3038 						    STORE_FLAG_VALUE))
3039 #ifdef FLOAT_STORE_FLAG_VALUE
3040 		    || (code == GE
3041 			&& SCALAR_FLOAT_MODE_P (inner_mode)
3042 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3043 			    REAL_VALUE_NEGATIVE (fsfv)))
3044 #endif
3045 		    )
3046 		   && COMPARISON_P (p->exp))
3047 	    {
3048 	      reverse_code = 1;
3049 	      x = p->exp;
3050 	      break;
3051 	    }
3052 
3053 	  /* If this is a non-trapping address, e.g. fp + constant, the
3054 	     equivalent is a better operand since it may let us predict
3055 	     the value of the comparison.  */
3056 	  else if (!rtx_addr_can_trap_p (p->exp))
3057 	    {
3058 	      arg1 = p->exp;
3059 	      continue;
3060 	    }
3061 	}
3062 
3063       /* If we didn't find a useful equivalence for ARG1, we are done.
3064 	 Otherwise, set up for the next iteration.  */
3065       if (x == 0)
3066 	break;
3067 
3068       /* If we need to reverse the comparison, make sure that is
3069 	 possible -- we can't necessarily infer the value of GE from LT
3070 	 with floating-point operands.  */
3071       if (reverse_code)
3072 	{
3073 	  enum rtx_code reversed = reversed_comparison_code (x, NULL);
3074 	  if (reversed == UNKNOWN)
3075 	    break;
3076 	  else
3077 	    code = reversed;
3078 	}
3079       else if (COMPARISON_P (x))
3080 	code = GET_CODE (x);
3081       arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3082     }
3083 
3084   /* Return our results.  Return the modes from before fold_rtx
3085      because fold_rtx might produce const_int, and then it's too late.  */
3086   *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3087   *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3088 
3089   if (visited)
3090     delete visited;
3091   return code;
3092 }
3093 
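/* The visited set above exists because equivalence chains in the
   table can loop: a register equivalent to a comparison which is in
   turn equivalent to the register.  A stand-alone model of the guard,
   with an invented node type and a small linear array standing in for
   hash_set<rtx>; the 32-entry bound is an assumption of the sketch.  */

struct chain_node
{
  int is_answer;
  struct chain_node *equiv;
};

static struct chain_node *
follow_equivs (struct chain_node *start)
{
  struct chain_node *visited[32];
  int n_visited = 0;
  struct chain_node *p = start;

  while (p && !p->is_answer)
    {
      int i;

      /* Stop rather than cycle if we have seen P before.  */
      for (i = 0; i < n_visited; i++)
	if (visited[i] == p)
	  return 0;
      if (n_visited == 32)
	return 0;
      visited[n_visited++] = p;
      p = p->equiv;
    }
  return p;
}
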
3094 /* If X is a nontrivial arithmetic operation on an argument for which
3095    a constant value can be determined, return the result of operating
3096    on that value, as a constant.  Otherwise, return X, possibly with
3097    one or more operands changed to a forward-propagated constant.
3098 
3099    If X is a register whose contents are known, we do NOT return
3100    those contents here; equiv_constant is called to perform that task.
3101    For SUBREGs and MEMs, we do that both here and in equiv_constant.
3102 
3103    INSN is the insn that we may be modifying.  If it is 0, make a copy
3104    of X before modifying it.  */
3105 
3106 static rtx
3107 fold_rtx (rtx x, rtx_insn *insn)
3108 {
3109   enum rtx_code code;
3110   machine_mode mode;
3111   const char *fmt;
3112   int i;
3113   rtx new_rtx = 0;
3114   int changed = 0;
3115 
3116   /* Operands of X.  */
3117   /* Workaround -Wmaybe-uninitialized false positive during
3118      profiledbootstrap by initializing them.  */
3119   rtx folded_arg0 = NULL_RTX;
3120   rtx folded_arg1 = NULL_RTX;
3121 
3122   /* Constant equivalents of first three operands of X;
3123      0 when no such equivalent is known.  */
3124   rtx const_arg0;
3125   rtx const_arg1;
3126   rtx const_arg2;
3127 
3128   /* The mode of the first operand of X.  We need this for sign and zero
3129      extends.  */
3130   machine_mode mode_arg0;
3131 
3132   if (x == 0)
3133     return x;
3134 
3135   /* Try to perform some initial simplifications on X.  */
3136   code = GET_CODE (x);
3137   switch (code)
3138     {
3139     case MEM:
3140     case SUBREG:
3141     /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3142        than it would in other contexts.  Basically its mode does not
3143        signify the size of the object read.  That information is carried
3144        by the size operand.  If we happen to have a MEM of the appropriate
3145        mode in our tables with a constant value, we could simplify the
3146        extraction incorrectly if we allowed substitution of that value
3147        for the MEM.  */
3148     case ZERO_EXTRACT:
3149     case SIGN_EXTRACT:
3150       if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3151         return new_rtx;
3152       return x;
3153 
3154     case CONST:
3155     CASE_CONST_ANY:
3156     case SYMBOL_REF:
3157     case LABEL_REF:
3158     case REG:
3159     case PC:
3160       /* No use simplifying an EXPR_LIST
3161 	 since they are used only for lists of args
3162 	 in a function call's REG_EQUAL note.  */
3163     case EXPR_LIST:
3164       return x;
3165 
3166     case CC0:
3167       return prev_insn_cc0;
3168 
3169     case ASM_OPERANDS:
3170       if (insn)
3171 	{
3172 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3173 	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3174 			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3175 	}
3176       return x;
3177 
3178     case CALL:
3179       if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3180 	return x;
3181       break;
3182 
3183     /* Anything else goes through the loop below.  */
3184     default:
3185       break;
3186     }
3187 
3188   mode = GET_MODE (x);
3189   const_arg0 = 0;
3190   const_arg1 = 0;
3191   const_arg2 = 0;
3192   mode_arg0 = VOIDmode;
3193 
3194   /* Try folding our operands.
3195      Then see which ones have constant values known.  */
3196 
3197   fmt = GET_RTX_FORMAT (code);
3198   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3199     if (fmt[i] == 'e')
3200       {
3201 	rtx folded_arg = XEXP (x, i), const_arg;
3202 	machine_mode mode_arg = GET_MODE (folded_arg);
3203 
3204 	switch (GET_CODE (folded_arg))
3205 	  {
3206 	  case MEM:
3207 	  case REG:
3208 	  case SUBREG:
3209 	    const_arg = equiv_constant (folded_arg);
3210 	    break;
3211 
3212 	  case CONST:
3213 	  CASE_CONST_ANY:
3214 	  case SYMBOL_REF:
3215 	  case LABEL_REF:
3216 	    const_arg = folded_arg;
3217 	    break;
3218 
3219 	  case CC0:
3220 	    /* The cc0-user and cc0-setter may be in different blocks if
3221 	       the cc0-setter potentially traps.  In that case PREV_INSN_CC0
3222 	       will have been cleared as we exited the block with the
3223 	       setter.
3224 
3225 	       While we could potentially track cc0 in this case, it just
3226 	       doesn't seem to be worth it given that cc0 targets are not
3227 	       terribly common or important these days and trapping math
3228 	       is rarely used.  The combination of those two conditions
3229 	       necessary to trip this situation is exceedingly rare in the
3230 	       real world.  */
3231 	    if (!prev_insn_cc0)
3232 	      {
3233 		const_arg = NULL_RTX;
3234 	      }
3235 	    else
3236 	      {
3237 		folded_arg = prev_insn_cc0;
3238 		mode_arg = prev_insn_cc0_mode;
3239 		const_arg = equiv_constant (folded_arg);
3240 	      }
3241 	    break;
3242 
3243 	  default:
3244 	    folded_arg = fold_rtx (folded_arg, insn);
3245 	    const_arg = equiv_constant (folded_arg);
3246 	    break;
3247 	  }
3248 
3249 	/* For the first three operands, see if the operand
3250 	   is constant or equivalent to a constant.  */
3251 	switch (i)
3252 	  {
3253 	  case 0:
3254 	    folded_arg0 = folded_arg;
3255 	    const_arg0 = const_arg;
3256 	    mode_arg0 = mode_arg;
3257 	    break;
3258 	  case 1:
3259 	    folded_arg1 = folded_arg;
3260 	    const_arg1 = const_arg;
3261 	    break;
3262 	  case 2:
3263 	    const_arg2 = const_arg;
3264 	    break;
3265 	  }
3266 
3267 	/* Pick the least expensive of the argument and an equivalent constant
3268 	   argument.  */
3269 	if (const_arg != 0
3270 	    && const_arg != folded_arg
3271 	    && (COST_IN (const_arg, mode_arg, code, i)
3272 		<= COST_IN (folded_arg, mode_arg, code, i))
3273 
3274 	    /* It's not safe to substitute the operand of a conversion
3275 	       operator with a constant, as the conversion's identity
3276 	       depends upon the mode of its operand.  This optimization
3277 	       is handled by the call to simplify_unary_operation.  */
3278 	    && (GET_RTX_CLASS (code) != RTX_UNARY
3279 		|| GET_MODE (const_arg) == mode_arg0
3280 		|| (code != ZERO_EXTEND
3281 		    && code != SIGN_EXTEND
3282 		    && code != TRUNCATE
3283 		    && code != FLOAT_TRUNCATE
3284 		    && code != FLOAT_EXTEND
3285 		    && code != FLOAT
3286 		    && code != FIX
3287 		    && code != UNSIGNED_FLOAT
3288 		    && code != UNSIGNED_FIX)))
3289 	  folded_arg = const_arg;
3290 
3291 	if (folded_arg == XEXP (x, i))
3292 	  continue;
3293 
3294 	if (insn == NULL_RTX && !changed)
3295 	  x = copy_rtx (x);
3296 	changed = 1;
3297 	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3298       }
3299 
3300   if (changed)
3301     {
3302       /* Canonicalize X if necessary, and keep const_argN and folded_argN
3303 	 consistent with the order in X.  */
3304       if (canonicalize_change_group (insn, x))
3305 	{
3306 	  std::swap (const_arg0, const_arg1);
3307 	  std::swap (folded_arg0, folded_arg1);
3308 	}
3309 
3310       apply_change_group ();
3311     }
3312 
3313   /* If X is an arithmetic operation, see if we can simplify it.  */
3314 
3315   switch (GET_RTX_CLASS (code))
3316     {
3317     case RTX_UNARY:
3318       {
3319 	/* We can't simplify extension ops unless we know the
3320 	   original mode.  */
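     	/* E.g. for (zero_extend:SI (const_int -1)) there is no way to know
     	   how many low-order bits to keep unless we know the mode the
     	   operand originally came from.  */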
3321 	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3322 	    && mode_arg0 == VOIDmode)
3323 	  break;
3324 
3325 	new_rtx = simplify_unary_operation (code, mode,
3326 					    const_arg0 ? const_arg0 : folded_arg0,
3327 					    mode_arg0);
3328       }
3329       break;
3330 
3331     case RTX_COMPARE:
3332     case RTX_COMM_COMPARE:
3333       /* See what items are actually being compared and set FOLDED_ARG[01]
3334 	 to those values and CODE to the actual comparison code.  If any are
3335 	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3336 	 do anything if both operands are already known to be constant.  */
3337 
3338       /* ??? Vector mode comparisons are not supported yet.  */
3339       if (VECTOR_MODE_P (mode))
3340 	break;
3341 
3342       if (const_arg0 == 0 || const_arg1 == 0)
3343 	{
3344 	  struct table_elt *p0, *p1;
3345 	  rtx true_rtx, false_rtx;
3346 	  machine_mode mode_arg1;
3347 
3348 	  if (SCALAR_FLOAT_MODE_P (mode))
3349 	    {
3350 #ifdef FLOAT_STORE_FLAG_VALUE
3351 	      true_rtx = (const_double_from_real_value
3352 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3353 #else
3354 	      true_rtx = NULL_RTX;
3355 #endif
3356 	      false_rtx = CONST0_RTX (mode);
3357 	    }
3358 	  else
3359 	    {
3360 	      true_rtx = const_true_rtx;
3361 	      false_rtx = const0_rtx;
3362 	    }
3363 
3364 	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3365 				       &mode_arg0, &mode_arg1);
3366 
3367 	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3368 	     what kinds of things are being compared, so we can't do
3369 	     anything with this comparison.  */
3370 
3371 	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3372 	    break;
3373 
3374 	  const_arg0 = equiv_constant (folded_arg0);
3375 	  const_arg1 = equiv_constant (folded_arg1);
3376 
3377 	  /* If we do not now have two constants being compared, see
3378 	     if we can nevertheless deduce some things about the
3379 	     comparison.  */
3380 	  if (const_arg0 == 0 || const_arg1 == 0)
3381 	    {
3382 	      if (const_arg1 != NULL)
3383 		{
3384 		  rtx cheapest_simplification;
3385 		  int cheapest_cost;
3386 		  rtx simp_result;
3387 		  struct table_elt *p;
3388 
3389 		  /* See if we can find an equivalent of folded_arg0
3390 		     that gets us a cheaper expression, possibly a
3391 		     constant through simplifications.  */
3392 		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3393 			      mode_arg0);
3394 
3395 		  if (p != NULL)
3396 		    {
3397 		      cheapest_simplification = x;
3398 		      cheapest_cost = COST (x, mode);
3399 
3400 		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3401 			{
3402 			  int cost;
3403 
3404 			  /* If the entry isn't valid, skip it.  */
3405 			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3406 			    continue;
3407 
3408 			  /* Try to simplify using this equivalence.  */
3409 			  simp_result
3410 			    = simplify_relational_operation (code, mode,
3411 							     mode_arg0,
3412 							     p->exp,
3413 							     const_arg1);
3414 
3415 			  if (simp_result == NULL)
3416 			    continue;
3417 
3418 			  cost = COST (simp_result, mode);
3419 			  if (cost < cheapest_cost)
3420 			    {
3421 			      cheapest_cost = cost;
3422 			      cheapest_simplification = simp_result;
3423 			    }
3424 			}
3425 
3426 		      /* If we have a cheaper expression now, use that
3427 			 and try folding it further, from the top.  */
3428 		      if (cheapest_simplification != x)
3429 			return fold_rtx (copy_rtx (cheapest_simplification),
3430 					 insn);
3431 		    }
3432 		}
3433 
3434 	      /* See if the two operands are the same.  */
3435 
3436 	      if ((REG_P (folded_arg0)
3437 		   && REG_P (folded_arg1)
3438 		   && (REG_QTY (REGNO (folded_arg0))
3439 		       == REG_QTY (REGNO (folded_arg1))))
3440 		  || ((p0 = lookup (folded_arg0,
3441 				    SAFE_HASH (folded_arg0, mode_arg0),
3442 				    mode_arg0))
3443 		      && (p1 = lookup (folded_arg1,
3444 				       SAFE_HASH (folded_arg1, mode_arg0),
3445 				       mode_arg0))
3446 		      && p0->first_same_value == p1->first_same_value))
3447 		folded_arg1 = folded_arg0;
3448 
3449 	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3450 		 doing now is either the same as we did before or the reverse
3451 		 (we only check the reverse if not floating-point).  */
3452 	      else if (REG_P (folded_arg0))
3453 		{
3454 		  int qty = REG_QTY (REGNO (folded_arg0));
3455 
3456 		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3457 		    {
3458 		      struct qty_table_elem *ent = &qty_table[qty];
3459 
3460 		      if ((comparison_dominates_p (ent->comparison_code, code)
3461 			   || (! FLOAT_MODE_P (mode_arg0)
3462 			       && comparison_dominates_p (ent->comparison_code,
3463 						          reverse_condition (code))))
3464 			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3465 			      || (const_arg1
3466 				  && rtx_equal_p (ent->comparison_const,
3467 						  const_arg1))
3468 			      || (REG_P (folded_arg1)
3469 				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3470 			{
3471 			  if (comparison_dominates_p (ent->comparison_code, code))
3472 			    {
3473 			      if (true_rtx)
3474 				return true_rtx;
3475 			      else
3476 				break;
3477 			    }
3478 			  else
3479 			    return false_rtx;
3480 			}
3481 		    }
3482 		}
3483 	    }
3484 	}
3485 
3486       /* If we are comparing against zero, see if the first operand is
3487 	 equivalent to an IOR with a constant.  If so, we may be able to
3488 	 determine the result of this comparison.  */
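           /* For example, if the first operand is known equivalent to
     	 (ior Y (const_int 4)), it cannot be zero, so rewriting it in IOR
     	 form below lets simplify_relational_operation decide the test.  */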
3489       if (const_arg1 == const0_rtx && !const_arg0)
3490 	{
3491 	  rtx y = lookup_as_function (folded_arg0, IOR);
3492 	  rtx inner_const;
3493 
3494 	  if (y != 0
3495 	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3496 	      && CONST_INT_P (inner_const)
3497 	      && INTVAL (inner_const) != 0)
3498 	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3499 	}
3500 
3501       {
3502 	rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3503 	rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3504 	new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3505 						 op0, op1);
3506       }
3507       break;
3508 
3509     case RTX_BIN_ARITH:
3510     case RTX_COMM_ARITH:
3511       switch (code)
3512 	{
3513 	case PLUS:
3514 	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
3515 	     with that LABEL_REF as its second operand.  If so, the result is
3516 	     the first operand of that MINUS.  This handles switches with an
3517 	     ADDR_DIFF_VEC table.  */
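     	  /* That is, (plus (minus Z (label_ref L)) (label_ref L)) folds
     	     to Z, where Z is typically the (label_ref ...) of the case
     	     target.  */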
3518 	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3519 	    {
3520 	      rtx y
3521 		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
3522 		: lookup_as_function (folded_arg0, MINUS);
3523 
3524 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3525 		  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
3526 		return XEXP (y, 0);
3527 
3528 	      /* Now try for a CONST of a MINUS like the above.  */
3529 	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3530 			: lookup_as_function (folded_arg0, CONST))) != 0
3531 		  && GET_CODE (XEXP (y, 0)) == MINUS
3532 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3533 		  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
3534 		return XEXP (XEXP (y, 0), 0);
3535 	    }
3536 
3537 	  /* Likewise if the operands are in the other order.  */
3538 	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3539 	    {
3540 	      rtx y
3541 		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
3542 		: lookup_as_function (folded_arg1, MINUS);
3543 
3544 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3545 		  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
3546 		return XEXP (y, 0);
3547 
3548 	      /* Now try for a CONST of a MINUS like the above.  */
3549 	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3550 			: lookup_as_function (folded_arg1, CONST))) != 0
3551 		  && GET_CODE (XEXP (y, 0)) == MINUS
3552 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3553 		  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
3554 		return XEXP (XEXP (y, 0), 0);
3555 	    }
3556 
3557 	  /* If second operand is a register equivalent to a negative
3558 	     CONST_INT, see if we can find a register equivalent to the
3559 	     positive constant.  Make a MINUS if so.  Don't do this for
3560 	     a non-negative constant since we might then alternate between
3561 	     choosing positive and negative constants.  Having the positive
3562 	     constant previously-used is the more common case.  Be sure
3563 	     the resulting constant is non-negative; if const_arg1 were
3564 	     the smallest negative number this would overflow: depending
3565 	     on the mode, this would either just be the same value (and
3566 	     hence not save anything) or be incorrect.  */
3567 	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3568 	      && INTVAL (const_arg1) < 0
3569 	      /* This used to test
3570 
3571 	         -INTVAL (const_arg1) >= 0
3572 
3573 		 But the Sun V5.0 compilers mis-compiled that test.  So
3574 		 instead we test for the problematic value in a more direct
3575 		 manner and hope the Sun compilers get it correct.  */
3576 	      && INTVAL (const_arg1) !=
3577 	        (HOST_WIDE_INT_1 << (HOST_BITS_PER_WIDE_INT - 1))
3578 	      && REG_P (folded_arg1))
3579 	    {
3580 	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
3581 	      struct table_elt *p
3582 		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
3583 
3584 	      if (p)
3585 		for (p = p->first_same_value; p; p = p->next_same_value)
3586 		  if (REG_P (p->exp))
3587 		    return simplify_gen_binary (MINUS, mode, folded_arg0,
3588 						canon_reg (p->exp, NULL));
3589 	    }
3590 	  goto from_plus;
3591 
3592 	case MINUS:
3593 	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3594 	     If so, produce (PLUS Z C2-C).  */
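     	  /* For example, if Y is known equal to (plus Z (const_int 7)) and
     	     C is (const_int 3), this produces (plus Z (const_int 4)) and
     	     asks fold_rtx to simplify it further.  */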
3595 	  if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3596 	    {
3597 	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3598 	      if (y && CONST_INT_P (XEXP (y, 1)))
3599 		return fold_rtx (plus_constant (mode, copy_rtx (y),
3600 						-INTVAL (const_arg1)),
3601 				 NULL);
3602 	    }
3603 
3604 	  /* Fall through.  */
3605 
3606 	from_plus:
3607 	case SMIN:    case SMAX:      case UMIN:    case UMAX:
3608 	case IOR:     case AND:       case XOR:
3609 	case MULT:
3610 	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
3611 	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3612 	     is known to be of similar form, we may be able to replace the
3613 	     operation with a combined operation.  This may eliminate the
3614 	     intermediate operation if every use is simplified in this way.
3615 	     Note that the similar optimization done by combine.c only works
3616 	     if the intermediate operation's result has only one reference.  */
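     	  /* For example, if REG is known equal to (plus Z (const_int 4)),
     	     (plus REG (const_int 8)) can be replaced by
     	     (plus Z (const_int 12)); likewise two nested shifts by
     	     constants combine into a single shift by the sum of the
     	     counts.  */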
3617 
3618 	  if (REG_P (folded_arg0)
3619 	      && const_arg1 && CONST_INT_P (const_arg1))
3620 	    {
3621 	      int is_shift
3622 		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3623 	      rtx y, inner_const, new_const;
3624 	      rtx canon_const_arg1 = const_arg1;
3625 	      enum rtx_code associate_code;
3626 
3627 	      if (is_shift
3628 		  && (INTVAL (const_arg1) >= GET_MODE_UNIT_PRECISION (mode)
3629 		      || INTVAL (const_arg1) < 0))
3630 		{
3631 		  if (SHIFT_COUNT_TRUNCATED)
3632 		    canon_const_arg1 = gen_int_shift_amount
3633 		      (mode, (INTVAL (const_arg1)
3634 			      & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3635 		  else
3636 		    break;
3637 		}
3638 
3639 	      y = lookup_as_function (folded_arg0, code);
3640 	      if (y == 0)
3641 		break;
3642 
3643 	      /* If we have compiled a statement like
3644 		 "if (x == (x & mask1))", and now are looking at
3645 		 "x & mask2", we will have a case where the first operand
3646 		 of Y is the same as our first operand.  Unless we detect
3647 		 this case, an infinite loop will result.  */
3648 	      if (XEXP (y, 0) == folded_arg0)
3649 		break;
3650 
3651 	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3652 	      if (!inner_const || !CONST_INT_P (inner_const))
3653 		break;
3654 
3655 	      /* Don't associate these operations if they are a PLUS with the
3656 		 same constant and it is a power of two.  These might be doable
3657 		 with a pre- or post-increment.  Similarly for two subtracts of
3658 		 identical powers of two with post decrement.  */
3659 
3660 	      if (code == PLUS && const_arg1 == inner_const
3661 		  && ((HAVE_PRE_INCREMENT
3662 			  && pow2p_hwi (INTVAL (const_arg1)))
3663 		      || (HAVE_POST_INCREMENT
3664 			  && pow2p_hwi (INTVAL (const_arg1)))
3665 		      || (HAVE_PRE_DECREMENT
3666 			  && pow2p_hwi (- INTVAL (const_arg1)))
3667 		      || (HAVE_POST_DECREMENT
3668 			  && pow2p_hwi (- INTVAL (const_arg1)))))
3669 		break;
3670 
3671 	      /* ??? Vector mode shifts by scalar
3672 		 shift operand are not supported yet.  */
3673 	      if (is_shift && VECTOR_MODE_P (mode))
3674                 break;
3675 
3676 	      if (is_shift
3677 		  && (INTVAL (inner_const) >= GET_MODE_UNIT_PRECISION (mode)
3678 		      || INTVAL (inner_const) < 0))
3679 		{
3680 		  if (SHIFT_COUNT_TRUNCATED)
3681 		    inner_const = gen_int_shift_amount
3682 		      (mode, (INTVAL (inner_const)
3683 			      & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3684 		  else
3685 		    break;
3686 		}
3687 
3688 	      /* Compute the code used to compose the constants.  For example,
3689 		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
3690 
3691 	      associate_code = (is_shift || code == MINUS ? PLUS : code);
3692 
3693 	      new_const = simplify_binary_operation (associate_code, mode,
3694 						     canon_const_arg1,
3695 						     inner_const);
3696 
3697 	      if (new_const == 0)
3698 		break;
3699 
3700 	      /* If we are associating shift operations, don't let this
3701 		 produce a shift of the size of the object or larger.
3702 		 This could occur when we follow a sign-extend by a right
3703 		 shift on a machine that does a sign-extend as a pair
3704 		 of shifts.  */
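     	      /* E.g. a QImode value sign-extended in SImode as
     		 (ashiftrt (ashift X (const_int 24)) (const_int 24)),
     		 followed by another right shift of 8, would associate to
     		 a shift count of 32, out of range for SImode.  */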
3705 
3706 	      if (is_shift
3707 		  && CONST_INT_P (new_const)
3708 		  && INTVAL (new_const) >= GET_MODE_UNIT_PRECISION (mode))
3709 		{
3710 		  /* As an exception, we can turn an ASHIFTRT of this
3711 		     form into a shift of the number of bits - 1.  */
3712 		  if (code == ASHIFTRT)
3713 		    new_const = gen_int_shift_amount
3714 		      (mode, GET_MODE_UNIT_BITSIZE (mode) - 1);
3715 		  else if (!side_effects_p (XEXP (y, 0)))
3716 		    return CONST0_RTX (mode);
3717 		  else
3718 		    break;
3719 		}
3720 
3721 	      y = copy_rtx (XEXP (y, 0));
3722 
3723 	      /* If Y contains our first operand (the most common way this
3724 		 can happen is if Y is a MEM), we would go into an infinite
3725 		 loop if we tried to fold it.  So don't in that case.  */
3726 
3727 	      if (! reg_mentioned_p (folded_arg0, y))
3728 		y = fold_rtx (y, insn);
3729 
3730 	      return simplify_gen_binary (code, mode, y, new_const);
3731 	    }
3732 	  break;
3733 
3734 	case DIV:       case UDIV:
3735 	  /* ??? The associative optimization performed immediately above is
3736 	     also possible for DIV and UDIV using associate_code of MULT.
3737 	     However, we would need extra code to verify that the
3738 	     multiplication does not overflow, that is, there is no overflow
3739 	     in the calculation of new_const.  */
3740 	  break;
3741 
3742 	default:
3743 	  break;
3744 	}
3745 
3746       new_rtx = simplify_binary_operation (code, mode,
3747 				       const_arg0 ? const_arg0 : folded_arg0,
3748 				       const_arg1 ? const_arg1 : folded_arg1);
3749       break;
3750 
3751     case RTX_OBJ:
3752       /* (lo_sum (high X) X) is simply X.  */
3753       if (code == LO_SUM && const_arg0 != 0
3754 	  && GET_CODE (const_arg0) == HIGH
3755 	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3756 	return const_arg1;
3757       break;
3758 
3759     case RTX_TERNARY:
3760     case RTX_BITFIELD_OPS:
3761       new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3762 					const_arg0 ? const_arg0 : folded_arg0,
3763 					const_arg1 ? const_arg1 : folded_arg1,
3764 					const_arg2 ? const_arg2 : XEXP (x, 2));
3765       break;
3766 
3767     default:
3768       break;
3769     }
3770 
3771   return new_rtx ? new_rtx : x;
3772 }
3773 
3774 /* Return a constant value currently equivalent to X.
3775    Return 0 if we don't know one.  */
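     /* For example, if an earlier insn in this extended basic block did
        (set (reg 117) (const_int 42)) -- register number purely
        illustrative -- then passing (reg:SI 117) here returns
        (const_int 42).  */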
3776 
3777 static rtx
3778 equiv_constant (rtx x)
3779 {
3780   if (REG_P (x)
3781       && REGNO_QTY_VALID_P (REGNO (x)))
3782     {
3783       int x_q = REG_QTY (REGNO (x));
3784       struct qty_table_elem *x_ent = &qty_table[x_q];
3785 
3786       if (x_ent->const_rtx)
3787 	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3788     }
3789 
3790   if (x == 0 || CONSTANT_P (x))
3791     return x;
3792 
3793   if (GET_CODE (x) == SUBREG)
3794     {
3795       machine_mode mode = GET_MODE (x);
3796       machine_mode imode = GET_MODE (SUBREG_REG (x));
3797       rtx new_rtx;
3798 
3799       /* See if we previously assigned a constant value to this SUBREG.  */
3800       if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3801 	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
3802 	  || (NUM_POLY_INT_COEFFS > 1
3803 	      && (new_rtx = lookup_as_function (x, CONST_POLY_INT)) != 0)
3804           || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3805           || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3806         return new_rtx;
3807 
3808       /* If we didn't and if doing so makes sense, see if we previously
3809 	 assigned a constant value to the enclosing word mode SUBREG.  */
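           /* For example (assuming a little-endian target with 4-byte
     	 words), if (subreg:SI (reg:DI R) 0) was recorded equal to a
     	 CONST_INT, then (subreg:HI (reg:DI R) 0) can use the low part
     	 of that constant.  */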
3810       if (known_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)
3811 	  && known_lt (UNITS_PER_WORD, GET_MODE_SIZE (imode)))
3812 	{
3813 	  poly_int64 byte = (SUBREG_BYTE (x)
3814 			     - subreg_lowpart_offset (mode, word_mode));
3815 	  if (known_ge (byte, 0) && multiple_p (byte, UNITS_PER_WORD))
3816 	    {
3817 	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3818 	      new_rtx = lookup_as_function (y, CONST_INT);
3819 	      if (new_rtx)
3820 		return gen_lowpart (mode, new_rtx);
3821 	    }
3822 	}
3823 
3824       /* Otherwise see if we already have a constant for the inner REG,
3825 	 and if that is enough to calculate an equivalent constant for
3826 	 the subreg.  Note that the upper bits of paradoxical subregs
3827 	 are undefined, so they cannot be said to equal anything.  */
3828       if (REG_P (SUBREG_REG (x))
3829 	  && !paradoxical_subreg_p (x)
3830 	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3831         return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3832 
3833       return 0;
3834     }
3835 
3836   /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3837      the hash table in case its value was seen before.  */
3838 
3839   if (MEM_P (x))
3840     {
3841       struct table_elt *elt;
3842 
3843       x = avoid_constant_pool_reference (x);
3844       if (CONSTANT_P (x))
3845 	return x;
3846 
3847       elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3848       if (elt == 0)
3849 	return 0;
3850 
3851       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3852 	if (elt->is_const && CONSTANT_P (elt->exp))
3853 	  return elt->exp;
3854     }
3855 
3856   return 0;
3857 }
3858 
3859 /* Given INSN, a jump insn, TAKEN indicates whether we are following the
3860    "taken" branch.
3861 
3862    In certain cases, this can cause us to add an equivalence.  For example,
3863    if we are following the taken case of
3864 	if (i == 2)
3865    we can add the fact that `i' and '2' are now equivalent.
3866 
3867    In any case, we can record that this comparison was passed.  If the same
3868    comparison is seen later, we will know its value.  */
3869 
3870 static void
3871 record_jump_equiv (rtx_insn *insn, bool taken)
3872 {
3873   int cond_known_true;
3874   rtx op0, op1;
3875   rtx set;
3876   machine_mode mode, mode0, mode1;
3877   int reversed_nonequality = 0;
3878   enum rtx_code code;
3879 
3880   /* Ensure this is the right kind of insn.  */
3881   gcc_assert (any_condjump_p (insn));
3882 
3883   set = pc_set (insn);
3884 
3885   /* See if this jump condition is known true or false.  */
3886   if (taken)
3887     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3888   else
3889     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3890 
3891   /* Get the type of comparison being done and the operands being compared.
3892      If we had to reverse a non-equality condition, record that fact so we
3893      know that it isn't valid for floating-point.  */
3894   code = GET_CODE (XEXP (SET_SRC (set), 0));
3895   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3896   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3897 
3898   /* On a cc0 target the cc0-setter and cc0-user may end up in different
3899      blocks.  When that happens the tracking of the cc0-setter via
3900      PREV_INSN_CC0 is spoiled.  That means that fold_rtx may return
3901      NULL_RTX.  In those cases, there's nothing to record.  */
3902   if (op0 == NULL_RTX || op1 == NULL_RTX)
3903     return;
3904 
3905   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3906   if (! cond_known_true)
3907     {
3908       code = reversed_comparison_code_parts (code, op0, op1, insn);
3909 
3910       /* Don't remember if we can't find the inverse.  */
3911       if (code == UNKNOWN)
3912 	return;
3913     }
3914 
3915   /* The mode is the mode of the non-constant.  */
3916   mode = mode0;
3917   if (mode1 != VOIDmode)
3918     mode = mode1;
3919 
3920   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3921 }
3922 
3923 /* Yet another form of subreg creation.  In this case, we want something in
3924    MODE, and we should assume OP has MODE iff it is naturally modeless.  */
3925 
3926 static rtx
3927 record_jump_cond_subreg (machine_mode mode, rtx op)
3928 {
3929   machine_mode op_mode = GET_MODE (op);
3930   if (op_mode == mode || op_mode == VOIDmode)
3931     return op;
3932   return lowpart_subreg (mode, op, op_mode);
3933 }
3934 
3935 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3936    REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3937    Make any useful entries we can with that information.  Called from
3938    above function and called recursively.  */
3939 
3940 static void
3941 record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
3942 		  rtx op1, int reversed_nonequality)
3943 {
3944   unsigned op0_hash, op1_hash;
3945   int op0_in_memory, op1_in_memory;
3946   struct table_elt *op0_elt, *op1_elt;
3947 
3948   /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3949      we know that they are also equal in the smaller mode (this is also
3950      true for all smaller modes whether or not there is a SUBREG, but
3951      is not worth testing for with no SUBREG).  */
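       /* E.g. if (subreg:DI (reg:SI R) 0) is known equal to OP1, then
          (reg:SI R) equals the SImode lowpart of OP1; the paradoxical
          SUBREG's upper bits are undefined, so only the narrow equality
          is meaningful.  */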
3952 
3953   /* Note that GET_MODE (op0) may not equal MODE.  */
3954   if (code == EQ && paradoxical_subreg_p (op0))
3955     {
3956       machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3957       rtx tem = record_jump_cond_subreg (inner_mode, op1);
3958       if (tem)
3959 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3960 			  reversed_nonequality);
3961     }
3962 
3963   if (code == EQ && paradoxical_subreg_p (op1))
3964     {
3965       machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3966       rtx tem = record_jump_cond_subreg (inner_mode, op0);
3967       if (tem)
3968 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3969 			  reversed_nonequality);
3970     }
3971 
3972   /* Similarly, if this is an NE comparison, and either is a SUBREG
3973      making a smaller mode, we know the whole thing is also NE.  */
3974 
3975   /* Note that GET_MODE (op0) may not equal MODE;
3976      if we test MODE instead, we can get an infinite recursion
3977      alternating between two modes each wider than MODE.  */
3978 
3979   if (code == NE
3980       && partial_subreg_p (op0)
3981       && subreg_lowpart_p (op0))
3982     {
3983       machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3984       rtx tem = record_jump_cond_subreg (inner_mode, op1);
3985       if (tem)
3986 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3987 			  reversed_nonequality);
3988     }
3989 
3990   if (code == NE
3991       && partial_subreg_p (op1)
3992       && subreg_lowpart_p (op1))
3993     {
3994       machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3995       rtx tem = record_jump_cond_subreg (inner_mode, op0);
3996       if (tem)
3997 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3998 			  reversed_nonequality);
3999     }
4000 
4001   /* Hash both operands.  */
4002 
4003   do_not_record = 0;
4004   hash_arg_in_memory = 0;
4005   op0_hash = HASH (op0, mode);
4006   op0_in_memory = hash_arg_in_memory;
4007 
4008   if (do_not_record)
4009     return;
4010 
4011   do_not_record = 0;
4012   hash_arg_in_memory = 0;
4013   op1_hash = HASH (op1, mode);
4014   op1_in_memory = hash_arg_in_memory;
4015 
4016   if (do_not_record)
4017     return;
4018 
4019   /* Look up both operands.  */
4020   op0_elt = lookup (op0, op0_hash, mode);
4021   op1_elt = lookup (op1, op1_hash, mode);
4022 
4023   /* If both operands are already equivalent or if they are not in the
4024      table but are identical, do nothing.  */
4025   if ((op0_elt != 0 && op1_elt != 0
4026        && op0_elt->first_same_value == op1_elt->first_same_value)
4027       || op0 == op1 || rtx_equal_p (op0, op1))
4028     return;
4029 
4030   /* If we aren't setting two things equal, all we can do is save this
4031      comparison.  Similarly if this is floating-point.  In the latter
4032      case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4033      If we record the equality, we might inadvertently delete code
4034      whose intent was to change -0 to +0.  */
4035 
4036   if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4037     {
4038       struct qty_table_elem *ent;
4039       int qty;
4040 
4041       /* If we reversed a floating-point comparison, if OP0 is not a
4042 	 register, or if OP1 is neither a register nor a constant, we can't
4043 	 do anything.  */
4044 
4045       if (!REG_P (op1))
4046 	op1 = equiv_constant (op1);
4047 
4048       if ((reversed_nonequality && FLOAT_MODE_P (mode))
4049 	  || !REG_P (op0) || op1 == 0)
4050 	return;
4051 
4052       /* Put OP0 in the hash table if it isn't already.  This gives it a
4053 	 new quantity number.  */
4054       if (op0_elt == 0)
4055 	{
4056 	  if (insert_regs (op0, NULL, 0))
4057 	    {
4058 	      rehash_using_reg (op0);
4059 	      op0_hash = HASH (op0, mode);
4060 
4061 	      /* If OP0 is contained in OP1, this changes its hash code
4062 		 as well.  Faster to rehash than to check, except
4063 		 for the simple case of a constant.  */
4064 	      if (! CONSTANT_P (op1))
4065 		op1_hash = HASH (op1, mode);
4066 	    }
4067 
4068 	  op0_elt = insert (op0, NULL, op0_hash, mode);
4069 	  op0_elt->in_memory = op0_in_memory;
4070 	}
4071 
4072       qty = REG_QTY (REGNO (op0));
4073       ent = &qty_table[qty];
4074 
4075       ent->comparison_code = code;
4076       if (REG_P (op1))
4077 	{
4078 	  /* Look it up again--in case op0 and op1 are the same.  */
4079 	  op1_elt = lookup (op1, op1_hash, mode);
4080 
4081 	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4082 	  if (op1_elt == 0)
4083 	    {
4084 	      if (insert_regs (op1, NULL, 0))
4085 		{
4086 		  rehash_using_reg (op1);
4087 		  op1_hash = HASH (op1, mode);
4088 		}
4089 
4090 	      op1_elt = insert (op1, NULL, op1_hash, mode);
4091 	      op1_elt->in_memory = op1_in_memory;
4092 	    }
4093 
4094 	  ent->comparison_const = NULL_RTX;
4095 	  ent->comparison_qty = REG_QTY (REGNO (op1));
4096 	}
4097       else
4098 	{
4099 	  ent->comparison_const = op1;
4100 	  ent->comparison_qty = -1;
4101 	}
4102 
4103       return;
4104     }
4105 
4106   /* If either side is still missing an equivalence, make it now,
4107      then merge the equivalences.  */
4108 
4109   if (op0_elt == 0)
4110     {
4111       if (insert_regs (op0, NULL, 0))
4112 	{
4113 	  rehash_using_reg (op0);
4114 	  op0_hash = HASH (op0, mode);
4115 	}
4116 
4117       op0_elt = insert (op0, NULL, op0_hash, mode);
4118       op0_elt->in_memory = op0_in_memory;
4119     }
4120 
4121   if (op1_elt == 0)
4122     {
4123       if (insert_regs (op1, NULL, 0))
4124 	{
4125 	  rehash_using_reg (op1);
4126 	  op1_hash = HASH (op1, mode);
4127 	}
4128 
4129       op1_elt = insert (op1, NULL, op1_hash, mode);
4130       op1_elt->in_memory = op1_in_memory;
4131     }
4132 
4133   merge_equiv_classes (op0_elt, op1_elt);
4134 }
4135 
4136 /* CSE processing for one instruction.
4137 
4138    Most "true" common subexpressions are optimized away in GIMPLE, but
4139    the few that "leak through" are cleaned up by cse_insn, and complex
4140    addressing modes are often formed here.
4141 
4142    The main function is cse_insn, and between here and that function
4143    a couple of helper functions are defined to keep the size of cse_insn
4144    within reasonable proportions.
4145 
4146    Data is shared between the main and helper functions via STRUCT SET,
4147    which contains all the data related to every set in the instruction
4148    being processed.
4149 
4150    Note that cse_insn processes all sets in the instruction.  Most
4151    passes in GCC only process simple SET insns or single_set insns, but
4152    CSE processes insns with multiple sets as well.  */
4153 
4154 /* Data on one SET contained in the instruction.  */
4155 
4156 struct set
4157 {
4158   /* The SET rtx itself.  */
4159   rtx rtl;
4160   /* The SET_SRC of the rtx (the original value, if it is changing).  */
4161   rtx src;
4162   /* The hash-table element for the SET_SRC of the SET.  */
4163   struct table_elt *src_elt;
4164   /* Hash value for the SET_SRC.  */
4165   unsigned src_hash;
4166   /* Hash value for the SET_DEST.  */
4167   unsigned dest_hash;
4168   /* The SET_DEST, with SUBREG, etc., stripped.  */
4169   rtx inner_dest;
4170   /* Nonzero if the SET_SRC is in memory.  */
4171   char src_in_memory;
4172   /* Nonzero if the SET_SRC contains something
4173      whose value cannot be predicted and understood.  */
4174   char src_volatile;
4175   /* Original machine mode, in case it becomes a CONST_INT.
4176      The size of this field should match the size of the mode
4177      field of struct rtx_def (see rtl.h).  */
4178   ENUM_BITFIELD(machine_mode) mode : 8;
4179   /* Hash value of constant equivalent for SET_SRC.  */
4180   unsigned src_const_hash;
4181   /* A constant equivalent for SET_SRC, if any.  */
4182   rtx src_const;
4183   /* Table entry for constant equivalent for SET_SRC, if any.  */
4184   struct table_elt *src_const_elt;
4185   /* Table entry for the destination address.  */
4186   struct table_elt *dest_addr_elt;
4187 };
4188 
4189 /* Special handling for (set REG0 REG1) where REG0 is the
4190    "cheapest", cheaper than REG1.  After cse, REG1 will probably not
4191    be used in the sequel, so (if easily done) change this insn to
4192    (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4193    that computed their value.  Then REG1 will become a dead store
4194    and won't cloud the situation for later optimizations.
4195 
4196    Do not make this change if REG1 is a hard register, because it will
4197    then be used in the sequel and we may be changing a two-operand insn
4198    into a three-operand insn.
4199 
4200    This is the last transformation that cse_insn will try to do.  */
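     /* For example (pseudo register numbers purely illustrative),
 
        (set (reg 200) (plus A B))
        (set (reg 100) (reg 200))
 
        becomes
 
        (set (reg 100) (plus A B))
        (set (reg 200) (reg 100))
 
        leaving the set of reg 200 as a likely dead store.  */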
4201 
4202 static void
4203 try_back_substitute_reg (rtx set, rtx_insn *insn)
4204 {
4205   rtx dest = SET_DEST (set);
4206   rtx src = SET_SRC (set);
4207 
4208   if (REG_P (dest)
4209       && REG_P (src) && ! HARD_REGISTER_P (src)
4210       && REGNO_QTY_VALID_P (REGNO (src)))
4211     {
4212       int src_q = REG_QTY (REGNO (src));
4213       struct qty_table_elem *src_ent = &qty_table[src_q];
4214 
4215       if (src_ent->first_reg == REGNO (dest))
4216 	{
4217 	  /* Scan for the previous nonnote insn, but stop at a basic
4218 	     block boundary.  */
4219 	  rtx_insn *prev = insn;
4220 	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
4221 	  do
4222 	    {
4223 	      prev = PREV_INSN (prev);
4224 	    }
4225 	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4226 
4227 	  /* Do not swap the registers around if the previous instruction
4228 	     attaches a REG_EQUIV note to REG1.
4229 
4230 	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
4231 	     from the pseudo that originally shadowed an incoming argument
4232 	     to another register.  Some uses of REG_EQUIV might rely on it
4233 	     being attached to REG1 rather than REG2.
4234 
4235 	     This section previously turned the REG_EQUIV into a REG_EQUAL
4236 	     note.  We cannot do that because REG_EQUIV may provide an
4237 	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
4238 	  if (NONJUMP_INSN_P (prev)
4239 	      && GET_CODE (PATTERN (prev)) == SET
4240 	      && SET_DEST (PATTERN (prev)) == src
4241 	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4242 	    {
4243 	      rtx note;
4244 
4245 	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4246 	      validate_change (insn, &SET_DEST (set), src, 1);
4247 	      validate_change (insn, &SET_SRC (set), dest, 1);
4248 	      apply_change_group ();
4249 
4250 	      /* If INSN has a REG_EQUAL note, and this note mentions
4251 		 REG0, then we must delete it, because the value in
4252 		 REG0 has changed.  If the note's value is REG1, we must
4253 		 also delete it because that is now this insn's dest.  */
4254 	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4255 	      if (note != 0
4256 		  && (reg_mentioned_p (dest, XEXP (note, 0))
4257 		      || rtx_equal_p (src, XEXP (note, 0))))
4258 		remove_note (insn, note);
4259 
4260 	      /* If INSN has a REG_ARGS_SIZE note, move it to PREV.  */
4261 	      note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4262 	      if (note != 0)
4263 		{
4264 		  remove_note (insn, note);
4265 		  gcc_assert (!find_reg_note (prev, REG_ARGS_SIZE, NULL_RTX));
4266 		  set_unique_reg_note (prev, REG_ARGS_SIZE, XEXP (note, 0));
4267 		}
4268 	    }
4269 	}
4270     }
4271 }
4272 
4273 /* Record all the SETs in this instruction into SETS_PTR,
4274    and return the number of recorded sets.  */
4275 static int
4276 find_sets_in_insn (rtx_insn *insn, struct set **psets)
4277 {
4278   struct set *sets = *psets;
4279   int n_sets = 0;
4280   rtx x = PATTERN (insn);
4281 
4282   if (GET_CODE (x) == SET)
4283     {
4284       /* Ignore SETs that are unconditional jumps.
4285 	 They never need cse processing, so this does not hurt.
4286 	 The reason is not efficiency but rather
4287 	 so that we can test at the end for instructions
4288 	 that have been simplified to unconditional jumps
4289 	 and not be misled by unchanged instructions
4290 	 that were unconditional jumps to begin with.  */
4291       if (SET_DEST (x) == pc_rtx
4292 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4293 	;
4294       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4295 	 The hard function value register is used only once, to copy to
4296 	 someplace else, so it isn't worth cse'ing.  */
4297       else if (GET_CODE (SET_SRC (x)) == CALL)
4298 	;
4299       else
4300 	sets[n_sets++].rtl = x;
4301     }
4302   else if (GET_CODE (x) == PARALLEL)
4303     {
4304       int i, lim = XVECLEN (x, 0);
4305 
4306       /* Go over the expressions of the PARALLEL in forward order, to
4307 	 put them in the same order in the SETS array.  */
4308       for (i = 0; i < lim; i++)
4309 	{
4310 	  rtx y = XVECEXP (x, 0, i);
4311 	  if (GET_CODE (y) == SET)
4312 	    {
4313 	      /* As above, we ignore unconditional jumps and call-insns and
4314 		 ignore the result of apply_change_group.  */
4315 	      if (SET_DEST (y) == pc_rtx
4316 		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
4317 		;
4318 	      else if (GET_CODE (SET_SRC (y)) == CALL)
4319 		;
4320 	      else
4321 		sets[n_sets++].rtl = y;
4322 	    }
4323 	}
4324     }
4325 
4326   return n_sets;
4327 }
4328 
4329 /* Subroutine of canonicalize_insn.  X is an ASM_OPERANDS in INSN.  */
4330 
4331 static void
4332 canon_asm_operands (rtx x, rtx_insn *insn)
4333 {
4334   for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4335     {
4336       rtx input = ASM_OPERANDS_INPUT (x, i);
4337       if (!(REG_P (input) && HARD_REGISTER_P (input)))
4338 	{
4339 	  input = canon_reg (input, insn);
4340 	  validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4341 	}
4342     }
4343 }
4344 
4345 /* Where possible, substitute every register reference in the N_SETS
4346    number of SETS in INSN with the canonical register.
4347 
4348    Register canonicalization propagates the earliest register (i.e.
4349    one that is set before INSN) with the same value.  This is a very
4350    useful, simple form of CSE, to clean up warts from expanding GIMPLE
4351    to RTL.  For instance, a CONST for an address is usually expanded
4352    multiple times to loads into different registers, thus creating many
4353    subexpressions of the form:
4354 
4355    (set (reg1) (some_const))
4356    (set (mem (... reg1 ...)) (thing))
4357    (set (reg2) (some_const))
4358    (set (mem (... reg2 ...)) (thing))
4359 
4360    After canonicalizing, the code takes the following form:
4361 
4362    (set (reg1) (some_const))
4363    (set (mem (... reg1 ...)) (thing))
4364    (set (reg2) (some_const))
4365    (set (mem (... reg1 ...)) (thing))
4366 
4367    The set to reg2 is now trivially dead, and the memory reference (or
4368    address, or whatever) may be a candidate for further CSEing.
4369 
4370    In this function, the result of apply_change_group can be ignored;
4371    see canon_reg.  */
4372 
4373 static void
4374 canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
4375 {
4376   struct set *sets = *psets;
4377   rtx tem;
4378   rtx x = PATTERN (insn);
4379   int i;
4380 
4381   if (CALL_P (insn))
4382     {
4383       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4384 	if (GET_CODE (XEXP (tem, 0)) != SET)
4385 	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4386     }
4387 
4388   if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4389     {
4390       canon_reg (SET_SRC (x), insn);
4391       apply_change_group ();
4392       fold_rtx (SET_SRC (x), insn);
4393     }
4394   else if (GET_CODE (x) == CLOBBER)
4395     {
4396       /* If we clobber memory, canon the address.
4397 	 This does nothing when a register is clobbered
4398 	 because we have already invalidated the reg.  */
4399       if (MEM_P (XEXP (x, 0)))
4400 	canon_reg (XEXP (x, 0), insn);
4401     }
4402   else if (GET_CODE (x) == USE
4403 	   && ! (REG_P (XEXP (x, 0))
4404 		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4405     /* Canonicalize a USE of a pseudo register or memory location.  */
4406     canon_reg (x, insn);
4407   else if (GET_CODE (x) == ASM_OPERANDS)
4408     canon_asm_operands (x, insn);
4409   else if (GET_CODE (x) == CALL)
4410     {
4411       canon_reg (x, insn);
4412       apply_change_group ();
4413       fold_rtx (x, insn);
4414     }
4415   else if (DEBUG_INSN_P (insn))
4416     canon_reg (PATTERN (insn), insn);
4417   else if (GET_CODE (x) == PARALLEL)
4418     {
4419       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4420 	{
4421 	  rtx y = XVECEXP (x, 0, i);
4422 	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4423 	    {
4424 	      canon_reg (SET_SRC (y), insn);
4425 	      apply_change_group ();
4426 	      fold_rtx (SET_SRC (y), insn);
4427 	    }
4428 	  else if (GET_CODE (y) == CLOBBER)
4429 	    {
4430 	      if (MEM_P (XEXP (y, 0)))
4431 		canon_reg (XEXP (y, 0), insn);
4432 	    }
4433 	  else if (GET_CODE (y) == USE
4434 		   && ! (REG_P (XEXP (y, 0))
4435 			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4436 	    canon_reg (y, insn);
4437 	  else if (GET_CODE (y) == ASM_OPERANDS)
4438 	    canon_asm_operands (y, insn);
4439 	  else if (GET_CODE (y) == CALL)
4440 	    {
4441 	      canon_reg (y, insn);
4442 	      apply_change_group ();
4443 	      fold_rtx (y, insn);
4444 	    }
4445 	}
4446     }
4447 
4448   if (n_sets == 1 && REG_NOTES (insn) != 0
4449       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4450     {
4451       /* We potentially will process this insn many times.  Therefore,
4452 	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4453 	 unique set in INSN.
4454 
4455 	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4456 	 because cse_insn handles those specially.  */
4457       if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4458 	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4459 	remove_note (insn, tem);
4460       else
4461 	{
4462 	  canon_reg (XEXP (tem, 0), insn);
4463 	  apply_change_group ();
4464 	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4465 	  df_notes_rescan (insn);
4466 	}
4467     }
4468 
4469   /* Canonicalize sources and addresses of destinations.
4470      We do this in a separate pass to avoid problems when a MATCH_DUP is
4471      present in the insn pattern.  In that case, we want to ensure that
4472      we don't break the duplicate nature of the pattern.  So we will replace
4473      both operands at the same time.  Otherwise, we would fail to find an
4474      equivalent substitution in the loop calling validate_change below.
4475 
4476      We used to suppress canonicalization of DEST if it appears in SRC,
4477      but we don't do this any more.  */
4478 
4479   for (i = 0; i < n_sets; i++)
4480     {
4481       rtx dest = SET_DEST (sets[i].rtl);
4482       rtx src = SET_SRC (sets[i].rtl);
4483       rtx new_rtx = canon_reg (src, insn);
4484 
4485       validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4486 
4487       if (GET_CODE (dest) == ZERO_EXTRACT)
4488 	{
4489 	  validate_change (insn, &XEXP (dest, 1),
4490 			   canon_reg (XEXP (dest, 1), insn), 1);
4491 	  validate_change (insn, &XEXP (dest, 2),
4492 			   canon_reg (XEXP (dest, 2), insn), 1);
4493 	}
4494 
4495       while (GET_CODE (dest) == SUBREG
4496 	     || GET_CODE (dest) == ZERO_EXTRACT
4497 	     || GET_CODE (dest) == STRICT_LOW_PART)
4498 	dest = XEXP (dest, 0);
4499 
4500       if (MEM_P (dest))
4501 	canon_reg (dest, insn);
4502     }
4503 
4504   /* Now that we have done all the replacements, we can apply the change
4505      group and see if they all work.  Note that this will cause some
4506      canonicalizations that would have worked individually not to be applied
4507      because some other canonicalization didn't work, but this should not
4508      occur often.
4509 
4510      The result of apply_change_group can be ignored; see canon_reg.  */
4511 
4512   apply_change_group ();
4513 }
4514 
4515 /* Main function of CSE.
4516    First simplify sources and addresses of all assignments
4517    in the instruction, using previously-computed equivalent values.
4518    Then install the new sources and destinations in the table
4519    of available values.  */
4520 
4521 static void
4522 cse_insn (rtx_insn *insn)
4523 {
4524   rtx x = PATTERN (insn);
4525   int i;
4526   rtx tem;
4527   int n_sets = 0;
4528 
4529   rtx src_eqv = 0;
4530   struct table_elt *src_eqv_elt = 0;
4531   int src_eqv_volatile = 0;
4532   int src_eqv_in_memory = 0;
4533   unsigned src_eqv_hash = 0;
4534 
4535   struct set *sets = (struct set *) 0;
4536 
4537   if (GET_CODE (x) == SET)
4538     sets = XALLOCA (struct set);
4539   else if (GET_CODE (x) == PARALLEL)
4540     sets = XALLOCAVEC (struct set, XVECLEN (x, 0));
4541 
4542   this_insn = insn;
4543   /* Records what this insn does to set CC0.  */
4544   this_insn_cc0 = 0;
4545   this_insn_cc0_mode = VOIDmode;
4546 
4547   /* Find all regs explicitly clobbered in this insn,
4548      to ensure they are not replaced with any other regs
4549      elsewhere in this insn.  */
4550   invalidate_from_sets_and_clobbers (insn);
4551 
4552   /* Record all the SETs in this instruction.  */
4553   n_sets = find_sets_in_insn (insn, &sets);
4554 
4555   /* Substitute the canonical register where possible.  */
4556   canonicalize_insn (insn, &sets, n_sets);
4557 
4558   /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4559      if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT.  The
4560      latter condition is necessary because SRC_EQV is handled specially for
4561      this case, and if it isn't set, then there will be no equivalence
4562      for the destination.  */
4563   if (n_sets == 1 && REG_NOTES (insn) != 0
4564       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4565     {
4566 
4567       if (GET_CODE (SET_DEST (sets[0].rtl)) != ZERO_EXTRACT
4568 	  && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4569 	      || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4570 	src_eqv = copy_rtx (XEXP (tem, 0));
4571       /* If DEST is of the form ZERO_EXTRACT, as in:
4572 	 (set (zero_extract:SI (reg:SI 119)
4573 		  (const_int 16 [0x10])
4574 		  (const_int 16 [0x10]))
4575 	      (const_int 51154 [0xc7d2]))
4576 	 the REG_EQUAL note will specify the value of register (reg:SI 119)
4577 	 at this point.  This differs from SRC_EQV; we can, however,
4578 	 calculate SRC_EQV from the position and width of the ZERO_EXTRACT.  */
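           /* In the example above (and assuming !BITS_BIG_ENDIAN), the
     	 shift computed below is 16 and the mask is 0xffff, so a REG_EQUAL
     	 value of (const_int 0xc7d21234) would yield SRC_EQV
     	 (const_int 0xc7d2).  */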
4579       else if (GET_CODE (SET_DEST (sets[0].rtl)) == ZERO_EXTRACT
4580 	       && CONST_INT_P (XEXP (tem, 0))
4581 	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 1))
4582 	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 2)))
4583 	{
4584 	  rtx dest_reg = XEXP (SET_DEST (sets[0].rtl), 0);
4585 	  /* This is the mode of XEXP (tem, 0) as well.  */
4586 	  scalar_int_mode dest_mode
4587 	    = as_a <scalar_int_mode> (GET_MODE (dest_reg));
4588 	  rtx width = XEXP (SET_DEST (sets[0].rtl), 1);
4589 	  rtx pos = XEXP (SET_DEST (sets[0].rtl), 2);
4590 	  HOST_WIDE_INT val = INTVAL (XEXP (tem, 0));
4591 	  HOST_WIDE_INT mask;
4592 	  unsigned int shift;
4593 	  if (BITS_BIG_ENDIAN)
4594 	    shift = (GET_MODE_PRECISION (dest_mode)
4595 		     - INTVAL (pos) - INTVAL (width));
4596 	  else
4597 	    shift = INTVAL (pos);
4598 	  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
4599 	    mask = HOST_WIDE_INT_M1;
4600 	  else
4601 	    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
4602 	  val = (val >> shift) & mask;
4603 	  src_eqv = GEN_INT (val);
4604 	}
4605     }
4606 
4607   /* Set sets[i].src_elt to the class each source belongs to.
4608      Detect assignments from or to volatile things
4609      and set set[i] to zero so they will be ignored
4610      in the rest of this function.
4611 
4612      Nothing in this loop changes the hash table or the register chains.  */
4613 
4614   for (i = 0; i < n_sets; i++)
4615     {
4616       bool repeat = false;
4617       bool mem_noop_insn = false;
4618       rtx src, dest;
4619       rtx src_folded;
4620       struct table_elt *elt = 0, *p;
4621       machine_mode mode;
4622       rtx src_eqv_here;
4623       rtx src_const = 0;
4624       rtx src_related = 0;
4625       bool src_related_is_const_anchor = false;
4626       struct table_elt *src_const_elt = 0;
4627       int src_cost = MAX_COST;
4628       int src_eqv_cost = MAX_COST;
4629       int src_folded_cost = MAX_COST;
4630       int src_related_cost = MAX_COST;
4631       int src_elt_cost = MAX_COST;
4632       int src_regcost = MAX_COST;
4633       int src_eqv_regcost = MAX_COST;
4634       int src_folded_regcost = MAX_COST;
4635       int src_related_regcost = MAX_COST;
4636       int src_elt_regcost = MAX_COST;
4637       /* Set nonzero if we need to call force_const_mem on the
4638 	 contents of src_folded before using it.  */
4639       int src_folded_force_flag = 0;
4640       scalar_int_mode int_mode;
4641 
4642       dest = SET_DEST (sets[i].rtl);
4643       src = SET_SRC (sets[i].rtl);
4644 
4645       /* If SRC is a constant that has no machine mode,
4646 	 hash it with the destination's machine mode.
4647 	 This way we can keep different modes separate.  */
4648 
4649       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4650       sets[i].mode = mode;
4651 
4652       if (src_eqv)
4653 	{
4654 	  machine_mode eqvmode = mode;
4655 	  if (GET_CODE (dest) == STRICT_LOW_PART)
4656 	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4657 	  do_not_record = 0;
4658 	  hash_arg_in_memory = 0;
4659 	  src_eqv_hash = HASH (src_eqv, eqvmode);
4660 
4661 	  /* Find the equivalence class for the equivalent expression.  */
4662 
4663 	  if (!do_not_record)
4664 	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4665 
4666 	  src_eqv_volatile = do_not_record;
4667 	  src_eqv_in_memory = hash_arg_in_memory;
4668 	}
4669 
4670       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4671 	 value of the INNER register, not the destination.  So it is not
4672 	 a valid substitution for the source.  But save it for later.  */
4673       if (GET_CODE (dest) == STRICT_LOW_PART)
4674 	src_eqv_here = 0;
4675       else
4676 	src_eqv_here = src_eqv;
4677 
4678       /* Simplify any foldable subexpressions in SRC.  Then get the fully-
4679 	 simplified result, which may not necessarily be valid.  */
4680       src_folded = fold_rtx (src, NULL);
4681 
4682 #if 0
4683       /* ??? This caused bad code to be generated for the m68k port with -O2.
4684 	 Suppose src is (CONST_INT -1), and that after truncation src_folded
4685 	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
4686 	 At the end we will add src and src_const to the same equivalence
4687 	 class.  We now have 3 and -1 on the same equivalence class.  This
4688 	 causes later instructions to be mis-optimized.  */
4689       /* If storing a constant in a bitfield, pre-truncate the constant
4690 	 so we will be able to record it later.  */
4691       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4692 	{
4693 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4694 
4695 	  if (CONST_INT_P (src)
4696 	      && CONST_INT_P (width)
4697 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4698 	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4699 	    src_folded
4700 	      = GEN_INT (INTVAL (src) & ((HOST_WIDE_INT_1
4701 					  << INTVAL (width)) - 1));
4702 	}
4703 #endif
4704 
4705       /* Compute SRC's hash code, and also notice if it
4706 	 should not be recorded at all.  In that case,
4707 	 prevent any further processing of this assignment.  */
4708       do_not_record = 0;
4709       hash_arg_in_memory = 0;
4710 
4711       sets[i].src = src;
4712       sets[i].src_hash = HASH (src, mode);
4713       sets[i].src_volatile = do_not_record;
4714       sets[i].src_in_memory = hash_arg_in_memory;
4715 
4716       /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4717 	 a pseudo, do not record SRC.  Using SRC as a replacement for
4718 	 anything else will be incorrect in that situation.  Note that
4719 	 this usually occurs only for stack slots, in which case all the
4720 	 RTL would be referring to SRC, so we don't lose any optimization
4721 	 opportunities by not having SRC in the hash table.  */
4722 
4723       if (MEM_P (src)
4724 	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4725 	  && REG_P (dest)
4726 	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4727 	sets[i].src_volatile = 1;
4728 
4729       else if (GET_CODE (src) == ASM_OPERANDS
4730 	       && GET_CODE (x) == PARALLEL)
4731 	{
4732 	  /* Do not record result of a non-volatile inline asm with
4733 	     more than one result.  */
4734 	  if (n_sets > 1)
4735 	    sets[i].src_volatile = 1;
4736 
4737 	  int j, lim = XVECLEN (x, 0);
4738 	  for (j = 0; j < lim; j++)
4739 	    {
4740 	      rtx y = XVECEXP (x, 0, j);
4741 	      /* And do not record result of a non-volatile inline asm
4742 		 with "memory" clobber.  */
4743 	      if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4744 		{
4745 		  sets[i].src_volatile = 1;
4746 		  break;
4747 		}
4748 	    }
4749 	}
4750 
4751 #if 0
4752       /* It is no longer clear why we used to do this, but it doesn't
4753 	 appear to still be needed.  So let's try without it since this
4754 	 code hurts cse'ing widened ops.  */
4755       /* If source is a paradoxical subreg (such as QI treated as an SI),
4756 	 treat it as volatile.  It may do the work of an SI in one context
4757 	 where the extra bits are not being used, but cannot replace an SI
4758 	 in general.  */
4759       if (paradoxical_subreg_p (src))
4760 	sets[i].src_volatile = 1;
4761 #endif
4762 
4763       /* Locate all possible equivalent forms for SRC.  Try to replace
4764          SRC in the insn with each cheaper equivalent.
4765 
4766          We have the following types of equivalents: SRC itself, a folded
4767          version, a value given in a REG_EQUAL note, or a value related
4768 	 to a constant.
4769 
4770          Each of these equivalents may be part of an additional class
4771          of equivalents (if more than one is in the table, they must be in
4772          the same class; we check for this).
4773 
4774 	 If the source is volatile, we don't do any table lookups.
4775 
4776          We note any constant equivalent for possible later use in a
4777          REG_NOTE.  */
4778 
4779       if (!sets[i].src_volatile)
4780 	elt = lookup (src, sets[i].src_hash, mode);
4781 
4782       sets[i].src_elt = elt;
4783 
4784       if (elt && src_eqv_here && src_eqv_elt)
4785 	{
4786 	  if (elt->first_same_value != src_eqv_elt->first_same_value)
4787 	    {
4788 	      /* The REG_EQUAL is indicating that two formerly distinct
4789 		 classes are now equivalent.  So merge them.  */
4790 	      merge_equiv_classes (elt, src_eqv_elt);
4791 	      src_eqv_hash = HASH (src_eqv, elt->mode);
4792 	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4793 	    }
4794 
4795 	  src_eqv_here = 0;
4796 	}
4797 
4798       else if (src_eqv_elt)
4799 	elt = src_eqv_elt;
4800 
4801       /* Try to find a constant somewhere and record it in `src_const'.
4802 	 Record its table element, if any, in `src_const_elt'.  Look in
4803 	 any known equivalences first.  (If the constant is not in the
4804 	 table, also set `sets[i].src_const_hash').  */
4805       if (elt)
4806 	for (p = elt->first_same_value; p; p = p->next_same_value)
4807 	  if (p->is_const)
4808 	    {
4809 	      src_const = p->exp;
4810 	      src_const_elt = elt;
4811 	      break;
4812 	    }
4813 
4814       if (src_const == 0
4815 	  && (CONSTANT_P (src_folded)
4816 	      /* Consider (minus (label_ref L1) (label_ref L2)) as
4817 		 "constant" here so we will record it. This allows us
4818 		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
4819 	      || (GET_CODE (src_folded) == MINUS
4820 		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4821 		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4822 	src_const = src_folded, src_const_elt = elt;
4823       else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4824 	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4825 
4826       /* If we don't know if the constant is in the table, get its
4827 	 hash code and look it up.  */
4828       if (src_const && src_const_elt == 0)
4829 	{
4830 	  sets[i].src_const_hash = HASH (src_const, mode);
4831 	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4832 	}
4833 
4834       sets[i].src_const = src_const;
4835       sets[i].src_const_elt = src_const_elt;
4836 
4837       /* If the constant and our source are both in the table, mark them as
4838 	 equivalent.  Otherwise, if a constant is in the table but the source
4839 	 isn't, set ELT to it.  */
4840       if (src_const_elt && elt
4841 	  && src_const_elt->first_same_value != elt->first_same_value)
4842 	merge_equiv_classes (elt, src_const_elt);
4843       else if (src_const_elt && elt == 0)
4844 	elt = src_const_elt;
4845 
4846       /* See if there is a register linearly related to a constant
4847          equivalent of SRC.  */
4848       if (src_const
4849 	  && (GET_CODE (src_const) == CONST
4850 	      || (src_const_elt && src_const_elt->related_value != 0)))
4851 	{
4852 	  src_related = use_related_value (src_const, src_const_elt);
4853 	  if (src_related)
4854 	    {
4855 	      struct table_elt *src_related_elt
4856 		= lookup (src_related, HASH (src_related, mode), mode);
4857 	      if (src_related_elt && elt)
4858 		{
4859 		  if (elt->first_same_value
4860 		      != src_related_elt->first_same_value)
4861 		    /* This can occur when we previously saw a CONST
4862 		       involving a SYMBOL_REF and then see the SYMBOL_REF
4863 		       twice.  Merge the involved classes.  */
4864 		    merge_equiv_classes (elt, src_related_elt);
4865 
4866 		  src_related = 0;
4867 		  src_related_elt = 0;
4868 		}
4869 	      else if (src_related_elt && elt == 0)
4870 		elt = src_related_elt;
4871 	    }
4872 	}
4873 
4874       /* See if we have a CONST_INT that is already in a register in a
4875 	 wider mode.  */
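      /* For example (register numbers hypothetical): if (reg:SI 100) is
	 already known to hold (const_int 5) and this set needs (const_int 5)
	 in QImode, the gen_lowpart call below yields the low part of that
	 register, e.g. (subreg:QI (reg:SI 100) 0) on a little-endian
	 target, as SRC_RELATED.  */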
4876 
4877       if (src_const && src_related == 0 && CONST_INT_P (src_const)
4878 	  && is_int_mode (mode, &int_mode)
4879 	  && GET_MODE_PRECISION (int_mode) < BITS_PER_WORD)
4880 	{
4881 	  opt_scalar_int_mode wider_mode_iter;
4882 	  FOR_EACH_WIDER_MODE (wider_mode_iter, int_mode)
4883 	    {
4884 	      scalar_int_mode wider_mode = wider_mode_iter.require ();
4885 	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
4886 		break;
4887 
4888 	      struct table_elt *const_elt
4889 		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4890 
4891 	      if (const_elt == 0)
4892 		continue;
4893 
4894 	      for (const_elt = const_elt->first_same_value;
4895 		   const_elt; const_elt = const_elt->next_same_value)
4896 		if (REG_P (const_elt->exp))
4897 		  {
4898 		    src_related = gen_lowpart (int_mode, const_elt->exp);
4899 		    break;
4900 		  }
4901 
4902 	      if (src_related != 0)
4903 		break;
4904 	    }
4905 	}
4906 
4907       /* Another possibility is that we have an AND with a constant in
4908 	 a mode narrower than a word.  If so, it might have been generated
4909 	 as part of an "if" which would narrow the AND.  If we already
4910 	 have done the AND in a wider mode, we can use a SUBREG of that
4911 	 value.  */
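      /* A hypothetical instance: if SRC is (and:QI (reg:QI 100)
	 (const_int 15)) and the table shows that the wider
	 (and:SI (subreg:SI (reg:QI 100) 0) (const_int 15)) already lives
	 in (reg:SI 102), then (subreg:QI (reg:SI 102) 0) can serve as
	 SRC_RELATED.  */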
4912 
4913       if (flag_expensive_optimizations && ! src_related
4914 	  && is_a <scalar_int_mode> (mode, &int_mode)
4915 	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4916 	  && GET_MODE_SIZE (int_mode) < UNITS_PER_WORD)
4917 	{
4918 	  opt_scalar_int_mode tmode_iter;
4919 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4920 
4921 	  FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
4922 	    {
4923 	      scalar_int_mode tmode = tmode_iter.require ();
4924 	      if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
4925 		break;
4926 
4927 	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4928 	      struct table_elt *larger_elt;
4929 
4930 	      if (inner)
4931 		{
4932 		  PUT_MODE (new_and, tmode);
4933 		  XEXP (new_and, 0) = inner;
4934 		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4935 		  if (larger_elt == 0)
4936 		    continue;
4937 
4938 		  for (larger_elt = larger_elt->first_same_value;
4939 		       larger_elt; larger_elt = larger_elt->next_same_value)
4940 		    if (REG_P (larger_elt->exp))
4941 		      {
4942 			src_related
4943 			  = gen_lowpart (int_mode, larger_elt->exp);
4944 			break;
4945 		      }
4946 
4947 		  if (src_related)
4948 		    break;
4949 		}
4950 	    }
4951 	}
4952 
4953       /* See if a MEM has already been loaded with a widening operation;
4954 	 if it has, we can use a subreg of that.  Many CISC machines
4955 	 also have such operations, but this is only likely to be
4956 	 beneficial on these machines.  */
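      /* Sketch of the idea (registers hypothetical): on a target whose
	 loads implicitly zero-extend, if (zero_extend:SI (mem:QI addr))
	 was earlier loaded into (reg:SI 103), then a QImode read of the
	 same MEM can use (subreg:QI (reg:SI 103) 0) instead of reloading
	 from memory.  */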
4957 
4958       rtx_code extend_op;
4959       if (flag_expensive_optimizations && src_related == 0
4960 	  && MEM_P (src) && ! do_not_record
4961 	  && is_a <scalar_int_mode> (mode, &int_mode)
4962 	  && (extend_op = load_extend_op (int_mode)) != UNKNOWN)
4963 	{
4964 	  struct rtx_def memory_extend_buf;
4965 	  rtx memory_extend_rtx = &memory_extend_buf;
4966 
4967 	  /* Set what we are trying to extend and the operation it might
4968 	     have been extended with.  */
4969 	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
4970 	  PUT_CODE (memory_extend_rtx, extend_op);
4971 	  XEXP (memory_extend_rtx, 0) = src;
4972 
4973 	  opt_scalar_int_mode tmode_iter;
4974 	  FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
4975 	    {
4976 	      struct table_elt *larger_elt;
4977 
4978 	      scalar_int_mode tmode = tmode_iter.require ();
4979 	      if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
4980 		break;
4981 
4982 	      PUT_MODE (memory_extend_rtx, tmode);
4983 	      larger_elt = lookup (memory_extend_rtx,
4984 				   HASH (memory_extend_rtx, tmode), tmode);
4985 	      if (larger_elt == 0)
4986 		continue;
4987 
4988 	      for (larger_elt = larger_elt->first_same_value;
4989 		   larger_elt; larger_elt = larger_elt->next_same_value)
4990 		if (REG_P (larger_elt->exp))
4991 		  {
4992 		    src_related = gen_lowpart (int_mode, larger_elt->exp);
4993 		    break;
4994 		  }
4995 
4996 	      if (src_related)
4997 		break;
4998 	    }
4999 	}
5000 
5001       /* Try to express the constant using a register+offset expression
5002 	 derived from a constant anchor.  */
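      /* For instance, on a hypothetical target that anchors constants at
	 multiples of 0x8000: if (reg:SI 104) is known to hold 0x8000 and
	 SRC_CONST is 0x8004, try_const_anchors can return
	 (plus:SI (reg:SI 104) (const_int 4)).  */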
5003 
5004       if (targetm.const_anchor
5005 	  && !src_related
5006 	  && src_const
5007 	  && GET_CODE (src_const) == CONST_INT)
5008 	{
5009 	  src_related = try_const_anchors (src_const, mode);
5010 	  src_related_is_const_anchor = src_related != NULL_RTX;
5011 	}
5012 
5013 
5014       if (src == src_folded)
5015 	src_folded = 0;
5016 
5017       /* At this point, ELT, if nonzero, points to a class of expressions
5018          equivalent to the source of this SET, while SRC, SRC_EQV, SRC_FOLDED,
5019 	 and SRC_RELATED, if nonzero, each contain additional equivalent
5020 	 expressions.  Prune these latter expressions by deleting expressions
5021 	 already in the equivalence class.
5022 
5023 	 Check for an equivalent identical to the destination.  If found,
5024 	 this is the preferred equivalent since it will likely lead to
5025 	 elimination of the insn.  Indicate this by placing it in
5026 	 `src_related'.  */
5027 
5028       if (elt)
5029 	elt = elt->first_same_value;
5030       for (p = elt; p; p = p->next_same_value)
5031 	{
5032 	  enum rtx_code code = GET_CODE (p->exp);
5033 
5034 	  /* If the expression is not valid, ignore it.  Then we do not
5035 	     have to check for validity below.  In most cases, we can use
5036 	     `rtx_equal_p', since canonicalization has already been done.  */
5037 	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5038 	    continue;
5039 
5040 	  /* Also skip paradoxical subregs, unless that's what we're
5041 	     looking for.  */
5042 	  if (paradoxical_subreg_p (p->exp)
5043 	      && ! (src != 0
5044 		    && GET_CODE (src) == SUBREG
5045 		    && GET_MODE (src) == GET_MODE (p->exp)
5046 		    && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5047 					 GET_MODE (SUBREG_REG (p->exp)))))
5048 	    continue;
5049 
5050 	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5051 	    src = 0;
5052 	  else if (src_folded && GET_CODE (src_folded) == code
5053 		   && rtx_equal_p (src_folded, p->exp))
5054 	    src_folded = 0;
5055 	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5056 		   && rtx_equal_p (src_eqv_here, p->exp))
5057 	    src_eqv_here = 0;
5058 	  else if (src_related && GET_CODE (src_related) == code
5059 		   && rtx_equal_p (src_related, p->exp))
5060 	    src_related = 0;
5061 
5062 	  /* This is the same as the destination of the insn; we want
5063 	     to prefer it.  Copy it to src_related.  The code below will
5064 	     then give it a negative cost.  */
5065 	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5066 	    src_related = dest;
5067 	}
5068 
5069       /* Find the cheapest valid equivalent, trying all the available
5070          possibilities.  Prefer items not in the hash table to ones
5071          that are when they are equal cost.  Note that we can never
5072          worsen an insn as the current contents will also succeed.
5073 	 If we find an equivalent identical to the destination, use it as best,
5074 	 since this insn will probably be eliminated in that case.  */
5075       if (src)
5076 	{
5077 	  if (rtx_equal_p (src, dest))
5078 	    src_cost = src_regcost = -1;
5079 	  else
5080 	    {
5081 	      src_cost = COST (src, mode);
5082 	      src_regcost = approx_reg_cost (src);
5083 	    }
5084 	}
5085 
5086       if (src_eqv_here)
5087 	{
5088 	  if (rtx_equal_p (src_eqv_here, dest))
5089 	    src_eqv_cost = src_eqv_regcost = -1;
5090 	  else
5091 	    {
5092 	      src_eqv_cost = COST (src_eqv_here, mode);
5093 	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5094 	    }
5095 	}
5096 
5097       if (src_folded)
5098 	{
5099 	  if (rtx_equal_p (src_folded, dest))
5100 	    src_folded_cost = src_folded_regcost = -1;
5101 	  else
5102 	    {
5103 	      src_folded_cost = COST (src_folded, mode);
5104 	      src_folded_regcost = approx_reg_cost (src_folded);
5105 	    }
5106 	}
5107 
5108       if (src_related)
5109 	{
5110 	  if (rtx_equal_p (src_related, dest))
5111 	    src_related_cost = src_related_regcost = -1;
5112 	  else
5113 	    {
5114 	      src_related_cost = COST (src_related, mode);
5115 	      src_related_regcost = approx_reg_cost (src_related);
5116 
5117 	      /* If a const-anchor is used to synthesize a constant that
5118 		 normally requires multiple instructions then slightly prefer
5119 		 it over the original sequence.  These instructions are likely
5120 		 to become redundant now.  We can't compare against the cost
5121 		 of src_eqv_here because, on MIPS for example, multi-insn
5122 		 constants have zero cost; they are assumed to be hoisted from
5123 		 loops.  */
5124 	      if (src_related_is_const_anchor
5125 		  && src_related_cost == src_cost
5126 		  && src_eqv_here)
5127 		src_related_cost--;
5128 	    }
5129 	}
5130 
5131       /* If this was an indirect jump insn, a known label will really be
5132 	 cheaper even though it looks more expensive.  */
5133       if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5134 	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5135 
5136       /* Terminate loop when replacement made.  This must terminate since
5137          the current contents will be tested and will always be valid.  */
5138       while (1)
5139 	{
5140 	  rtx trial;
5141 
5142 	  /* Skip invalid entries.  */
5143 	  while (elt && !REG_P (elt->exp)
5144 		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5145 	    elt = elt->next_same_value;
5146 
5147 	  /* A paradoxical subreg would be bad here: it'll be the right
5148 	     size, but later may be adjusted so that the upper bits aren't
5149 	     what we want.  So reject it.  */
5150 	  if (elt != 0
5151 	      && paradoxical_subreg_p (elt->exp)
5152 	      /* It is okay, though, if the rtx we're trying to match
5153 		 will ignore any of the bits we can't predict.  */
5154 	      && ! (src != 0
5155 		    && GET_CODE (src) == SUBREG
5156 		    && GET_MODE (src) == GET_MODE (elt->exp)
5157 		    && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5158 					 GET_MODE (SUBREG_REG (elt->exp)))))
5159 	    {
5160 	      elt = elt->next_same_value;
5161 	      continue;
5162 	    }
5163 
5164 	  if (elt)
5165 	    {
5166 	      src_elt_cost = elt->cost;
5167 	      src_elt_regcost = elt->regcost;
5168 	    }
5169 
5170 	  /* Find the cheapest candidate and skip it next time.  For items
5171 	     of equal cost, use this order:
5172 	     src_folded, src, src_eqv, src_related and hash table entry.  */
5173 	  if (src_folded
5174 	      && preferable (src_folded_cost, src_folded_regcost,
5175 			     src_cost, src_regcost) <= 0
5176 	      && preferable (src_folded_cost, src_folded_regcost,
5177 			     src_eqv_cost, src_eqv_regcost) <= 0
5178 	      && preferable (src_folded_cost, src_folded_regcost,
5179 			     src_related_cost, src_related_regcost) <= 0
5180 	      && preferable (src_folded_cost, src_folded_regcost,
5181 			     src_elt_cost, src_elt_regcost) <= 0)
5182 	    {
5183 	      trial = src_folded, src_folded_cost = MAX_COST;
5184 	      if (src_folded_force_flag)
5185 		{
5186 		  rtx forced = force_const_mem (mode, trial);
5187 		  if (forced)
5188 		    trial = forced;
5189 		}
5190 	    }
5191 	  else if (src
5192 		   && preferable (src_cost, src_regcost,
5193 				  src_eqv_cost, src_eqv_regcost) <= 0
5194 		   && preferable (src_cost, src_regcost,
5195 				  src_related_cost, src_related_regcost) <= 0
5196 		   && preferable (src_cost, src_regcost,
5197 				  src_elt_cost, src_elt_regcost) <= 0)
5198 	    trial = src, src_cost = MAX_COST;
5199 	  else if (src_eqv_here
5200 		   && preferable (src_eqv_cost, src_eqv_regcost,
5201 				  src_related_cost, src_related_regcost) <= 0
5202 		   && preferable (src_eqv_cost, src_eqv_regcost,
5203 				  src_elt_cost, src_elt_regcost) <= 0)
5204 	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
5205 	  else if (src_related
5206 		   && preferable (src_related_cost, src_related_regcost,
5207 				  src_elt_cost, src_elt_regcost) <= 0)
5208 	    trial = src_related, src_related_cost = MAX_COST;
5209 	  else
5210 	    {
5211 	      trial = elt->exp;
5212 	      elt = elt->next_same_value;
5213 	      src_elt_cost = MAX_COST;
5214 	    }
5215 
5216 	  /* Avoid creation of overlapping memory moves.  */
5217 	  if (MEM_P (trial) && MEM_P (dest) && !rtx_equal_p (trial, dest))
5218 	    {
5219 	      rtx src, dest;
5220 
5221 	      /* BLKmode moves are not handled by cse anyway.  */
5222 	      if (GET_MODE (trial) == BLKmode)
5223 		break;
5224 
5225 	      src = canon_rtx (trial);
5226 	      dest = canon_rtx (SET_DEST (sets[i].rtl));
5227 
5228 	      if (!MEM_P (src) || !MEM_P (dest)
5229 		  || !nonoverlapping_memrefs_p (src, dest, false))
5230 		break;
5231 	    }
5232 
5233 	  /* Try to optimize
5234 	     (set (reg:M N) (const_int A))
5235 	     (set (reg:M2 O) (const_int B))
5236 	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5237 		  (reg:M2 O)).  */
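	  /* A worked example with hypothetical values: after
	     (set (reg:SI 1) (const_int 0x1200)), storing a known
	     (const_int 0x34) into
	     (zero_extract:SI (reg:SI 1) (const_int 8) (const_int 0))
	     gives mask = 0xff and shift = 0 (little-endian bit order),
	     so val = (0x1200 & ~0xff) | 0x34 = 0x1234 and the insn
	     becomes (set (reg:SI 1) (const_int 0x1234)).  */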
5238 	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5239 	      && CONST_INT_P (trial)
5240 	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5241 	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5242 	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5243 	      && (known_ge
5244 		  (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl))),
5245 		   INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))))
5246 	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5247 		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5248 		  <= HOST_BITS_PER_WIDE_INT))
5249 	    {
5250 	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5251 	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5252 	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5253 	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5254 	      struct table_elt *dest_elt
5255 		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5256 	      rtx dest_cst = NULL;
5257 
5258 	      if (dest_elt)
5259 		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5260 		  if (p->is_const && CONST_INT_P (p->exp))
5261 		    {
5262 		      dest_cst = p->exp;
5263 		      break;
5264 		    }
5265 	      if (dest_cst)
5266 		{
5267 		  HOST_WIDE_INT val = INTVAL (dest_cst);
5268 		  HOST_WIDE_INT mask;
5269 		  unsigned int shift;
5270 		  /* This is the mode of DEST_CST as well.  */
5271 		  scalar_int_mode dest_mode
5272 		    = as_a <scalar_int_mode> (GET_MODE (dest_reg));
5273 		  if (BITS_BIG_ENDIAN)
5274 		    shift = GET_MODE_PRECISION (dest_mode)
5275 			    - INTVAL (pos) - INTVAL (width);
5276 		  else
5277 		    shift = INTVAL (pos);
5278 		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5279 		    mask = HOST_WIDE_INT_M1;
5280 		  else
5281 		    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
5282 		  val &= ~(mask << shift);
5283 		  val |= (INTVAL (trial) & mask) << shift;
5284 		  val = trunc_int_for_mode (val, dest_mode);
5285 		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5286 					   dest_reg, 1);
5287 		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5288 					   GEN_INT (val), 1);
5289 		  if (apply_change_group ())
5290 		    {
5291 		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5292 		      if (note)
5293 			{
5294 			  remove_note (insn, note);
5295 			  df_notes_rescan (insn);
5296 			}
5297 		      src_eqv = NULL_RTX;
5298 		      src_eqv_elt = NULL;
5299 		      src_eqv_volatile = 0;
5300 		      src_eqv_in_memory = 0;
5301 		      src_eqv_hash = 0;
5302 		      repeat = true;
5303 		      break;
5304 		    }
5305 		}
5306 	    }
5307 
5308 	  /* We don't normally have an insn matching (set (pc) (pc)), so
5309 	     check for this separately here.  We will delete such an
5310 	     insn below.
5311 
5312 	     For other cases such as a table jump or conditional jump
5313 	     where we know the ultimate target, go ahead and replace the
5314 	     operand.  While that may not make a valid insn, we will
5315 	     reemit the jump below (and also insert any necessary
5316 	     barriers).  */
5317 	  if (n_sets == 1 && dest == pc_rtx
5318 	      && (trial == pc_rtx
5319 		  || (GET_CODE (trial) == LABEL_REF
5320 		      && ! condjump_p (insn))))
5321 	    {
5322 	      /* Don't substitute non-local labels; this confuses the CFG.  */
5323 	      if (GET_CODE (trial) == LABEL_REF
5324 		  && LABEL_REF_NONLOCAL_P (trial))
5325 		continue;
5326 
5327 	      SET_SRC (sets[i].rtl) = trial;
5328 	      cse_jumps_altered = true;
5329 	      break;
5330 	    }
5331 
5332 	  /* Similarly, lots of targets don't allow no-op
5333 	     (set (mem x) (mem x)) moves.  */
5334 	  else if (n_sets == 1
5335 		   && MEM_P (trial)
5336 		   && MEM_P (dest)
5337 		   && rtx_equal_p (trial, dest)
5338 		   && !side_effects_p (dest)
5339 		   && (cfun->can_delete_dead_exceptions
5340 		       || insn_nothrow_p (insn)))
5341 	    {
5342 	      SET_SRC (sets[i].rtl) = trial;
5343 	      mem_noop_insn = true;
5344 	      break;
5345 	    }
5346 
5347 	  /* Reject certain invalid forms of CONST that we create.  */
5348 	  else if (CONSTANT_P (trial)
5349 		   && GET_CODE (trial) == CONST
5350 		   /* Reject cases that will cause decode_rtx_const to
5351 		      die.  On the alpha when simplifying a switch, we
5352 		      get (const (truncate (minus (label_ref)
5353 		      (label_ref)))).  */
5354 		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5355 		       /* Likewise on IA-64, except without the
5356 			  truncate.  */
5357 		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5358 			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5359 			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5360 	    /* Do nothing for this case.  */
5361 	    ;
5362 
5363 	  /* Look for a substitution that makes a valid insn.  */
5364 	  else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5365 					    trial, 0))
5366 	    {
5367 	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5368 
5369 	      /* The result of apply_change_group can be ignored; see
5370 		 canon_reg.  */
5371 
5372 	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5373 	      apply_change_group ();
5374 
5375 	      break;
5376 	    }
5377 
5378 	  /* If we previously found constant pool entries for
5379 	     constants and this is a constant, try making a
5380 	     pool entry.  Put it in src_folded unless we have already done
5381 	     this, since that is where it likely came from.  */
5382 
5383 	  else if (constant_pool_entries_cost
5384 		   && CONSTANT_P (trial)
5385 		   && (src_folded == 0
5386 		       || (!MEM_P (src_folded)
5387 			   && ! src_folded_force_flag))
5388 		   && GET_MODE_CLASS (mode) != MODE_CC
5389 		   && mode != VOIDmode)
5390 	    {
5391 	      src_folded_force_flag = 1;
5392 	      src_folded = trial;
5393 	      src_folded_cost = constant_pool_entries_cost;
5394 	      src_folded_regcost = constant_pool_entries_regcost;
5395 	    }
5396 	}
5397 
5398       /* If we changed the insn too much, handle this set from scratch.  */
5399       if (repeat)
5400 	{
5401 	  i--;
5402 	  continue;
5403 	}
5404 
5405       src = SET_SRC (sets[i].rtl);
5406 
5407       /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5408 	 However, there is an important exception:  If both are registers
5409 	 that are not the head of their equivalence class, replace SET_SRC
5410 	 with the head of the class.  If we do not do this, we will have
5411 	 both registers live over a portion of the basic block.  This way,
5412 	 their lifetimes will likely abut instead of overlapping.  */
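      /* E.g. (pseudo numbers invented): if (reg 101) and (reg 100) are in
	 the same equivalence class with (reg 100) as its first register,
	 the no-op (set (reg 101) (reg 101)) left by earlier substitution
	 is rewritten as (set (reg 101) (reg 100)).  */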
5413       if (REG_P (dest)
5414 	  && REGNO_QTY_VALID_P (REGNO (dest)))
5415 	{
5416 	  int dest_q = REG_QTY (REGNO (dest));
5417 	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5418 
5419 	  if (dest_ent->mode == GET_MODE (dest)
5420 	      && dest_ent->first_reg != REGNO (dest)
5421 	      && REG_P (src) && REGNO (src) == REGNO (dest)
5422 	      /* Don't do this if the original insn had a hard reg as
5423 		 SET_SRC or SET_DEST.  */
5424 	      && (!REG_P (sets[i].src)
5425 		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5426 	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5427 	    /* We can't call canon_reg here because it won't do anything if
5428 	       SRC is a hard register.  */
5429 	    {
5430 	      int src_q = REG_QTY (REGNO (src));
5431 	      struct qty_table_elem *src_ent = &qty_table[src_q];
5432 	      int first = src_ent->first_reg;
5433 	      rtx new_src
5434 		= (first >= FIRST_PSEUDO_REGISTER
5435 		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5436 
5437 	      /* We must use validate_change even for this, because this
5438 		 might be a special no-op instruction, suitable only to
5439 		 tag notes onto.  */
5440 	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5441 		{
5442 		  src = new_src;
5443 		  /* If we had a constant that is cheaper than what we are now
5444 		     setting SRC to, use that constant.  We ignored it when we
5445 		     thought we could make this into a no-op.  */
5446 		  if (src_const && COST (src_const, mode) < COST (src, mode)
5447 		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5448 					  src_const, 0))
5449 		    src = src_const;
5450 		}
5451 	    }
5452 	}
5453 
5454       /* If we made a change, recompute SRC values.  */
5455       if (src != sets[i].src)
5456 	{
5457 	  do_not_record = 0;
5458 	  hash_arg_in_memory = 0;
5459 	  sets[i].src = src;
5460 	  sets[i].src_hash = HASH (src, mode);
5461 	  sets[i].src_volatile = do_not_record;
5462 	  sets[i].src_in_memory = hash_arg_in_memory;
5463 	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5464 	}
5465 
5466       /* If this is a single SET, we are setting a register, and we have an
5467 	 equivalent constant, we want to add a REG_EQUAL note if the constant
5468 	 is different from the source.  We don't want to do it for a constant
5469 	 pseudo since verifying that this pseudo hasn't been eliminated is a
5470 	 pain; moreover such a note won't help anything.
5471 
5472 	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5473 	 which can be created for a reference to a compile time computable
5474 	 entry in a jump table.  */
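      /* E.g. if the insn is (set (reg 105) (plus (reg 106) (const_int 4)))
	 and (reg 106) is known to hold 3, this attaches a
	 REG_EQUAL (const_int 7) note to the insn (register numbers
	 hypothetical).  */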
5475       if (n_sets == 1
5476 	  && REG_P (dest)
5477 	  && src_const
5478 	  && !REG_P (src_const)
5479 	  && !(GET_CODE (src_const) == SUBREG
5480 	       && REG_P (SUBREG_REG (src_const)))
5481 	  && !(GET_CODE (src_const) == CONST
5482 	       && GET_CODE (XEXP (src_const, 0)) == MINUS
5483 	       && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5484 	       && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5485 	  && !rtx_equal_p (src, src_const))
5486 	{
5487 	  /* Make sure that the rtx is not shared.  */
5488 	  src_const = copy_rtx (src_const);
5489 
5490 	  /* Record the actual constant value in a REG_EQUAL note,
5491 	     making a new one if one does not already exist.  */
5492 	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5493 	  df_notes_rescan (insn);
5494 	}
5495 
5496       /* Now deal with the destination.  */
5497       do_not_record = 0;
5498 
5499       /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5500       while (GET_CODE (dest) == SUBREG
5501 	     || GET_CODE (dest) == ZERO_EXTRACT
5502 	     || GET_CODE (dest) == STRICT_LOW_PART)
5503 	dest = XEXP (dest, 0);
5504 
5505       sets[i].inner_dest = dest;
5506 
5507       if (MEM_P (dest))
5508 	{
5509 #ifdef PUSH_ROUNDING
5510 	  /* Stack pushes invalidate the stack pointer.  */
5511 	  rtx addr = XEXP (dest, 0);
5512 	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5513 	      && XEXP (addr, 0) == stack_pointer_rtx)
5514 	    invalidate (stack_pointer_rtx, VOIDmode);
5515 #endif
5516 	  dest = fold_rtx (dest, insn);
5517 	}
5518 
5519       /* Compute the hash code of the destination now,
5520 	 before the effects of this instruction are recorded,
5521 	 since the register values used in the address computation
5522 	 are those before this instruction.  */
5523       sets[i].dest_hash = HASH (dest, mode);
5524 
5525       /* Don't enter a bit-field in the hash table
5526 	 because the value in it after the store
5527 	 may not equal what was stored, due to truncation.  */
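      /* E.g. storing (const_int 0x1ff) through an 8-bit ZERO_EXTRACT
	 leaves 0xff in the field, so recording that equivalence would be
	 wrong; a constant such as (const_int 0x55) that fits in the field
	 is safe to record, which is the exception tested below.  */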
5528 
5529       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5530 	{
5531 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5532 
5533 	  if (src_const != 0 && CONST_INT_P (src_const)
5534 	      && CONST_INT_P (width)
5535 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5536 	      && ! (INTVAL (src_const)
5537 		    & (HOST_WIDE_INT_M1U << INTVAL (width))))
5538 	    /* Exception: if the value is constant,
5539 	       and it won't be truncated, record it.  */
5540 	    ;
5541 	  else
5542 	    {
5543 	      /* This is chosen so that the destination will be invalidated
5544 		 but no new value will be recorded.
5545 		 We must invalidate because sometimes constant
5546 		 values can be recorded for bitfields.  */
5547 	      sets[i].src_elt = 0;
5548 	      sets[i].src_volatile = 1;
5549 	      src_eqv = 0;
5550 	      src_eqv_elt = 0;
5551 	    }
5552 	}
5553 
5554       /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5555 	 the insn.  */
5556       else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5557 	{
5558 	  /* One less use of the label this insn used to jump to.  */
5559 	  cse_cfg_altered |= delete_insn_and_edges (insn);
5560 	  cse_jumps_altered = true;
5561 	  /* No more processing for this set.  */
5562 	  sets[i].rtl = 0;
5563 	}
5564 
5565       /* Similarly for no-op MEM moves.  */
5566       else if (mem_noop_insn)
5567 	{
5568 	  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5569 	    cse_cfg_altered = true;
5570 	  cse_cfg_altered |= delete_insn_and_edges (insn);
5571 	  /* No more processing for this set.  */
5572 	  sets[i].rtl = 0;
5573 	}
5574 
5575       /* If this SET is now setting PC to a label, we know it used to
5576 	 be a conditional or computed branch.  */
5577       else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5578 	       && !LABEL_REF_NONLOCAL_P (src))
5579 	{
5580 	  /* We reemit the jump in as many cases as possible just in
5581 	     case the form of an unconditional jump is significantly
5582 	     different than a computed jump or conditional jump.
5583 
5584 	     If this insn has multiple sets, then reemitting the
5585 	     jump is nontrivial.  So instead we just force rerecognition
5586 	     and hope for the best.  */
5587 	  if (n_sets == 1)
5588 	    {
5589 	      rtx_jump_insn *new_rtx;
5590 	      rtx note;
5591 
5592 	      rtx_insn *seq = targetm.gen_jump (XEXP (src, 0));
5593 	      new_rtx = emit_jump_insn_before (seq, insn);
5594 	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
5595 	      LABEL_NUSES (XEXP (src, 0))++;
5596 
5597 	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5598 	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5599 	      if (note)
5600 		{
5601 		  XEXP (note, 1) = NULL_RTX;
5602 		  REG_NOTES (new_rtx) = note;
5603 		}
5604 
5605 	      cse_cfg_altered |= delete_insn_and_edges (insn);
5606 	      insn = new_rtx;
5607 	    }
5608 	  else
5609 	    INSN_CODE (insn) = -1;
5610 
5611 	  /* Do not bother deleting any unreachable code; let jump do it.  */
5612 	  cse_jumps_altered = true;
5613 	  sets[i].rtl = 0;
5614 	}
5615 
5616       /* If destination is volatile, invalidate it and then do no further
5617 	 processing for this assignment.  */
5618 
5619       else if (do_not_record)
5620 	{
5621 	  invalidate_dest (dest);
5622 	  sets[i].rtl = 0;
5623 	}
5624 
5625       if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5626 	{
5627 	  do_not_record = 0;
5628 	  sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5629 	  if (do_not_record)
5630 	    {
5631 	      invalidate_dest (SET_DEST (sets[i].rtl));
5632 	      sets[i].rtl = 0;
5633 	    }
5634 	}
5635 
5636       /* If setting CC0, record what it was set to, or a constant, if it
5637 	 is equivalent to a constant.  If it is being set to a floating-point
5638 	 value, make a COMPARE with the appropriate constant of 0.  If we
5639 	 don't do this, later code can interpret this as a test against
5640 	 const0_rtx, which can cause problems if we try to put it into an
5641 	 insn as a floating-point operand.  */
5642       if (dest == cc0_rtx)
5643 	{
5644 	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5645 	  this_insn_cc0_mode = mode;
5646 	  if (FLOAT_MODE_P (mode))
5647 	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5648 					     CONST0_RTX (mode));
5649 	}
5650     }
5651 
5652   /* Now enter all non-volatile source expressions in the hash table
5653      if they are not already present.
5654      Record their equivalence classes in src_elt.
5655      This way we can insert the corresponding destinations into
5656      the same classes even if the actual sources are no longer in them
5657      (having been invalidated).  */
5658 
5659   if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5660       && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5661     {
5662       struct table_elt *elt;
5663       struct table_elt *classp = sets[0].src_elt;
5664       rtx dest = SET_DEST (sets[0].rtl);
5665       machine_mode eqvmode = GET_MODE (dest);
5666 
5667       if (GET_CODE (dest) == STRICT_LOW_PART)
5668 	{
5669 	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5670 	  classp = 0;
5671 	}
5672       if (insert_regs (src_eqv, classp, 0))
5673 	{
5674 	  rehash_using_reg (src_eqv);
5675 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5676 	}
5677       elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5678       elt->in_memory = src_eqv_in_memory;
5679       src_eqv_elt = elt;
5680 
5681       /* Check to see if src_eqv_elt is the same as a set source which
5682 	 does not yet have an elt, and if so set the elt of the set source
5683 	 to src_eqv_elt.  */
5684       for (i = 0; i < n_sets; i++)
5685 	if (sets[i].rtl && sets[i].src_elt == 0
5686 	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5687 	  sets[i].src_elt = src_eqv_elt;
5688     }
5689 
5690   for (i = 0; i < n_sets; i++)
5691     if (sets[i].rtl && ! sets[i].src_volatile
5692 	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5693       {
5694 	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5695 	  {
5696 	    /* REG_EQUAL in setting a STRICT_LOW_PART
5697 	       gives an equivalent for the entire destination register,
5698 	       not just for the subreg being stored in now.
5699 	       This is a more interesting equivalence, so we arrange later
5700 	       to treat the entire reg as the destination.  */
5701 	    sets[i].src_elt = src_eqv_elt;
5702 	    sets[i].src_hash = src_eqv_hash;
5703 	  }
5704 	else
5705 	  {
5706 	    /* Insert source and constant equivalent into hash table, if not
5707 	       already present.  */
5708 	    struct table_elt *classp = src_eqv_elt;
5709 	    rtx src = sets[i].src;
5710 	    rtx dest = SET_DEST (sets[i].rtl);
5711 	    machine_mode mode
5712 	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5713 
5714 	    /* It's possible that we have a source value known to be
5715 	       constant but don't have a REG_EQUAL note on the insn.
5716 	       Lack of a note will mean src_eqv_elt will be NULL.  This
5717 	       can happen where we've generated a SUBREG to access a
5718 	       CONST_INT that is already in a register in a wider mode.
5719 	       Ensure that the source expression is put in the proper
5720 	       constant class.  */
5721 	    if (!classp)
5722 	      classp = sets[i].src_const_elt;
5723 
5724 	    if (sets[i].src_elt == 0)
5725 	      {
5726 		struct table_elt *elt;
5727 
5728 		/* Note that these insert_regs calls cannot remove
5729 		   any of the src_elt's, because they would have failed to
5730 		   match if not still valid.  */
5731 		if (insert_regs (src, classp, 0))
5732 		  {
5733 		    rehash_using_reg (src);
5734 		    sets[i].src_hash = HASH (src, mode);
5735 		  }
5736 		elt = insert (src, classp, sets[i].src_hash, mode);
5737 		elt->in_memory = sets[i].src_in_memory;
5738 		/* If inline asm has any clobbers, ensure we only reuse
5739 		   existing inline asms and never try to put the ASM_OPERANDS
5740 		   into an insn that isn't inline asm.  */
5741 		if (GET_CODE (src) == ASM_OPERANDS
5742 		    && GET_CODE (x) == PARALLEL)
5743 		  elt->cost = MAX_COST;
5744 		sets[i].src_elt = classp = elt;
5745 	      }
5746 	    if (sets[i].src_const && sets[i].src_const_elt == 0
5747 		&& src != sets[i].src_const
5748 		&& ! rtx_equal_p (sets[i].src_const, src))
5749 	      sets[i].src_elt = insert (sets[i].src_const, classp,
5750 					sets[i].src_const_hash, mode);
5751 	  }
5752       }
5753     else if (sets[i].src_elt == 0)
5754       /* If we did not insert the source into the hash table (e.g., it was
5755 	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5756 	 so that the destination goes into that class.  */
5757       sets[i].src_elt = src_eqv_elt;
5758 
5759   /* Record destination addresses in the hash table.  This allows us to
5760      check if they are invalidated by other sets.  */
5761   for (i = 0; i < n_sets; i++)
5762     {
5763       if (sets[i].rtl)
5764 	{
5765 	  rtx x = sets[i].inner_dest;
5766 	  struct table_elt *elt;
5767 	  machine_mode mode;
5768 	  unsigned hash;
5769 
5770 	  if (MEM_P (x))
5771 	    {
5772 	      x = XEXP (x, 0);
5773 	      mode = GET_MODE (x);
5774 	      hash = HASH (x, mode);
5775 	      elt = lookup (x, hash, mode);
5776 	      if (!elt)
5777 		{
5778 		  if (insert_regs (x, NULL, 0))
5779 		    {
5780 		      rtx dest = SET_DEST (sets[i].rtl);
5781 
5782 		      rehash_using_reg (x);
5783 		      hash = HASH (x, mode);
5784 		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5785 		    }
5786 		  elt = insert (x, NULL, hash, mode);
5787 		}
5788 
5789 	      sets[i].dest_addr_elt = elt;
5790 	    }
5791 	  else
5792 	    sets[i].dest_addr_elt = NULL;
5793 	}
5794     }
5795 
5796   invalidate_from_clobbers (insn);
5797 
5798   /* Some registers are invalidated by subroutine calls.  Memory is
5799      invalidated by non-constant calls.  */
5800 
5801   if (CALL_P (insn))
5802     {
5803       if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5804 	invalidate_memory ();
5805       else
5806 	/* For const/pure calls, invalidate any argument slots, because
5807 	   those are owned by the callee.  */
5808 	for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5809 	  if (GET_CODE (XEXP (tem, 0)) == USE
5810 	      && MEM_P (XEXP (XEXP (tem, 0), 0)))
5811 	    invalidate (XEXP (XEXP (tem, 0), 0), VOIDmode);
5812       invalidate_for_call ();
5813     }
5814 
5815   /* Now invalidate everything set by this instruction.
5816      If a SUBREG or other funny destination is being set,
5817      sets[i].rtl is still nonzero, so here we invalidate the reg
5818      a part of which is being set.  */
5819 
5820   for (i = 0; i < n_sets; i++)
5821     if (sets[i].rtl)
5822       {
5823 	/* We can't use the inner dest, because the mode associated with
5824 	   a ZERO_EXTRACT is significant.  */
5825 	rtx dest = SET_DEST (sets[i].rtl);
5826 
5827 	/* Needed for registers to remove the register from its
5828 	   previous quantity's chain.
5829 	   Needed for memory if this is a nonvarying address, unless
5830 	   we have just done an invalidate_memory that covers even those.  */
5831 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5832 	  invalidate (dest, VOIDmode);
5833 	else if (MEM_P (dest))
5834 	  invalidate (dest, VOIDmode);
5835 	else if (GET_CODE (dest) == STRICT_LOW_PART
5836 		 || GET_CODE (dest) == ZERO_EXTRACT)
5837 	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5838       }
5839 
5840   /* Don't cse over a call to setjmp; on some machines (eg VAX)
5841      the regs restored by the longjmp come from a later time
5842      than the setjmp.  */
5843   if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5844     {
5845       flush_hash_table ();
5846       goto done;
5847     }
5848 
5849   /* Make sure registers mentioned in destinations
5850      are safe for use in an expression to be inserted.
5851      This removes from the hash table
5852      any invalid entry that refers to one of these registers.
5853 
5854      We don't care about the return value from mention_regs because
5855      we are going to hash the SET_DEST values unconditionally.  */
5856 
5857   for (i = 0; i < n_sets; i++)
5858     {
5859       if (sets[i].rtl)
5860 	{
5861 	  rtx x = SET_DEST (sets[i].rtl);
5862 
5863 	  if (!REG_P (x))
5864 	    mention_regs (x);
5865 	  else
5866 	    {
5867 	      /* We used to rely on all references to a register becoming
5868 		 inaccessible when a register changes to a new quantity,
5869 		 since that changes the hash code.  However, that is not
5870 		 safe, since after HASH_SIZE new quantities we get a
5871 		 hash 'collision' of a register with its own invalid
5872 		 entries.  And since SUBREGs have been changed not to
5873 		 change their hash code with the hash code of the register,
5874 		 it wouldn't work any longer at all.  So we have to check
5875 		 for any invalid references lying around now.
5876 		 This code is similar to the REG case in mention_regs,
5877 		 but it knows that reg_tick has been incremented, and
5878 		 it leaves reg_in_table as -1.  */
5879 	      unsigned int regno = REGNO (x);
5880 	      unsigned int endregno = END_REGNO (x);
5881 	      unsigned int i;
5882 
5883 	      for (i = regno; i < endregno; i++)
5884 		{
5885 		  if (REG_IN_TABLE (i) >= 0)
5886 		    {
5887 		      remove_invalid_refs (i);
5888 		      REG_IN_TABLE (i) = -1;
5889 		    }
5890 		}
5891 	    }
5892 	}
5893     }
5894 
5895   /* We may have just removed some of the src_elt's from the hash table.
5896      So replace each one with the current head of the same class.
5897      Also check if destination addresses have been removed.  */
5898 
5899   for (i = 0; i < n_sets; i++)
5900     if (sets[i].rtl)
5901       {
5902 	if (sets[i].dest_addr_elt
5903 	    && sets[i].dest_addr_elt->first_same_value == 0)
5904 	  {
5905 	    /* The elt was removed, which means this destination is not
5906 	       valid after this instruction.  */
5907 	    sets[i].rtl = NULL_RTX;
5908 	  }
5909 	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5910 	  /* If elt was removed, find current head of same class,
5911 	     or 0 if nothing remains of that class.  */
5912 	  {
5913 	    struct table_elt *elt = sets[i].src_elt;
5914 
5915 	    while (elt && elt->prev_same_value)
5916 	      elt = elt->prev_same_value;
5917 
5918 	    while (elt && elt->first_same_value == 0)
5919 	      elt = elt->next_same_value;
5920 	    sets[i].src_elt = elt ? elt->first_same_value : 0;
5921 	  }
5922       }
5923 
5924   /* Now insert the destinations into their equivalence classes.  */
5925 
5926   for (i = 0; i < n_sets; i++)
5927     if (sets[i].rtl)
5928       {
5929 	rtx dest = SET_DEST (sets[i].rtl);
5930 	struct table_elt *elt;
5931 
5932 	/* Don't record value if we are not supposed to risk allocating
5933 	   floating-point values in registers that might be wider than
5934 	   memory.  */
5935 	if ((flag_float_store
5936 	     && MEM_P (dest)
5937 	     && FLOAT_MODE_P (GET_MODE (dest)))
5938 	    /* Don't record BLKmode values, because we don't know the
5939 	       size of it, and can't be sure that other BLKmode values
5940 	       have the same or smaller size.  */
5941 	    || GET_MODE (dest) == BLKmode
5942 	    /* If we didn't put a REG_EQUAL value or a source into the hash
5943 	       table, there is no point in recording DEST.  */
5944 	    || sets[i].src_elt == 0)
5945 	  continue;
5946 
5947 	/* STRICT_LOW_PART isn't part of the value BEING set,
5948 	   and neither is the SUBREG inside it.
5949 	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
5950 	if (GET_CODE (dest) == STRICT_LOW_PART)
5951 	  dest = SUBREG_REG (XEXP (dest, 0));
5952 
5953 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5954 	  /* Registers must also be inserted into chains for quantities.  */
5955 	  if (insert_regs (dest, sets[i].src_elt, 1))
5956 	    {
5957 	      /* If `insert_regs' changes something, the hash code must be
5958 		 recalculated.  */
5959 	      rehash_using_reg (dest);
5960 	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5961 	    }
5962 
5963 	/* If DEST is a paradoxical SUBREG, don't record DEST since the bits
5964 	   outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined.  */
5965 	if (paradoxical_subreg_p (dest))
5966 	  continue;
5967 
5968 	elt = insert (dest, sets[i].src_elt,
5969 		      sets[i].dest_hash, GET_MODE (dest));
5970 
5971 	/* If this is a constant, insert the constant anchors with the
5972 	   equivalent register-offset expressions using register DEST.  */
5973 	if (targetm.const_anchor
5974 	    && REG_P (dest)
5975 	    && SCALAR_INT_MODE_P (GET_MODE (dest))
5976 	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5977 	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5978 
5979 	elt->in_memory = (MEM_P (sets[i].inner_dest)
5980 			  && !MEM_READONLY_P (sets[i].inner_dest));
5981 
5982 	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5983 	   narrower than M2, and both M1 and M2 are the same number of words,
5984 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5985 	   make that equivalence as well.
5986 
5987 	   However, BAR may have equivalences for which gen_lowpart
5988 	   will produce a simpler value than gen_lowpart applied to
5989 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5990 	   BAR's equivalences.  If we don't get a simplified form, make
5991 	   the SUBREG.  It will not be used in an equivalence, but will
5992 	   cause two similar assignments to be detected.
5993 
5994 	   Note the loop below will find SUBREG_REG (DEST) since we have
5995 	   already entered SRC and DEST of the SET in the table.  */
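	/* Concretely (a hypothetical 64-bit-word target): from
	   (set (subreg:DI (reg:SI 100) 0) (reg:DI 101)), where SImode and
	   DImode each occupy one word, the loop below also records
	   (subreg:SI (reg:DI 101) 0) in the SImode equivalence class
	   alongside (reg:SI 100).  */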
5996 
5997 	if (GET_CODE (dest) == SUBREG
5998 	    && (known_equal_after_align_down
5999 		(GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1,
6000 		 GET_MODE_SIZE (GET_MODE (dest)) - 1,
6001 		 UNITS_PER_WORD))
6002 	    && !partial_subreg_p (dest)
6003 	    && sets[i].src_elt != 0)
6004 	  {
6005 	    machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6006 	    struct table_elt *elt, *classp = 0;
6007 
6008 	    for (elt = sets[i].src_elt->first_same_value; elt;
6009 		 elt = elt->next_same_value)
6010 	      {
6011 		rtx new_src = 0;
6012 		unsigned src_hash;
6013 		struct table_elt *src_elt;
6014 
6015 		/* Ignore invalid entries.  */
6016 		if (!REG_P (elt->exp)
6017 		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6018 		  continue;
6019 
6020 		/* We may have already been playing subreg games.  If the
6021 		   mode is already correct for the destination, use it.  */
6022 		if (GET_MODE (elt->exp) == new_mode)
6023 		  new_src = elt->exp;
6024 		else
6025 		  {
6026 		    poly_uint64 byte
6027 		      = subreg_lowpart_offset (new_mode, GET_MODE (dest));
6028 		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6029 					           GET_MODE (dest), byte);
6030 		  }
6031 
6032 		/* The call to simplify_gen_subreg fails if the value
6033 		   is VOIDmode, yet we can't do any simplification, e.g.
6034 		   for EXPR_LISTs denoting function call results.
6035 		   It is invalid to construct a SUBREG with a VOIDmode
6036 		   SUBREG_REG, hence a zero new_src means we can't do
6037 		   this substitution.  */
6038 		if (! new_src)
6039 		  continue;
6040 
6041 		src_hash = HASH (new_src, new_mode);
6042 		src_elt = lookup (new_src, src_hash, new_mode);
6043 
6044 		/* Put the new source in the hash table if it isn't
6045 		   already.  */
6046 		if (src_elt == 0)
6047 		  {
6048 		    if (insert_regs (new_src, classp, 0))
6049 		      {
6050 			rehash_using_reg (new_src);
6051 			src_hash = HASH (new_src, new_mode);
6052 		      }
6053 		    src_elt = insert (new_src, classp, src_hash, new_mode);
6054 		    src_elt->in_memory = elt->in_memory;
6055 		    if (GET_CODE (new_src) == ASM_OPERANDS
6056 			&& elt->cost == MAX_COST)
6057 		      src_elt->cost = MAX_COST;
6058 		  }
6059 		else if (classp && classp != src_elt->first_same_value)
6060 		  /* Show that two things that we've seen before are
6061 		     actually the same.  */
6062 		  merge_equiv_classes (src_elt, classp);
6063 
6064 		classp = src_elt->first_same_value;
6065 		/* Ignore invalid entries.  */
6066 		while (classp
6067 		       && !REG_P (classp->exp)
6068 		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6069 		  classp = classp->next_same_value;
6070 	      }
6071 	  }
6072       }
6073 
6074   /* Special handling for (set REG0 REG1) where REG0 is the
6075      "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6076      be used in the sequel, so (if easily done) change this insn to
6077      (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6078      that computed their value.  Then REG1 will become a dead store
6079      and won't cloud the situation for later optimizations.
6080 
6081      Do not make this change if REG1 is a hard register, because it will
6082      then be used in the sequel and we may be changing a two-operand insn
6083      into a three-operand insn.
6084 
6085      Also do not do this if we are operating on a copy of INSN.  */
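  /* E.g. (hypothetical pseudos): given
	I1: (set (reg 101) (plus (reg 102) (reg 103)))
	I2: (set (reg 100) (reg 101))
     where (reg 100) is the cheaper register, try_back_substitute_reg
     rewrites I1 to set (reg 100) and I2 to (set (reg 101) (reg 100)),
     leaving (reg 101) as a likely dead store.  */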
6086 
6087   if (n_sets == 1 && sets[0].rtl)
6088     try_back_substitute_reg (sets[0].rtl, insn);
6089 
6090 done:;
6091 }
6092 
6093 /* Remove from the hash table all expressions that reference memory.  */
6094 
6095 static void
6096 invalidate_memory (void)
6097 {
6098   int i;
6099   struct table_elt *p, *next;
6100 
6101   for (i = 0; i < HASH_SIZE; i++)
6102     for (p = table[i]; p; p = next)
6103       {
6104 	next = p->next_same_hash;
6105 	if (p->in_memory)
6106 	  remove_from_table (p, i);
6107       }
6108 }
6109 
6110 /* Perform invalidation on the basis of everything about INSN,
6111    except for invalidating the actual places that are SET in it.
6112    This includes the places CLOBBERed, and anything that might
6113    alias with something that is SET or CLOBBERed.  */
6114 
6115 static void
6116 invalidate_from_clobbers (rtx_insn *insn)
6117 {
6118   rtx x = PATTERN (insn);
6119 
6120   if (GET_CODE (x) == CLOBBER)
6121     {
6122       rtx ref = XEXP (x, 0);
6123       if (ref)
6124 	{
6125 	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6126 	      || MEM_P (ref))
6127 	    invalidate (ref, VOIDmode);
6128 	  else if (GET_CODE (ref) == STRICT_LOW_PART
6129 		   || GET_CODE (ref) == ZERO_EXTRACT)
6130 	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6131 	}
6132     }
6133   else if (GET_CODE (x) == PARALLEL)
6134     {
6135       int i;
6136       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6137 	{
6138 	  rtx y = XVECEXP (x, 0, i);
6139 	  if (GET_CODE (y) == CLOBBER)
6140 	    {
6141 	      rtx ref = XEXP (y, 0);
6142 	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6143 		  || MEM_P (ref))
6144 		invalidate (ref, VOIDmode);
6145 	      else if (GET_CODE (ref) == STRICT_LOW_PART
6146 		       || GET_CODE (ref) == ZERO_EXTRACT)
6147 		invalidate (XEXP (ref, 0), GET_MODE (ref));
6148 	    }
6149 	}
6150     }
6151 }
6152 
6153 /* Perform invalidation on the basis of everything about INSN.
6154    This includes the places CLOBBERed, and anything that might
6155    alias with something that is SET or CLOBBERed.  */
6156 
6157 static void
6158 invalidate_from_sets_and_clobbers (rtx_insn *insn)
6159 {
6160   rtx tem;
6161   rtx x = PATTERN (insn);
6162 
6163   if (CALL_P (insn))
6164     {
6165       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6166 	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6167 	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6168     }
6169 
6170   /* Ensure we invalidate the destination register of a CALL insn.
6171      This is necessary for machines where this register is a fixed_reg,
6172      because no other code would invalidate it.  */
6173   if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6174     invalidate (SET_DEST (x), VOIDmode);
6175 
6176   else if (GET_CODE (x) == PARALLEL)
6177     {
6178       int i;
6179 
6180       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6181 	{
6182 	  rtx y = XVECEXP (x, 0, i);
6183 	  if (GET_CODE (y) == CLOBBER)
6184 	    {
6185 	      rtx clobbered = XEXP (y, 0);
6186 
6187 	      if (REG_P (clobbered)
6188 		  || GET_CODE (clobbered) == SUBREG)
6189 		invalidate (clobbered, VOIDmode);
6190 	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6191 		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6192 		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6193 	    }
6194 	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6195 	    invalidate (SET_DEST (y), VOIDmode);
6196 	}
6197     }
6198 }
6199 
6200 /* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6201    and replace any registers in them with either an equivalent constant
6202    or the canonical form of the register.  If we are inside an address,
6203    only do this if the address remains valid.
6204 
6205    OBJECT is 0 except when within a MEM in which case it is the MEM.
6206 
6207    Return the replacement for X.  */
6208 
6209 static rtx
6210 cse_process_notes_1 (rtx x, rtx object, bool *changed)
6211 {
6212   enum rtx_code code = GET_CODE (x);
6213   const char *fmt = GET_RTX_FORMAT (code);
6214   int i;
6215 
6216   switch (code)
6217     {
6218     case CONST:
6219     case SYMBOL_REF:
6220     case LABEL_REF:
6221     CASE_CONST_ANY:
6222     case PC:
6223     case CC0:
6224     case LO_SUM:
6225       return x;
6226 
6227     case MEM:
6228       validate_change (x, &XEXP (x, 0),
6229 		       cse_process_notes (XEXP (x, 0), x, changed), 0);
6230       return x;
6231 
6232     case EXPR_LIST:
6233       if (REG_NOTE_KIND (x) == REG_EQUAL)
6234 	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6235       /* Fall through.  */
6236 
6237     case INSN_LIST:
6238     case INT_LIST:
6239       if (XEXP (x, 1))
6240 	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6241       return x;
6242 
6243     case SIGN_EXTEND:
6244     case ZERO_EXTEND:
6245     case SUBREG:
6246       {
6247 	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6248 	/* We don't substitute VOIDmode constants into these rtx,
6249 	   since they would impede folding.  */
6250 	if (GET_MODE (new_rtx) != VOIDmode)
6251 	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6252 	return x;
6253       }
6254 
6255     case UNSIGNED_FLOAT:
6256       {
6257 	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6258 	/* We don't substitute negative VOIDmode constants into these rtx,
6259 	   since they would impede folding.  */
6260 	if (GET_MODE (new_rtx) != VOIDmode
6261 	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
6262 	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
6263 	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6264 	return x;
6265       }
6266 
6267     case REG:
6268       i = REG_QTY (REGNO (x));
6269 
6270       /* Return a constant or a constant register.  */
6271       if (REGNO_QTY_VALID_P (REGNO (x)))
6272 	{
6273 	  struct qty_table_elem *ent = &qty_table[i];
6274 
6275 	  if (ent->const_rtx != NULL_RTX
6276 	      && (CONSTANT_P (ent->const_rtx)
6277 		  || REG_P (ent->const_rtx)))
6278 	    {
6279 	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6280 	      if (new_rtx)
6281 		return copy_rtx (new_rtx);
6282 	    }
6283 	}
6284 
6285       /* Otherwise, canonicalize this register.  */
6286       return canon_reg (x, NULL);
6287 
6288     default:
6289       break;
6290     }
6291 
6292   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6293     if (fmt[i] == 'e')
6294       validate_change (object, &XEXP (x, i),
6295 		       cse_process_notes (XEXP (x, i), object, changed), 0);
6296 
6297   return x;
6298 }
6299 
6300 static rtx
6301 cse_process_notes (rtx x, rtx object, bool *changed)
6302 {
6303   rtx new_rtx = cse_process_notes_1 (x, object, changed);
6304   if (new_rtx != x)
6305     *changed = true;
6306   return new_rtx;
6307 }
6308 
6309 
6310 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6311 
6312    DATA is a pointer to a struct cse_basic_block_data, that is used to
6313    describe the path.
6314    It is filled with a queue of basic blocks, starting with FIRST_BB
6315    and following a trace through the CFG.
6316 
6317    If all paths starting at FIRST_BB have been followed, or no new path
6318    starting at FIRST_BB can be constructed, this function returns FALSE.
6319    Otherwise, DATA->path is filled and the function returns TRUE indicating
6320    that a path to follow was found.
6321 
6322    If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6323    block in the path will be FIRST_BB.  */
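/* For illustration only, consider a hypothetical diamond CFG:

         bb2
        /   \
      bb3   bb4
        \   /
         bb5

   where bb2 ends in a conditional jump whose branch edge goes to bb4.
   A first call starting at bb2 builds the path {bb2, bb4}, since bb4
   has the single predecessor bb2.  The next call flips the last branch
   and returns {bb2, bb3}.  bb5 has two predecessors, so it is never
   appended to either path and instead starts a path of its own.  */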
6324 
6325 static bool
6326 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6327 	       int follow_jumps)
6328 {
6329   basic_block bb;
6330   edge e;
6331   int path_size;
6332 
6333   bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
6334 
6335   /* See if there is a previous path.  */
6336   path_size = data->path_size;
6337 
6338   /* If there is a previous path, make sure it started with FIRST_BB.  */
6339   if (path_size)
6340     gcc_assert (data->path[0].bb == first_bb);
6341 
6342   /* If there was only one basic block in the last path, clear the path
6343      and return, so that paths starting at another basic block can be tried.  */
6344   if (path_size == 1)
6345     {
6346       path_size = 0;
6347       goto done;
6348     }
6349 
6350   /* If the path was empty from the beginning, construct a new path.  */
6351   if (path_size == 0)
6352     data->path[path_size++].bb = first_bb;
6353   else
6354     {
6355       /* Otherwise, path_size must be equal to or greater than 2, because
6356 	 a previous path exists that is at least two basic blocks long.
6357 
6358 	 Update the previous branch path, if any.  If the last branch was
6359 	 previously along the branch edge, take the fallthrough edge now.  */
6360       while (path_size >= 2)
6361 	{
6362 	  basic_block last_bb_in_path, previous_bb_in_path;
6363 	  edge e;
6364 
6365 	  --path_size;
6366 	  last_bb_in_path = data->path[path_size].bb;
6367 	  previous_bb_in_path = data->path[path_size - 1].bb;
6368 
6369 	  /* If we previously followed a path along the branch edge, try
6370 	     the fallthru edge now.  */
6371 	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6372 	      && any_condjump_p (BB_END (previous_bb_in_path))
6373 	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6374 	      && e == BRANCH_EDGE (previous_bb_in_path))
6375 	    {
6376 	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6377 	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
6378 		  && single_pred_p (bb)
6379 		  /* We used to assert here that we would only see blocks
6380 		     that we have not visited yet.  But we may end up
6381 		     visiting basic blocks twice if the CFG has changed
6382 		     in this run of cse_main, because when the CFG changes
6383 		     the topological sort of the CFG also changes.  A basic
6384 		     the topological sort of the CFG also changes.  A basic
6385 		     block that previously had more than one predecessor
6386 		     may now have a single predecessor, and become part of
6387 
6388 		     We still want to visit each basic block only once, so
6389 		     halt the path here if we have already visited BB.  */
6390 		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
6391 		{
6392 		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
6393 		  data->path[path_size++].bb = bb;
6394 		  break;
6395 		}
6396 	    }
6397 
6398 	  data->path[path_size].bb = NULL;
6399 	}
6400 
6401       /* If only one block remains in the path, bail.  */
6402       if (path_size == 1)
6403 	{
6404 	  path_size = 0;
6405 	  goto done;
6406 	}
6407     }
6408 
6409   /* Extend the path if possible.  */
6410   if (follow_jumps)
6411     {
6412       bb = data->path[path_size - 1].bb;
6413       while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6414 	{
6415 	  if (single_succ_p (bb))
6416 	    e = single_succ_edge (bb);
6417 	  else if (EDGE_COUNT (bb->succs) == 2
6418 		   && any_condjump_p (BB_END (bb)))
6419 	    {
6420 	      /* First try to follow the branch.  If that doesn't lead
6421 		 to a useful path, follow the fallthru edge.  */
6422 	      e = BRANCH_EDGE (bb);
6423 	      if (!single_pred_p (e->dest))
6424 		e = FALLTHRU_EDGE (bb);
6425 	    }
6426 	  else
6427 	    e = NULL;
6428 
6429 	  if (e
6430 	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6431 	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
6432 	      && single_pred_p (e->dest)
6433 	      /* Avoid visiting basic blocks twice.  The large comment
6434 		 above explains why this can happen.  */
6435 	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
6436 	    {
6437 	      basic_block bb2 = e->dest;
6438 	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
6439 	      data->path[path_size++].bb = bb2;
6440 	      bb = bb2;
6441 	    }
6442 	  else
6443 	    bb = NULL;
6444 	}
6445     }
6446 
6447 done:
6448   data->path_size = path_size;
6449   return path_size != 0;
6450 }
6451 
6452 /* Dump the path in DATA to file F.  NSETS is the number of sets
6453    in the path.  */
6454 
6455 static void
6456 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6457 {
6458   int path_entry;
6459 
6460   fprintf (f, ";; Following path with %d sets: ", nsets);
6461   for (path_entry = 0; path_entry < data->path_size; path_entry++)
6462     fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6463   fputc ('\n', f);
6464   fflush (f);
6465 }
6466 
6467 
6468 /* Return true if BB has exception handling successor edges.  */
6469 
6470 static bool
6471 have_eh_succ_edges (basic_block bb)
6472 {
6473   edge e;
6474   edge_iterator ei;
6475 
6476   FOR_EACH_EDGE (e, ei, bb->succs)
6477     if (e->flags & EDGE_EH)
6478       return true;
6479 
6480   return false;
6481 }
6482 
6483 
6484 /* Scan to the end of the path described by DATA.  Store in DATA->nsets
6485    an estimate of the total number of SETs of all insns in the path.  */
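/* For instance (illustrative RTL): a path whose only insn is

       (parallel [(set (reg:SI 100) ...) (set (reg:SI 101) ...)])

   contributes 2 to the estimate, while a plain SET contributes 1.
   Overestimating is harmless here; the result only sizes qty_table
   (via max_qty) and lets set-free paths be skipped.  */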
6486 
6487 static void
6488 cse_prescan_path (struct cse_basic_block_data *data)
6489 {
6490   int nsets = 0;
6491   int path_size = data->path_size;
6492   int path_entry;
6493 
6494   /* Scan to end of each basic block in the path.  */
6495   for (path_entry = 0; path_entry < path_size; path_entry++)
6496     {
6497       basic_block bb;
6498       rtx_insn *insn;
6499 
6500       bb = data->path[path_entry].bb;
6501 
6502       FOR_BB_INSNS (bb, insn)
6503 	{
6504 	  if (!INSN_P (insn))
6505 	    continue;
6506 
6507 	  /* A PARALLEL can have lots of SETs in it,
6508 	     especially if it is really an ASM_OPERANDS.  */
6509 	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
6510 	    nsets += XVECLEN (PATTERN (insn), 0);
6511 	  else
6512 	    nsets += 1;
6513 	}
6514     }
6515 
6516   data->nsets = nsets;
6517 }
6518 
6519 /* Return true if the pattern of INSN uses a LABEL_REF for which
6520    there isn't a REG_LABEL_OPERAND note.  */
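/* A hypothetical example: if CSE propagates a label constant into a
   non-jump insn, e.g.

       (set (reg:SI 100) (label_ref <L>))

   the insn now mentions <L> without a REG_LABEL_OPERAND note, and the
   jump pass must be rerun to add one.  */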
6521 
6522 static bool
6523 check_for_label_ref (rtx_insn *insn)
6524 {
6525   /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6526      note for it, we must rerun jump since it needs to place the note.  If
6527      this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6528      don't do this since no REG_LABEL_OPERAND will be added.  */
6529   subrtx_iterator::array_type array;
6530   FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
6531     {
6532       const_rtx x = *iter;
6533       if (GET_CODE (x) == LABEL_REF
6534 	  && !LABEL_REF_NONLOCAL_P (x)
6535 	  && (!JUMP_P (insn)
6536 	      || !label_is_jump_target_p (label_ref_label (x), insn))
6537 	  && LABEL_P (label_ref_label (x))
6538 	  && INSN_UID (label_ref_label (x)) != 0
6539 	  && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
6540 	return true;
6541     }
6542   return false;
6543 }
6544 
6545 /* Process a single extended basic block described by EBB_DATA.  */
6546 
6547 static void
6548 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6549 {
6550   int path_size = ebb_data->path_size;
6551   int path_entry;
6552   int num_insns = 0;
6553 
6554   /* Allocate the space needed by qty_table.  */
6555   qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6556 
6557   new_basic_block ();
6558   cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6559   cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6560   for (path_entry = 0; path_entry < path_size; path_entry++)
6561     {
6562       basic_block bb;
6563       rtx_insn *insn;
6564 
6565       bb = ebb_data->path[path_entry].bb;
6566 
6567       /* Invalidate recorded information for eh regs if there is an EH
6568 	 edge pointing to that bb.  */
6569       if (bb_has_eh_pred (bb))
6570 	{
6571 	  df_ref def;
6572 
6573 	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
6574 	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6575 	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6576 	}
6577 
6578       optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6579       FOR_BB_INSNS (bb, insn)
6580 	{
6581 	  /* If we have processed PARAM_MAX_CSE_INSNS insns, flush the hash
6582 	     table to avoid extreme quadratic behavior.  We must not include NOTEs
6583 	     in the count since there may be more of them when generating
6584 	     debugging information.  If we clear the table at different
6585 	     times, code generated with -g -O might be different than code
6586 	     generated with -O but not -g.
6587 
6588 	     FIXME: This is a real kludge and needs to be done some other
6589 		    way.  */
6590 	  if (NONDEBUG_INSN_P (insn)
6591 	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6592 	    {
6593 	      flush_hash_table ();
6594 	      num_insns = 0;
6595 	    }
6596 
6597 	  if (INSN_P (insn))
6598 	    {
6599 	      /* Process notes first so we have all notes in canonical forms
6600 		 when looking for duplicate operations.  */
6601 	      if (REG_NOTES (insn))
6602 		{
6603 		  bool changed = false;
6604 		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6605 						        NULL_RTX, &changed);
6606 		  if (changed)
6607 		    df_notes_rescan (insn);
6608 		}
6609 
6610 	      cse_insn (insn);
6611 
6612 	      /* If we haven't already found an insn where we added a LABEL_REF,
6613 		 check this one.  */
6614 	      if (INSN_P (insn) && !recorded_label_ref
6615 		  && check_for_label_ref (insn))
6616 		recorded_label_ref = true;
6617 
6618 	      if (HAVE_cc0 && NONDEBUG_INSN_P (insn))
6619 		{
6620 		  /* If the previous insn sets CC0 and this insn no
6621 		     longer references CC0, delete the previous insn.
6622 		     Here we use fact that nothing expects CC0 to be
6623 		     Here we use the fact that nothing expects CC0 to be
6624 		     pass.  */
6625 		  rtx_insn *prev_insn;
6626 		  rtx tem;
6627 
6628 		  prev_insn = prev_nonnote_nondebug_insn (insn);
6629 		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6630 		      && (tem = single_set (prev_insn)) != NULL_RTX
6631 		      && SET_DEST (tem) == cc0_rtx
6632 		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6633 		    delete_insn (prev_insn);
6634 
6635 		  /* If this insn is not the last insn in the basic
6636 		     block, it will be PREV_INSN (insn) in the next
6637 		     iteration.  If we recorded any CC0-related
6638 		     information for this insn, remember it.  */
6639 		  if (insn != BB_END (bb))
6640 		    {
6641 		      prev_insn_cc0 = this_insn_cc0;
6642 		      prev_insn_cc0_mode = this_insn_cc0_mode;
6643 		    }
6644 		}
6645 	    }
6646 	}
6647 
6648       /* With non-call exceptions, we are not always able to update
6649 	 the CFG properly inside cse_insn.  So clean up possibly
6650 	 redundant EH edges here.  */
6651       if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6652 	cse_cfg_altered |= purge_dead_edges (bb);
6653 
6654       /* If we changed a conditional jump, we may have terminated
6655 	 the path we are following.  Check that by verifying that
6656 	 the edge we would take still exists.  If the edge does
6657 	 not exist anymore, purge the remainder of the path.
6658 	 Note that this will cause us to return to the caller.  */
6659       if (path_entry < path_size - 1)
6660 	{
6661 	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6662 	  if (!find_edge (bb, next_bb))
6663 	    {
6664 	      do
6665 		{
6666 		  path_size--;
6667 
6668 		  /* If we truncate the path, we must also reset the
6669 		     visited bit on the remaining blocks in the path,
6670 		     or we will never visit them at all.  */
6671 		  bitmap_clear_bit (cse_visited_basic_blocks,
6672 			     ebb_data->path[path_size].bb->index);
6673 		  ebb_data->path[path_size].bb = NULL;
6674 		}
6675 	      while (path_size - 1 != path_entry);
6676 	      ebb_data->path_size = path_size;
6677 	    }
6678 	}
6679 
6680       /* If this is a conditional jump insn, record any known
6681 	 equivalences due to the condition being tested.  */
6682       insn = BB_END (bb);
6683       if (path_entry < path_size - 1
6684 	  && EDGE_COUNT (bb->succs) == 2
6685 	  && JUMP_P (insn)
6686 	  && single_set (insn)
6687 	  && any_condjump_p (insn))
6688 	{
6689 	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6690 	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6691 	  record_jump_equiv (insn, taken);
6692 	}
6693 
6694       /* Clear the CC0-tracking state; it can't provide useful
6695 	 information across basic block boundaries.  */
6696       prev_insn_cc0 = 0;
6697     }
6698 
6699   gcc_assert (next_qty <= max_qty);
6700 
6701   free (qty_table);
6702 }
6703 
6704 
6705 /* Perform cse on the instructions of a function.
6706    F is the first instruction.
6707    NREGS is one plus the highest pseudo-reg number used in the function.
6708 
6709    Return 2 if jump optimizations should be redone due to simplifications
6710    in conditional jump instructions.
6711    Return 1 if the CFG should be cleaned up because it has been modified.
6712    Return 0 otherwise.  */
6713 
6714 static int
6715 cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
6716 {
6717   struct cse_basic_block_data ebb_data;
6718   basic_block bb;
6719   int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6720   int i, n_blocks;
6721 
6722   /* CSE doesn't use dominance info but can invalidate it in different ways.
6723      For simplicity, free dominance info here.  */
6724   free_dominance_info (CDI_DOMINATORS);
6725 
6726   df_set_flags (DF_LR_RUN_DCE);
6727   df_note_add_problem ();
6728   df_analyze ();
6729   df_set_flags (DF_DEFER_INSN_RESCAN);
6730 
6731   reg_scan (get_insns (), max_reg_num ());
6732   init_cse_reg_info (nregs);
6733 
6734   ebb_data.path = XNEWVEC (struct branch_path,
6735 			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6736 
6737   cse_cfg_altered = false;
6738   cse_jumps_altered = false;
6739   recorded_label_ref = false;
6740   constant_pool_entries_cost = 0;
6741   constant_pool_entries_regcost = 0;
6742   ebb_data.path_size = 0;
6743   ebb_data.nsets = 0;
6744   rtl_hooks = cse_rtl_hooks;
6745 
6746   init_recog ();
6747   init_alias_analysis ();
6748 
6749   reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6750 
6751   /* Set up the table of already visited basic blocks.  */
6752   cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6753   bitmap_clear (cse_visited_basic_blocks);
6754 
6755   /* Loop over basic blocks in reverse post-order (RPO),
6756      excluding the ENTRY and EXIT blocks.  */
6757   n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6758   i = 0;
6759   while (i < n_blocks)
6760     {
6761       /* Find the first block in the RPO queue that we have not yet
6762 	 processed before.  */
6763       do
6764 	{
6765 	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
6766 	}
6767       while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
6768 	     && i < n_blocks);
6769 
6770       /* Find all paths starting with BB, and process them.  */
6771       while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6772 	{
6773 	  /* Pre-scan the path.  */
6774 	  cse_prescan_path (&ebb_data);
6775 
6776 	  /* If this path has no sets, skip it.  */
6777 	  if (ebb_data.nsets == 0)
6778 	    continue;
6779 
6780 	  /* Get a reasonable estimate for the maximum number of qty's
6781 	     needed for this path.  For this, we take the number of sets
6782 	     and multiply that by MAX_RECOG_OPERANDS.  */
6783 	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6784 
6785 	  /* Dump the path we're about to process.  */
6786 	  if (dump_file)
6787 	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6788 
6789 	  cse_extended_basic_block (&ebb_data);
6790 	}
6791     }
6792 
6793   /* Clean up.  */
6794   end_alias_analysis ();
6795   free (reg_eqv_table);
6796   free (ebb_data.path);
6797   sbitmap_free (cse_visited_basic_blocks);
6798   free (rc_order);
6799   rtl_hooks = general_rtl_hooks;
6800 
6801   if (cse_jumps_altered || recorded_label_ref)
6802     return 2;
6803   else if (cse_cfg_altered)
6804     return 1;
6805   else
6806     return 0;
6807 }
6808 
6809 /* Count the number of times registers are used (not set) in X.
6810    COUNTS is an array in which we accumulate the count, INCR is how much
6811    we count each register usage.
6812 
6813    Don't count a usage of DEST, which is the SET_DEST of a SET which
6814    contains X in its SET_SRC.  This is because such a SET does not
6815    modify the liveness of DEST.
6816    DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6817    We must then count uses of a SET_DEST regardless, because the insn can't be
6818    deleted here.  */
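/* An illustrative example of the DEST convention: when counting

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   the use of (reg:SI 100) inside SET_SRC is not counted (DEST is the
   SET_DEST), so a self-increment whose result is otherwise unused
   keeps a count of zero and can be deleted as trivially dead.  */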
6819 
6820 static void
6821 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6822 {
6823   enum rtx_code code;
6824   rtx note;
6825   const char *fmt;
6826   int i, j;
6827 
6828   if (x == 0)
6829     return;
6830 
6831   switch (code = GET_CODE (x))
6832     {
6833     case REG:
6834       if (x != dest)
6835 	counts[REGNO (x)] += incr;
6836       return;
6837 
6838     case PC:
6839     case CC0:
6840     case CONST:
6841     CASE_CONST_ANY:
6842     case SYMBOL_REF:
6843     case LABEL_REF:
6844       return;
6845 
6846     case CLOBBER:
6847       /* If we are clobbering a MEM, mark any registers inside the address
6848          as being used.  */
6849       if (MEM_P (XEXP (x, 0)))
6850 	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6851       return;
6852 
6853     case SET:
6854       /* Unless we are setting a REG, count everything in SET_DEST.  */
6855       if (!REG_P (SET_DEST (x)))
6856 	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6857       count_reg_usage (SET_SRC (x), counts,
6858 		       dest ? dest : SET_DEST (x),
6859 		       incr);
6860       return;
6861 
6862     case DEBUG_INSN:
6863       return;
6864 
6865     case CALL_INSN:
6866     case INSN:
6867     case JUMP_INSN:
6868       /* We expect dest to be NULL_RTX here.  If the insn may throw,
6869 	 or if it cannot be deleted due to side-effects, mark this fact
6870 	 by setting DEST to pc_rtx.  */
6871       if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
6872 	  || side_effects_p (PATTERN (x)))
6873 	dest = pc_rtx;
6874       if (code == CALL_INSN)
6875 	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6876       count_reg_usage (PATTERN (x), counts, dest, incr);
6877 
6878       /* Things used in a REG_EQUAL note aren't dead since the loop
6879 	 optimizer may try to use them.  */
6880 
6881       note = find_reg_equal_equiv_note (x);
6882       if (note)
6883 	{
6884 	  rtx eqv = XEXP (note, 0);
6885 
6886 	  if (GET_CODE (eqv) == EXPR_LIST)
6887 	  /* This REG_EQUAL note describes the result of a function call.
6888 	     Process all the arguments.  */
6889 	    do
6890 	      {
6891 		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6892 		eqv = XEXP (eqv, 1);
6893 	      }
6894 	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
6895 	  else
6896 	    count_reg_usage (eqv, counts, dest, incr);
6897 	}
6898       return;
6899 
6900     case EXPR_LIST:
6901       if (REG_NOTE_KIND (x) == REG_EQUAL
6902 	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6903 	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6904 	     involving registers in the address.  */
6905 	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
6906 	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6907 
6908       count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6909       return;
6910 
6911     case ASM_OPERANDS:
6912       /* Iterate over just the inputs, not the constraints as well.  */
6913       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6914 	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6915       return;
6916 
6917     case INSN_LIST:
6918     case INT_LIST:
6919       gcc_unreachable ();
6920 
6921     default:
6922       break;
6923     }
6924 
6925   fmt = GET_RTX_FORMAT (code);
6926   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6927     {
6928       if (fmt[i] == 'e')
6929 	count_reg_usage (XEXP (x, i), counts, dest, incr);
6930       else if (fmt[i] == 'E')
6931 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6932 	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6933     }
6934 }
6935 
6936 /* Return true if X is a dead register.  */
6937 
6938 static inline int
6939 is_dead_reg (const_rtx x, int *counts)
6940 {
6941   return (REG_P (x)
6942 	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
6943 	  && counts[REGNO (x)] == 0);
6944 }
6945 
6946 /* Return true if set is live.  */
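/* Illustrative cases: a no-op move such as

       (set (reg:SI 100) (reg:SI 100))

   is never live; a set of a dead pseudo is live only if its SET_SRC
   has side effects; and with cc0, a comparison is dead when the next
   real insn no longer looks at cc0.  */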
6947 static bool
6948 set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
6949 	    int *counts)
6950 {
6951   rtx_insn *tem;
6952 
6953   if (set_noop_p (set))
6954     ;
6955 
6956   else if (GET_CODE (SET_DEST (set)) == CC0
6957 	   && !side_effects_p (SET_SRC (set))
6958 	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6959 	       || !INSN_P (tem)
6960 	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6961     return false;
6962   else if (!is_dead_reg (SET_DEST (set), counts)
6963 	   || side_effects_p (SET_SRC (set)))
6964     return true;
6965   return false;
6966 }
6967 
6968 /* Return true if insn is live.  */
6969 
6970 static bool
6971 insn_live_p (rtx_insn *insn, int *counts)
6972 {
6973   int i;
6974   if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
6975     return true;
6976   else if (GET_CODE (PATTERN (insn)) == SET)
6977     return set_live_p (PATTERN (insn), insn, counts);
6978   else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6979     {
6980       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6981 	{
6982 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
6983 
6984 	  if (GET_CODE (elt) == SET)
6985 	    {
6986 	      if (set_live_p (elt, insn, counts))
6987 		return true;
6988 	    }
6989 	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6990 	    return true;
6991 	}
6992       return false;
6993     }
6994   else if (DEBUG_INSN_P (insn))
6995     {
6996       rtx_insn *next;
6997 
6998       if (DEBUG_MARKER_INSN_P (insn))
6999 	return true;
7000 
7001       for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
7002 	if (NOTE_P (next))
7003 	  continue;
7004 	else if (!DEBUG_INSN_P (next))
7005 	  return true;
7006 	/* If we find an inspection point, such as a debug begin stmt,
7007 	   we want to keep the earlier debug insn.  */
7008 	else if (DEBUG_MARKER_INSN_P (next))
7009 	  return true;
7010 	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
7011 	  return false;
7012 
7013       return true;
7014     }
7015   else
7016     return true;
7017 }
7018 
7019 /* Count the number of stores into each pseudo.  Callback for note_stores.  */
7020 
7021 static void
7022 count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
7023 {
7024   int *counts = (int *) data;
7025   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
7026     counts[REGNO (x)]++;
7027 }
7028 
7029 /* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
7030    pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
7031    is dead and REPLACEMENTS[X] is null if it has no replacement.
7032    Set *SEEN_REPL to true if we see a dead register that does have
7033    a replacement.  */
7034 
7035 static bool
7036 is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
7037 		    bool *seen_repl)
7038 {
7039   subrtx_iterator::array_type array;
7040   FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
7041     {
7042       const_rtx x = *iter;
7043       if (is_dead_reg (x, counts))
7044 	{
7045 	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
7046 	    *seen_repl = true;
7047 	  else
7048 	    return true;
7049 	}
7050     }
7051   return false;
7052 }
7053 
7054 /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
7055    Callback for simplify_replace_fn_rtx.  */
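/* A made-up example: with REPLACEMENTS[100] = (debug_expr:SI D#1), a
   debug reference to (reg:SI 100) becomes (debug_expr:SI D#1), while a
   narrower reference such as (reg:HI 100) becomes the lowpart subreg
   (subreg:HI (debug_expr:SI D#1) 0) (offset shown for little endian).  */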
7056 
7057 static rtx
7058 replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
7059 {
7060   rtx *replacements = (rtx *) data;
7061 
7062   if (REG_P (x)
7063       && REGNO (x) >= FIRST_PSEUDO_REGISTER
7064       && replacements[REGNO (x)] != NULL_RTX)
7065     {
7066       if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
7067 	return replacements[REGNO (x)];
7068       return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
7069 			     GET_MODE (replacements[REGNO (x)]));
7070     }
7071   return NULL_RTX;
7072 }
7073 
7074 /* Scan all the insns and delete any that are dead; i.e., they store a register
7075    that is never used or they copy a register to itself.
7076 
7077    This is used to remove insns made obviously dead by cse, loop or other
7078    optimizations.  It improves the heuristics in loop since it won't try to
7079    move dead invariants out of loops or make givs for dead quantities.  The
7080    remaining passes of the compilation are also sped up.  */
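/* Typical victims (illustrative RTL):

       (set (reg:SI 100) (mult:SI (reg:SI 101) (reg:SI 102)))
       (set (reg:SI 103) (reg:SI 103))

   the first is deleted once (reg:SI 100) has a usage count of zero,
   the second is a self-copy caught by set_noop_p.  */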
7081 
7082 int
7083 delete_trivially_dead_insns (rtx_insn *insns, int nreg)
7084 {
7085   int *counts;
7086   rtx_insn *insn, *prev;
7087   rtx *replacements = NULL;
7088   int ndead = 0;
7089 
7090   timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7091   /* First count the number of times each register is used.  */
7092   if (MAY_HAVE_DEBUG_BIND_INSNS)
7093     {
7094       counts = XCNEWVEC (int, nreg * 3);
7095       for (insn = insns; insn; insn = NEXT_INSN (insn))
7096 	if (DEBUG_BIND_INSN_P (insn))
7097 	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7098 			   NULL_RTX, 1);
7099 	else if (INSN_P (insn))
7100 	  {
7101 	    count_reg_usage (insn, counts, NULL_RTX, 1);
7102 	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
7103 	  }
7104       /* If there can be debug insns, COUNTS is three consecutive arrays:
7105 	 the first counts how many times each pseudo is used outside
7106 	 of debug insns, the second counts how many times each pseudo is
7107 	 used in debug insns, and the third counts how many times a pseudo
7108 	 is stored.  */
7109     }
7110   else
7111     {
7112       counts = XCNEWVEC (int, nreg);
7113       for (insn = insns; insn; insn = NEXT_INSN (insn))
7114 	if (INSN_P (insn))
7115 	  count_reg_usage (insn, counts, NULL_RTX, 1);
7116       /* If no debug insns can be present, COUNTS is just an array
7117 	 which counts how many times each pseudo is used.  */
7118     }
7119   /* The pseudo PIC register should be considered used, since new
7120      usages of it may be generated later.  */
7121   if (!reload_completed
7122       && pic_offset_table_rtx
7123       && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
7124     counts[REGNO (pic_offset_table_rtx)]++;
7125   /* Go from the last insn to the first and delete insns that only set unused
7126      registers or copy a register to itself.  As we delete an insn, remove
7127      usage counts for registers it uses.
7128 
7129      The first jump optimization pass may leave a real insn as the last
7130      insn in the function.  We must not skip that insn or we may end
7131      up deleting code that is not really dead.
7132 
7133      If some otherwise unused register is only used in DEBUG_INSNs,
7134      try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
7135      the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
7136      has been created for the unused register, replace it with
7137      the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
7138   for (insn = get_last_insn (); insn; insn = prev)
7139     {
7140       int live_insn = 0;
7141 
7142       prev = PREV_INSN (insn);
7143       if (!INSN_P (insn))
7144 	continue;
7145 
7146       live_insn = insn_live_p (insn, counts);
7147 
7148       /* If this is a dead insn, delete it and decrement the usage counts
7149 	 of the registers it uses.  */
7150 
7151       if (! live_insn && dbg_cnt (delete_trivial_dead))
7152 	{
7153 	  if (DEBUG_INSN_P (insn))
7154 	    {
7155 	      if (DEBUG_BIND_INSN_P (insn))
7156 		count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7157 				 NULL_RTX, -1);
7158 	    }
7159 	  else
7160 	    {
7161 	      rtx set;
7162 	      if (MAY_HAVE_DEBUG_BIND_INSNS
7163 		  && (set = single_set (insn)) != NULL_RTX
7164 		  && is_dead_reg (SET_DEST (set), counts)
7165 		  /* Used at least once in some DEBUG_INSN.  */
7166 		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
7167 		  /* And set exactly once.  */
7168 		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
7169 		  && !side_effects_p (SET_SRC (set))
7170 		  && asm_noperands (PATTERN (insn)) < 0)
7171 		{
7172 		  rtx dval, bind_var_loc;
7173 		  rtx_insn *bind;
7174 
7175 		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
7176 		  dval = make_debug_expr_from_rtl (SET_DEST (set));
7177 
7178 		  /* Emit a debug bind insn before the insn in which
7179 		     reg dies.  */
7180 		  bind_var_loc =
7181 		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
7182 					  DEBUG_EXPR_TREE_DECL (dval),
7183 					  SET_SRC (set),
7184 					  VAR_INIT_STATUS_INITIALIZED);
7185 		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);
7186 
7187 		  bind = emit_debug_insn_before (bind_var_loc, insn);
7188 		  df_insn_rescan (bind);
7189 
7190 		  if (replacements == NULL)
7191 		    replacements = XCNEWVEC (rtx, nreg);
7192 		  replacements[REGNO (SET_DEST (set))] = dval;
7193 		}
7194 
7195 	      count_reg_usage (insn, counts, NULL_RTX, -1);
7196 	      ndead++;
7197 	    }
7198 	  cse_cfg_altered |= delete_insn_and_edges (insn);
7199 	}
7200     }
7201 
7202   if (MAY_HAVE_DEBUG_BIND_INSNS)
7203     {
7204       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7205 	if (DEBUG_BIND_INSN_P (insn))
7206 	  {
7207 	    /* If this debug insn references a dead register that wasn't
7208 	       replaced with a DEBUG_EXPR, reset the DEBUG_INSN.  */
7209 	    bool seen_repl = false;
7210 	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
7211 				    counts, replacements, &seen_repl))
7212 	      {
7213 		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
7214 		df_insn_rescan (insn);
7215 	      }
7216 	    else if (seen_repl)
7217 	      {
7218 		INSN_VAR_LOCATION_LOC (insn)
7219 		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
7220 					     NULL_RTX, replace_dead_reg,
7221 					     replacements);
7222 		df_insn_rescan (insn);
7223 	      }
7224 	  }
7225       free (replacements);
7226     }
7227 
7228   if (dump_file && ndead)
7229     fprintf (dump_file, "Deleted %i trivially dead insns\n",
7230 	     ndead);
7231   /* Clean up.  */
7232   free (counts);
7233   timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7234   return ndead;
7235 }
7236 
7237 /* If LOC contains references to NEWREG in a different mode, change them
7238    to use NEWREG instead.  */
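/* E.g. (with made-up modes): if NEWREG is (reg:CCZ 17), a reference
   (reg:CC 17) found inside *LOC is queued, via validate_change with
   the in-group flag set, to be replaced by NEWREG; the whole group is
   applied later in cse_change_cc_mode_insn.  */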
7239 
7240 static void
7241 cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
7242 		    rtx *loc, rtx_insn *insn, rtx newreg)
7243 {
7244   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
7245     {
7246       rtx *loc = *iter;
7247       rtx x = *loc;
7248       if (x
7249 	  && REG_P (x)
7250 	  && REGNO (x) == REGNO (newreg)
7251 	  && GET_MODE (x) != GET_MODE (newreg))
7252 	{
7253 	  validate_change (insn, loc, newreg, 1);
7254 	  iter.skip_subrtxes ();
7255 	}
7256     }
7257 }
7258 
7259 /* Change the mode of any reference to the register REGNO (NEWREG) to
7260    GET_MODE (NEWREG) in INSN.  */
7261 
7262 static void
7263 cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
7264 {
7265   int success;
7266 
7267   if (!INSN_P (insn))
7268     return;
7269 
7270   subrtx_ptr_iterator::array_type array;
7271   cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7272   cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);
7273 
7274   /* If the following assertion triggers, there is most probably
7275      something wrong with the cc_modes_compatible back end function.
7276      CC modes can only be considered compatible if the insn - with the mode
7277      replaced by any of the compatible modes - can still be recognized.  */
7278   success = apply_change_group ();
7279   gcc_assert (success);
7280 }
7281 
7282 /* Change the mode of any reference to the register REGNO (NEWREG) to
7283    GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7284    any instruction which modifies NEWREG.  */
7285 
7286 static void
7287 cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
7288 {
7289   rtx_insn *insn;
7290 
7291   for (insn = start; insn != end; insn = NEXT_INSN (insn))
7292     {
7293       if (! INSN_P (insn))
7294 	continue;
7295 
7296       if (reg_set_p (newreg, insn))
7297 	return;
7298 
7299       cse_change_cc_mode_insn (insn, newreg);
7300     }
7301 }
7302 
7303 /* BB is a basic block which finishes with CC_REG as a condition code
7304    register which is set to CC_SRC.  Look through the successors of BB
7305    to find blocks which have a single predecessor (i.e., this one),
7306    and look through those blocks for an assignment to CC_REG which is
7307    equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7308    permitted to change the mode of CC_SRC to a compatible mode.  This
7309    returns VOIDmode if no equivalent assignments were found.
7310    Otherwise it returns the mode which CC_SRC should wind up with.
7311    ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7312    but is passed unmodified down to recursive calls in order to prevent
7313    endless recursion.
7314 
7315    The main complexity in this function is handling the mode issues.
7316    We may have more than one duplicate which we can eliminate, and we
7317    try to find a mode which will work for multiple duplicates.  */
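/* An illustrative scenario: BB ends with

       (set (reg:CC FLAGS) (compare:CC (reg:SI 100) (const_int 0)))

   followed by a conditional jump, and a successor block with BB as its
   single predecessor recomputes the same comparison, possibly in a
   different CC mode.  The duplicate set is deleted; if the two modes
   differ but targetm.cc_modes_compatible yields a common mode, the
   surviving comparison is switched to it.  (FLAGS and the modes are
   stand-ins for a target's actual CC register and modes.)  */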
7318 
7319 static machine_mode
7320 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7321 	      bool can_change_mode)
7322 {
7323   bool found_equiv;
7324   machine_mode mode;
7325   unsigned int insn_count;
7326   edge e;
7327   rtx_insn *insns[2];
7328   machine_mode modes[2];
7329   rtx_insn *last_insns[2];
7330   unsigned int i;
7331   rtx newreg;
7332   edge_iterator ei;
7333 
7334   /* We expect to have two successors.  Look at both before picking
7335      the final mode for the comparison.  If we have more successors
7336      (i.e., some sort of table jump, although that seems unlikely),
7337      then we require all beyond the first two to use the same
7338      mode.  */
7339 
7340   found_equiv = false;
7341   mode = GET_MODE (cc_src);
7342   insn_count = 0;
7343   FOR_EACH_EDGE (e, ei, bb->succs)
7344     {
7345       rtx_insn *insn;
7346       rtx_insn *end;
7347 
7348       if (e->flags & EDGE_COMPLEX)
7349 	continue;
7350 
7351       if (EDGE_COUNT (e->dest->preds) != 1
7352 	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
7353 	  /* Avoid endless recursion on unreachable blocks.  */
7354 	  || e->dest == orig_bb)
7355 	continue;
7356 
7357       end = NEXT_INSN (BB_END (e->dest));
7358       for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7359 	{
7360 	  rtx set;
7361 
7362 	  if (! INSN_P (insn))
7363 	    continue;
7364 
7365 	  /* If CC_SRC is modified, we have to stop looking for
7366 	     something which uses it.  */
7367 	  if (modified_in_p (cc_src, insn))
7368 	    break;
7369 
7370 	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7371 	  set = single_set (insn);
7372 	  if (set
7373 	      && REG_P (SET_DEST (set))
7374 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7375 	    {
7376 	      bool found;
7377 	      machine_mode set_mode;
7378 	      machine_mode comp_mode;
7379 
7380 	      found = false;
7381 	      set_mode = GET_MODE (SET_SRC (set));
7382 	      comp_mode = set_mode;
7383 	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7384 		found = true;
7385 	      else if (GET_CODE (cc_src) == COMPARE
7386 		       && GET_CODE (SET_SRC (set)) == COMPARE
7387 		       && mode != set_mode
7388 		       && rtx_equal_p (XEXP (cc_src, 0),
7389 				       XEXP (SET_SRC (set), 0))
7390 		       && rtx_equal_p (XEXP (cc_src, 1),
7391 				       XEXP (SET_SRC (set), 1)))
7392 
7393 		{
7394 		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7395 		  if (comp_mode != VOIDmode
7396 		      && (can_change_mode || comp_mode == mode))
7397 		    found = true;
7398 		}
7399 
7400 	      if (found)
7401 		{
7402 		  found_equiv = true;
7403 		  if (insn_count < ARRAY_SIZE (insns))
7404 		    {
7405 		      insns[insn_count] = insn;
7406 		      modes[insn_count] = set_mode;
7407 		      last_insns[insn_count] = end;
7408 		      ++insn_count;
7409 
7410 		      if (mode != comp_mode)
7411 			{
7412 			  gcc_assert (can_change_mode);
7413 			  mode = comp_mode;
7414 
7415 			  /* The modified insn will be re-recognized later.  */
7416 			  PUT_MODE (cc_src, mode);
7417 			}
7418 		    }
7419 		  else
7420 		    {
7421 		      if (set_mode != mode)
7422 			{
7423 			  /* We found a matching expression in the
7424 			     wrong mode, but we don't have room to
7425 			     store it in the array.  Punt.  This case
7426 			     should be rare.  */
7427 			  break;
7428 			}
7429 		      /* INSN sets CC_REG to a value equal to CC_SRC
7430 			 with the right mode.  We can simply delete
7431 			 it.  */
7432 		      delete_insn (insn);
7433 		    }
7434 
7435 		  /* We found an instruction to delete.  Keep looking,
7436 		     in the hopes of finding a three-way jump.  */
7437 		  continue;
7438 		}
7439 
7440 	      /* We found an instruction which sets the condition
7441 		 code, so don't look any farther.  */
7442 	      break;
7443 	    }
7444 
7445 	  /* If INSN sets CC_REG in some other way, don't look any
7446 	     farther.  */
7447 	  if (reg_set_p (cc_reg, insn))
7448 	    break;
7449 	}
7450 
7451       /* If we fell off the bottom of the block, we can keep looking
7452 	 through successors.  We pass CAN_CHANGE_MODE as false because
7453 	 we aren't prepared to handle compatibility between the
7454 	 further blocks and this block.  */
7455       if (insn == end)
7456 	{
7457 	  machine_mode submode;
7458 
7459 	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7460 	  if (submode != VOIDmode)
7461 	    {
7462 	      gcc_assert (submode == mode);
7463 	      found_equiv = true;
7464 	      can_change_mode = false;
7465 	    }
7466 	}
7467     }
7468 
7469   if (! found_equiv)
7470     return VOIDmode;
7471 
7472   /* Now INSN_COUNT is the number of instructions we found which set
7473      CC_REG to a value equivalent to CC_SRC.  The instructions are in
7474      INSNS.  The modes used by those instructions are in MODES.  */
7475 
7476   newreg = NULL_RTX;
7477   for (i = 0; i < insn_count; ++i)
7478     {
7479       if (modes[i] != mode)
7480 	{
7481 	  /* We need to change the mode of CC_REG in INSNS[i] and
7482 	     subsequent instructions.  */
7483 	  if (! newreg)
7484 	    {
7485 	      if (GET_MODE (cc_reg) == mode)
7486 		newreg = cc_reg;
7487 	      else
7488 		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7489 	    }
7490 	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7491 				    newreg);
7492 	}
7493 
7494       cse_cfg_altered |= delete_insn_and_edges (insns[i]);
7495     }
7496 
7497   return mode;
7498 }
7499 
7500 /* If we have a fixed condition code register (or two), walk through
7501    the instructions and try to eliminate duplicate assignments.  */
7502 
7503 static void
7504 cse_condition_code_reg (void)
7505 {
7506   unsigned int cc_regno_1;
7507   unsigned int cc_regno_2;
7508   rtx cc_reg_1;
7509   rtx cc_reg_2;
7510   basic_block bb;
7511 
7512   if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7513     return;
7514 
7515   cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7516   if (cc_regno_2 != INVALID_REGNUM)
7517     cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7518   else
7519     cc_reg_2 = NULL_RTX;
7520 
7521   FOR_EACH_BB_FN (bb, cfun)
7522     {
7523       rtx_insn *last_insn;
7524       rtx cc_reg;
7525       rtx_insn *insn;
7526       rtx_insn *cc_src_insn;
7527       rtx cc_src;
7528       machine_mode mode;
7529       machine_mode orig_mode;
7530 
7531       /* Look for blocks which end with a conditional jump based on a
7532 	 condition code register.  Then look for the instruction which
7533 	 sets the condition code register.  Then look through the
7534 	 successor blocks for instructions which set the condition
7535 	 code register to the same value.  There are other possible
7536 	 uses of the condition code register, but these are by far the
7537 	 most common and the ones which we are most likely to be able
7538 	 to optimize.  */
7539 
7540       last_insn = BB_END (bb);
7541       if (!JUMP_P (last_insn))
7542 	continue;
7543 
7544       if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7545 	cc_reg = cc_reg_1;
7546       else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7547 	cc_reg = cc_reg_2;
7548       else
7549 	continue;
7550 
7551       cc_src_insn = NULL;
7552       cc_src = NULL_RTX;
7553       for (insn = PREV_INSN (last_insn);
7554 	   insn && insn != PREV_INSN (BB_HEAD (bb));
7555 	   insn = PREV_INSN (insn))
7556 	{
7557 	  rtx set;
7558 
7559 	  if (! INSN_P (insn))
7560 	    continue;
7561 	  set = single_set (insn);
7562 	  if (set
7563 	      && REG_P (SET_DEST (set))
7564 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7565 	    {
7566 	      cc_src_insn = insn;
7567 	      cc_src = SET_SRC (set);
7568 	      break;
7569 	    }
7570 	  else if (reg_set_p (cc_reg, insn))
7571 	    break;
7572 	}
7573 
7574       if (! cc_src_insn)
7575 	continue;
7576 
7577       if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7578 	continue;
7579 
7580       /* Now CC_REG is a condition code register used for a
7581 	 conditional jump at the end of the block, and CC_SRC, in
7582 	 CC_SRC_INSN, is the value to which that condition code
7583 	 register is set, and CC_SRC is still meaningful at the end of
7584 	 the basic block.  */
7585 
7586       orig_mode = GET_MODE (cc_src);
7587       mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7588       if (mode != VOIDmode)
7589 	{
7590 	  gcc_assert (mode == GET_MODE (cc_src));
7591 	  if (mode != orig_mode)
7592 	    {
7593 	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7594 
7595 	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7596 
7597 	      /* Do the same in the following insns that use the
7598 		 current value of CC_REG within BB.  */
7599 	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7600 					NEXT_INSN (last_insn),
7601 					newreg);
7602 	    }
7603 	}
7604     }
7605 }
7606 
7607 
7608 /* Perform common subexpression elimination.  Nonzero value from
7609    `cse_main' means that jumps were simplified and some code may now
7610    be unreachable, so do jump optimization again.  */
7611 static unsigned int
7612 rest_of_handle_cse (void)
7613 {
7614   int tem;
7615 
7616   if (dump_file)
7617     dump_flow_info (dump_file, dump_flags);
7618 
7619   tem = cse_main (get_insns (), max_reg_num ());
7620 
7621   /* If we are not running more CSE passes, then we are no longer
7622      expecting CSE to be run.  But always rerun it in a cheap mode.  */
7623   cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7624 
7625   if (tem == 2)
7626     {
7627       timevar_push (TV_JUMP);
7628       rebuild_jump_labels (get_insns ());
7629       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7630       timevar_pop (TV_JUMP);
7631     }
7632   else if (tem == 1 || optimize > 1)
7633     cse_cfg_altered |= cleanup_cfg (0);
7634 
7635   return 0;
7636 }
7637 
7638 namespace {
7639 
7640 const pass_data pass_data_cse =
7641 {
7642   RTL_PASS, /* type */
7643   "cse1", /* name */
7644   OPTGROUP_NONE, /* optinfo_flags */
7645   TV_CSE, /* tv_id */
7646   0, /* properties_required */
7647   0, /* properties_provided */
7648   0, /* properties_destroyed */
7649   0, /* todo_flags_start */
7650   TODO_df_finish, /* todo_flags_finish */
7651 };
7652 
7653 class pass_cse : public rtl_opt_pass
7654 {
7655 public:
7656   pass_cse (gcc::context *ctxt)
7657     : rtl_opt_pass (pass_data_cse, ctxt)
7658   {}
7659 
7660   /* opt_pass methods: */
7661   virtual bool gate (function *) { return optimize > 0; }
7662   virtual unsigned int execute (function *) { return rest_of_handle_cse (); }
7663 
7664 }; // class pass_cse
7665 
7666 } // anon namespace
7667 
7668 rtl_opt_pass *
7669 make_pass_cse (gcc::context *ctxt)
7670 {
7671   return new pass_cse (ctxt);
7672 }
7673 
7674 
7675 /* Run second CSE pass after loop optimizations.  */
7676 static unsigned int
7677 rest_of_handle_cse2 (void)
7678 {
7679   int tem;
7680 
7681   if (dump_file)
7682     dump_flow_info (dump_file, dump_flags);
7683 
7684   tem = cse_main (get_insns (), max_reg_num ());
7685 
7686   /* Run a pass to eliminate duplicated assignments to condition code
7687      registers.  We have to run this after bypass_jumps, because this
7688      pass would make it harder for bypass_jumps to determine whether
7689      a jump can be bypassed safely.  */
7690   cse_condition_code_reg ();
7691 
7692   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7693 
7694   if (tem == 2)
7695     {
7696       timevar_push (TV_JUMP);
7697       rebuild_jump_labels (get_insns ());
7698       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7699       timevar_pop (TV_JUMP);
7700     }
7701   else if (tem == 1)
7702     cse_cfg_altered |= cleanup_cfg (0);
7703 
7704   cse_not_expected = 1;
7705   return 0;
7706 }
7707 
7708 
7709 namespace {
7710 
7711 const pass_data pass_data_cse2 =
7712 {
7713   RTL_PASS, /* type */
7714   "cse2", /* name */
7715   OPTGROUP_NONE, /* optinfo_flags */
7716   TV_CSE2, /* tv_id */
7717   0, /* properties_required */
7718   0, /* properties_provided */
7719   0, /* properties_destroyed */
7720   0, /* todo_flags_start */
7721   TODO_df_finish, /* todo_flags_finish */
7722 };
7723 
7724 class pass_cse2 : public rtl_opt_pass
7725 {
7726 public:
7727   pass_cse2 (gcc::context *ctxt)
7728     : rtl_opt_pass (pass_data_cse2, ctxt)
7729   {}
7730 
7731   /* opt_pass methods: */
7732   virtual bool gate (function *)
7733     {
7734       return optimize > 0 && flag_rerun_cse_after_loop;
7735     }
7736 
7737   virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }
7738 
7739 }; // class pass_cse2
7740 
7741 } // anon namespace
7742 
7743 rtl_opt_pass *
7744 make_pass_cse2 (gcc::context *ctxt)
7745 {
7746   return new pass_cse2 (ctxt);
7747 }
7748 
7749 /* Run a local CSE pass after global optimizations (GCSE, jump bypassing).  */
7750 static unsigned int
7751 rest_of_handle_cse_after_global_opts (void)
7752 {
7753   int save_cfj;
7754   int tem;
7755 
7756   /* We only want to do local CSE, so don't follow jumps.  */
7757   save_cfj = flag_cse_follow_jumps;
7758   flag_cse_follow_jumps = 0;
7759 
7760   rebuild_jump_labels (get_insns ());
7761   tem = cse_main (get_insns (), max_reg_num ());
7762   cse_cfg_altered |= purge_all_dead_edges ();
7763   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7764 
7765   cse_not_expected = !flag_rerun_cse_after_loop;
7766 
7767   /* If cse altered any jumps, rerun jump opts to clean things up.  */
7768   if (tem == 2)
7769     {
7770       timevar_push (TV_JUMP);
7771       rebuild_jump_labels (get_insns ());
7772       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7773       timevar_pop (TV_JUMP);
7774     }
7775   else if (tem == 1)
7776     cse_cfg_altered |= cleanup_cfg (0);
7777 
7778   flag_cse_follow_jumps = save_cfj;
7779   return 0;
7780 }
7781 
7782 namespace {
7783 
7784 const pass_data pass_data_cse_after_global_opts =
7785 {
7786   RTL_PASS, /* type */
7787   "cse_local", /* name */
7788   OPTGROUP_NONE, /* optinfo_flags */
7789   TV_CSE, /* tv_id */
7790   0, /* properties_required */
7791   0, /* properties_provided */
7792   0, /* properties_destroyed */
7793   0, /* todo_flags_start */
7794   TODO_df_finish, /* todo_flags_finish */
7795 };
7796 
7797 class pass_cse_after_global_opts : public rtl_opt_pass
7798 {
7799 public:
7800   pass_cse_after_global_opts (gcc::context *ctxt)
7801     : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
7802   {}
7803 
7804   /* opt_pass methods: */
7805   virtual bool gate (function *)
7806     {
7807       return optimize > 0 && flag_rerun_cse_after_global_opts;
7808     }
7809 
7810   virtual unsigned int execute (function *)
7811     {
7812       return rest_of_handle_cse_after_global_opts ();
7813     }
7814 
7815 }; // class pass_cse_after_global_opts
7816 
7817 } // anon namespace
7818 
7819 rtl_opt_pass *
7820 make_pass_cse_after_global_opts (gcc::context *ctxt)
7821 {
7822   return new pass_cse_after_global_opts (ctxt);
7823 }
7824