xref: /openbsd/gnu/usr.bin/gcc/gcc/cse.c (revision 06dc6460)
1 /* Common subexpression elimination for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3    1999, 2000, 2001, 2002, 2004 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS.  */
24 #include "system.h"
25 
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "timevar.h"
41 #include "target.h"
42 
43 /* The basic idea of common subexpression elimination is to go
44    through the code, keeping a record of expressions that would
45    have the same value at the current scan point, and replacing
46    expressions encountered with the cheapest equivalent expression.
47 
48    It is too complicated to keep track of the different possibilities
49    when control paths merge in this code; so, at each label, we forget all
50    that is known and start fresh.  This can be described as processing each
51    extended basic block separately.  We have a separate pass to perform
52    global CSE.
53 
54    Note CSE can turn a conditional or computed jump into a nop or
55    an unconditional jump.  When this occurs we arrange to run the jump
56    optimizer after CSE to delete the unreachable code.
57 
58    We use two data structures to record the equivalent expressions:
59    a hash table for most expressions, and a vector of "quantity
60    numbers" to record equivalent (pseudo) registers.
61 
62    The use of the special data structure for registers is desirable
63    because it is faster.  It is possible because registers references
64    contain a fairly small number, the register number, taken from
65    a contiguously allocated series, and two register references are
66    identical if they have the same number.  General expressions
67    do not have any such thing, so the only way to retrieve the
68    information recorded on an expression other than a register
69    is to keep it in a hash table.
70 
71 Registers and "quantity numbers":
72 
73    At the start of each basic block, all of the (hardware and pseudo)
74    registers used in the function are given distinct quantity
75    numbers to indicate their contents.  During scan, when the code
76    copies one register into another, we copy the quantity number.
77    When a register is loaded in any other way, we allocate a new
78    quantity number to describe the value generated by this operation.
79    `reg_qty' records what quantity a register is currently thought
80    of as containing.
81 
82    All real quantity numbers are greater than or equal to `max_reg'.
83    If register N has not been assigned a quantity, reg_qty[N] will equal N.
84 
85    Quantity numbers below `max_reg' do not exist and none of the `qty_table'
86    entries should be referenced with an index below `max_reg'.
87 
88    We also maintain a bidirectional chain of registers for each
89    quantity number.  The `qty_table` members `first_reg' and `last_reg',
90    and `reg_eqv_table' members `next' and `prev' hold these chains.
91 
92    The first register in a chain is the one whose lifespan is least local.
93    Among equals, it is the one that was seen first.
94    We replace any equivalent register with that one.
95 
96    If two registers have the same quantity number, it must be true that
97    REG expressions with qty_table `mode' must be in the hash table for both
98    registers and must be in the same class.
99 
100    The converse is not true.  Since hard registers may be referenced in
101    any mode, two REG expressions might be equivalent in the hash table
102    but not have the same quantity number if the quantity number of one
103    of the registers is not the same mode as those expressions.
104 
105 Constants and quantity numbers
106 
107    When a quantity has a known constant value, that value is stored
108    in the appropriate qty_table `const_rtx'.  This is in addition to
109    putting the constant in the hash table as is usual for non-regs.
110 
111    Whether a reg or a constant is preferred is determined by the configuration
112    macro CONST_COSTS and will often depend on the constant value.  In any
113    event, expressions containing constants can be simplified, by fold_rtx.
114 
115    When a quantity has a known nearly constant value (such as an address
116    of a stack slot), that value is stored in the appropriate qty_table
117    `const_rtx'.
118 
119    Integer constants don't have a machine mode.  However, cse
120    determines the intended machine mode from the destination
121    of the instruction that moves the constant.  The machine mode
122    is recorded in the hash table along with the actual RTL
123    constant expression so that different modes are kept separate.
124 
125 Other expressions:
126 
127    To record known equivalences among expressions in general
128    we use a hash table called `table'.  It has a fixed number of buckets
129    that contain chains of `struct table_elt' elements for expressions.
130    These chains connect the elements whose expressions have the same
131    hash codes.
132 
133    Other chains through the same elements connect the elements which
134    currently have equivalent values.
135 
136    Register references in an expression are canonicalized before hashing
137    the expression.  This is done using `reg_qty' and qty_table `first_reg'.
138    The hash code of a register reference is computed using the quantity
139    number, not the register number.
140 
141    When the value of an expression changes, it is necessary to remove from the
142    hash table not just that expression but all expressions whose values
143    could be different as a result.
144 
145      1. If the value changing is in memory, except in special cases
146      ANYTHING referring to memory could be changed.  That is because
147      nobody knows where a pointer does not point.
148      The function `invalidate_memory' removes what is necessary.
149 
150      The special cases are when the address is constant or is
151      a constant plus a fixed register such as the frame pointer
152      or a static chain pointer.  When such addresses are stored in,
153      we can tell exactly which other such addresses must be invalidated
154      due to overlap.  `invalidate' does this.
155      All expressions that refer to non-constant
156      memory addresses are also invalidated.  `invalidate_memory' does this.
157 
158      2. If the value changing is a register, all expressions
159      containing references to that register, and only those,
160      must be removed.
161 
162    Because searching the entire hash table for expressions that contain
163    a register is very slow, we try to figure out when it isn't necessary.
164    Precisely, this is necessary only when expressions have been
165    entered in the hash table using this register, and then the value has
166    changed, and then another expression wants to be added to refer to
167    the register's new value.  This sequence of circumstances is rare
168    within any one basic block.
169 
170    The vectors `reg_tick' and `reg_in_table' are used to detect this case.
171    reg_tick[i] is incremented whenever a value is stored in register i.
172    reg_in_table[i] holds -1 if no references to register i have been
173    entered in the table; otherwise, it contains the value reg_tick[i] had
174    when the references were entered.  If we want to enter a reference
175    and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
176    Until we want to enter a new entry, the mere fact that the two vectors
177    don't match makes the entries be ignored if anyone tries to match them.
178 
179    Registers themselves are entered in the hash table as well as in
180    the equivalent-register chains.  However, the vectors `reg_tick'
181    and `reg_in_table' do not apply to expressions which are simple
182    register references.  These expressions are removed from the table
183    immediately when they become invalid, and this can be done even if
184    we do not immediately search for all the expressions that refer to
185    the register.
186 
187    A CLOBBER rtx in an instruction invalidates its operand for further
188    reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
189    invalidates everything that resides in memory.
190 
191 Related expressions:
192 
193    Constant expressions that differ only by an additive integer
194    are called related.  When a constant expression is put in
195    the table, the related expression with no constant term
196    is also entered.  These are made to point at each other
197    so that it is possible to find out if there exists any
198    register equivalent to an expression related to a given expression.  */
199 
200 /* One plus largest register number used in this function.  */
201 
202 static int max_reg;
203 
204 /* One plus largest instruction UID used in this function at time of
205    cse_main call.  */
206 
207 static int max_insn_uid;
208 
209 /* Length of qty_table vector.  We know in advance we will not need
210    a quantity number this big.  */
211 
212 static int max_qty;
213 
214 /* Next quantity number to be allocated.
215    This is 1 + the largest number needed so far.  */
216 
217 static int next_qty;
218 
219 /* Per-qty information tracking.
220 
221    `first_reg' and `last_reg' track the head and tail of the
222    chain of registers which currently contain this quantity.
223 
224    `mode' contains the machine mode of this quantity.
225 
226    `const_rtx' holds the rtx of the constant value of this
227    quantity, if known.  A summations of the frame/arg pointer
228    and a constant can also be entered here.  When this holds
229    a known value, `const_insn' is the insn which stored the
230    constant value.
231 
232    `comparison_{code,const,qty}' are used to track when a
233    comparison between a quantity and some constant or register has
234    been passed.  In such a case, we know the results of the comparison
235    in case we see it again.  These members record a comparison that
236    is known to be true.  `comparison_code' holds the rtx code of such
237    a comparison, else it is set to UNKNOWN and the other two
238    comparison members are undefined.  `comparison_const' holds
239    the constant being compared against, or zero if the comparison
240    is not against a constant.  `comparison_qty' holds the quantity
241    being compared against when the result is known.  If the comparison
242    is not with a register, `comparison_qty' is -1.  */
243 
244 struct qty_table_elem
245 {
246   rtx const_rtx;
247   rtx const_insn;
248   rtx comparison_const;
249   int comparison_qty;
250   unsigned int first_reg, last_reg;
251   enum machine_mode mode;
252   enum rtx_code comparison_code;
253 };
254 
255 /* The table of all qtys, indexed by qty number.  */
256 static struct qty_table_elem *qty_table;
257 
258 #ifdef HAVE_cc0
259 /* For machines that have a CC0, we do not record its value in the hash
260    table since its use is guaranteed to be the insn immediately following
261    its definition and any other insn is presumed to invalidate it.
262 
263    Instead, we store below the value last assigned to CC0.  If it should
264    happen to be a constant, it is stored in preference to the actual
265    assigned value.  In case it is a constant, we store the mode in which
266    the constant should be interpreted.  */
267 
268 static rtx prev_insn_cc0;
269 static enum machine_mode prev_insn_cc0_mode;
270 #endif
271 
272 /* Previous actual insn.  0 if at first insn of basic block.  */
273 
274 static rtx prev_insn;
275 
276 /* Insn being scanned.  */
277 
278 static rtx this_insn;
279 
280 /* Index by register number, gives the number of the next (or
281    previous) register in the chain of registers sharing the same
282    value.
283 
284    Or -1 if this register is at the end of the chain.
285 
286    If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */
287 
288 /* Per-register equivalence chain.  */
289 struct reg_eqv_elem
290 {
291   int next, prev;
292 };
293 
294 /* The table of all register equivalence chains.  */
295 static struct reg_eqv_elem *reg_eqv_table;
296 
297 struct cse_reg_info
298 {
299   /* Next in hash chain.  */
300   struct cse_reg_info *hash_next;
301 
302   /* The next cse_reg_info structure in the free or used list.  */
303   struct cse_reg_info *next;
304 
305   /* Search key */
306   unsigned int regno;
307 
308   /* The quantity number of the register's current contents.  */
309   int reg_qty;
310 
311   /* The number of times the register has been altered in the current
312      basic block.  */
313   int reg_tick;
314 
315   /* The REG_TICK value at which rtx's containing this register are
316      valid in the hash table.  If this does not equal the current
317      reg_tick value, such expressions existing in the hash table are
318      invalid.  */
319   int reg_in_table;
320 
321   /* The SUBREG that was set when REG_TICK was last incremented.  Set
322      to -1 if the last store was to the whole register, not a subreg.  */
323   unsigned int subreg_ticked;
324 };
325 
326 /* A free list of cse_reg_info entries.  */
327 static struct cse_reg_info *cse_reg_info_free_list;
328 
329 /* A used list of cse_reg_info entries.  */
330 static struct cse_reg_info *cse_reg_info_used_list;
331 static struct cse_reg_info *cse_reg_info_used_list_end;
332 
333 /* A mapping from registers to cse_reg_info data structures.  */
334 #define REGHASH_SHIFT	7
335 #define REGHASH_SIZE	(1 << REGHASH_SHIFT)
336 #define REGHASH_MASK	(REGHASH_SIZE - 1)
337 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
338 
339 #define REGHASH_FN(REGNO)	\
340 	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
341 
342 /* The last lookup we did into the cse_reg_info_tree.  This allows us
343    to cache repeated lookups.  */
344 static unsigned int cached_regno;
345 static struct cse_reg_info *cached_cse_reg_info;
346 
347 /* A HARD_REG_SET containing all the hard registers for which there is
348    currently a REG expression in the hash table.  Note the difference
349    from the above variables, which indicate if the REG is mentioned in some
350    expression in the table.  */
351 
352 static HARD_REG_SET hard_regs_in_table;
353 
354 /* CUID of insn that starts the basic block currently being cse-processed.  */
355 
356 static int cse_basic_block_start;
357 
358 /* CUID of insn that ends the basic block currently being cse-processed.  */
359 
360 static int cse_basic_block_end;
361 
362 /* Vector mapping INSN_UIDs to cuids.
363    The cuids are like uids but increase monotonically always.
364    We use them to see whether a reg is used outside a given basic block.  */
365 
366 static int *uid_cuid;
367 
368 /* Highest UID in UID_CUID.  */
369 static int max_uid;
370 
371 /* Get the cuid of an insn.  */
372 
373 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
374 
375 /* Nonzero if this pass has made changes, and therefore it's
376    worthwhile to run the garbage collector.  */
377 
378 static int cse_altered;
379 
380 /* Nonzero if cse has altered conditional jump insns
381    in such a way that jump optimization should be redone.  */
382 
383 static int cse_jumps_altered;
384 
385 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
386    REG_LABEL, we have to rerun jump after CSE to put in the note.  */
387 static int recorded_label_ref;
388 
389 /* canon_hash stores 1 in do_not_record
390    if it notices a reference to CC0, PC, or some other volatile
391    subexpression.  */
392 
393 static int do_not_record;
394 
395 #ifdef LOAD_EXTEND_OP
396 
397 /* Scratch rtl used when looking for load-extended copy of a MEM.  */
398 static rtx memory_extend_rtx;
399 #endif
400 
401 /* canon_hash stores 1 in hash_arg_in_memory
402    if it notices a reference to memory within the expression being hashed.  */
403 
404 static int hash_arg_in_memory;
405 
406 /* The hash table contains buckets which are chains of `struct table_elt's,
407    each recording one expression's information.
408    That expression is in the `exp' field.
409 
410    The canon_exp field contains a canonical (from the point of view of
411    alias analysis) version of the `exp' field.
412 
413    Those elements with the same hash code are chained in both directions
414    through the `next_same_hash' and `prev_same_hash' fields.
415 
416    Each set of expressions with equivalent values
417    are on a two-way chain through the `next_same_value'
418    and `prev_same_value' fields, and all point with
419    the `first_same_value' field at the first element in
420    that chain.  The chain is in order of increasing cost.
421    Each element's cost value is in its `cost' field.
422 
423    The `in_memory' field is nonzero for elements that
424    involve any reference to memory.  These elements are removed
425    whenever a write is done to an unidentified location in memory.
426    To be safe, we assume that a memory address is unidentified unless
427    the address is either a symbol constant or a constant plus
428    the frame pointer or argument pointer.
429 
430    The `related_value' field is used to connect related expressions
431    (that differ by adding an integer).
432    The related expressions are chained in a circular fashion.
433    `related_value' is zero for expressions for which this
434    chain is not useful.
435 
436    The `cost' field stores the cost of this element's expression.
437    The `regcost' field stores the value returned by approx_reg_cost for
438    this element's expression.
439 
440    The `is_const' flag is set if the element is a constant (including
441    a fixed address).
442 
443    The `flag' field is used as a temporary during some search routines.
444 
445    The `mode' field is usually the same as GET_MODE (`exp'), but
446    if `exp' is a CONST_INT and has no machine mode then the `mode'
447    field is the mode it was being used as.  Each constant is
448    recorded separately for each mode it is used with.  */
449 
450 struct table_elt
451 {
452   rtx exp;
453   rtx canon_exp;
454   struct table_elt *next_same_hash;
455   struct table_elt *prev_same_hash;
456   struct table_elt *next_same_value;
457   struct table_elt *prev_same_value;
458   struct table_elt *first_same_value;
459   struct table_elt *related_value;
460   int cost;
461   int regcost;
462   enum machine_mode mode;
463   char in_memory;
464   char is_const;
465   char flag;
466 };
467 
468 /* We don't want a lot of buckets, because we rarely have very many
469    things stored in the hash table, and a lot of buckets slows
470    down a lot of loops that happen frequently.  */
471 #define HASH_SHIFT	5
472 #define HASH_SIZE	(1 << HASH_SHIFT)
473 #define HASH_MASK	(HASH_SIZE - 1)
474 
475 /* Compute hash code of X in mode M.  Special-case case where X is a pseudo
476    register (hard registers may require `do_not_record' to be set).  */
477 
478 #define HASH(X, M)	\
479  ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
480   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
481   : canon_hash (X, M)) & HASH_MASK)
482 
483 /* Determine whether register number N is considered a fixed register for the
484    purpose of approximating register costs.
485    It is desirable to replace other regs with fixed regs, to reduce need for
486    non-fixed hard regs.
487    A reg wins if it is either the frame pointer or designated as fixed.  */
488 #define FIXED_REGNO_P(N)  \
489   ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
490    || fixed_regs[N] || global_regs[N])
491 
492 /* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
493    hard registers and pointers into the frame are the cheapest with a cost
494    of 0.  Next come pseudos with a cost of one and other hard registers with
495    a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
496 
497 #define CHEAP_REGNO(N) \
498   ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
499    || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
500    || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
501    || ((N) < FIRST_PSEUDO_REGISTER					\
502        && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
503 
504 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
505 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
506 
507 /* Get the info associated with register N.  */
508 
509 #define GET_CSE_REG_INFO(N) 			\
510   (((N) == cached_regno && cached_cse_reg_info)	\
511    ? cached_cse_reg_info : get_cse_reg_info ((N)))
512 
513 /* Get the number of times this register has been updated in this
514    basic block.  */
515 
516 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
517 
518 /* Get the point at which REG was recorded in the table.  */
519 
520 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
521 
522 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
523    SUBREG).  */
524 
525 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
526 
527 /* Get the quantity number for REG.  */
528 
529 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
530 
531 /* Determine if the quantity number for register X represents a valid index
532    into the qty_table.  */
533 
534 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
535 
536 static struct table_elt *table[HASH_SIZE];
537 
538 /* Chain of `struct table_elt's made so far for this function
539    but currently removed from the table.  */
540 
541 static struct table_elt *free_element_chain;
542 
543 /* Number of `struct table_elt' structures made so far for this function.  */
544 
545 static int n_elements_made;
546 
547 /* Maximum value `n_elements_made' has had so far in this compilation
548    for functions previously processed.  */
549 
550 static int max_elements_made;
551 
552 /* Surviving equivalence class when two equivalence classes are merged
553    by recording the effects of a jump in the last insn.  Zero if the
554    last insn was not a conditional jump.  */
555 
556 static struct table_elt *last_jump_equiv_class;
557 
558 /* Set to the cost of a constant pool reference if one was found for a
559    symbolic constant.  If this was found, it means we should try to
560    convert constants into constant pool entries if they don't fit in
561    the insn.  */
562 
563 static int constant_pool_entries_cost;
564 
565 /* Define maximum length of a branch path.  */
566 
567 #define PATHLENGTH	10
568 
569 /* This data describes a block that will be processed by cse_basic_block.  */
570 
571 struct cse_basic_block_data
572 {
573   /* Lowest CUID value of insns in block.  */
574   int low_cuid;
575   /* Highest CUID value of insns in block.  */
576   int high_cuid;
577   /* Total number of SETs in block.  */
578   int nsets;
579   /* Last insn in the block.  */
580   rtx last;
581   /* Size of current branch path, if any.  */
582   int path_size;
583   /* Current branch path, indicating which branches will be taken.  */
584   struct branch_path
585     {
586       /* The branch insn.  */
587       rtx branch;
588       /* Whether it should be taken or not.  AROUND is the same as taken
589 	 except that it is used when the destination label is not preceded
590        by a BARRIER.  */
591       enum taken {TAKEN, NOT_TAKEN, AROUND} status;
592     } path[PATHLENGTH];
593 };
594 
595 /* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
596    virtual regs here because the simplify_*_operation routines are called
597    by integrate.c, which is called before virtual register instantiation.
598 
599    ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
600    a header file so that their definitions can be shared with the
601    simplification routines in simplify-rtx.c.  Until then, do not
602    change these macros without also changing the copy in simplify-rtx.c.  */
603 
604 #define FIXED_BASE_PLUS_P(X)					\
605   ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
606    || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
607    || (X) == virtual_stack_vars_rtx				\
608    || (X) == virtual_incoming_args_rtx				\
609    || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
610        && (XEXP (X, 0) == frame_pointer_rtx			\
611 	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
612 	   || ((X) == arg_pointer_rtx				\
613 	       && fixed_regs[ARG_POINTER_REGNUM])		\
614 	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
615 	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
616    || GET_CODE (X) == ADDRESSOF)
617 
618 /* Similar, but also allows reference to the stack pointer.
619 
620    This used to include FIXED_BASE_PLUS_P, however, we can't assume that
621    arg_pointer_rtx by itself is nonzero, because on at least one machine,
622    the i960, the arg pointer is zero when it is unused.  */
623 
624 #define NONZERO_BASE_PLUS_P(X)					\
625   ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
626    || (X) == virtual_stack_vars_rtx				\
627    || (X) == virtual_incoming_args_rtx				\
628    || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
629        && (XEXP (X, 0) == frame_pointer_rtx			\
630 	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
631 	   || ((X) == arg_pointer_rtx				\
632 	       && fixed_regs[ARG_POINTER_REGNUM])		\
633 	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
634 	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
635    || (X) == stack_pointer_rtx					\
636    || (X) == virtual_stack_dynamic_rtx				\
637    || (X) == virtual_outgoing_args_rtx				\
638    || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
639        && (XEXP (X, 0) == stack_pointer_rtx			\
640 	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
641 	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
642    || GET_CODE (X) == ADDRESSOF)
643 
644 static int notreg_cost		PARAMS ((rtx, enum rtx_code));
645 static int approx_reg_cost_1	PARAMS ((rtx *, void *));
646 static int approx_reg_cost	PARAMS ((rtx));
647 static int preferrable		PARAMS ((int, int, int, int));
648 static void new_basic_block	PARAMS ((void));
649 static void make_new_qty	PARAMS ((unsigned int, enum machine_mode));
650 static void make_regs_eqv	PARAMS ((unsigned int, unsigned int));
651 static void delete_reg_equiv	PARAMS ((unsigned int));
652 static int mention_regs		PARAMS ((rtx));
653 static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
654 static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
655 static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
656        *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
657 static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
658 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
659 					 enum machine_mode));
660 static void merge_equiv_classes PARAMS ((struct table_elt *,
661 					 struct table_elt *));
662 static void invalidate		PARAMS ((rtx, enum machine_mode));
663 static int cse_rtx_varies_p	PARAMS ((rtx, int));
664 static void remove_invalid_refs	PARAMS ((unsigned int));
665 static void remove_invalid_subreg_refs	PARAMS ((unsigned int, unsigned int,
666 						 enum machine_mode));
667 static void rehash_using_reg	PARAMS ((rtx));
668 static void invalidate_memory	PARAMS ((void));
669 static void invalidate_for_call	PARAMS ((void));
670 static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
671 static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
672 static unsigned canon_hash_string PARAMS ((const char *));
673 static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
674 static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
675 static rtx canon_reg		PARAMS ((rtx, rtx));
676 static void find_best_addr	PARAMS ((rtx, rtx *, enum machine_mode));
677 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
678 						   enum machine_mode *,
679 						   enum machine_mode *));
680 static rtx fold_rtx		PARAMS ((rtx, rtx));
681 static rtx equiv_constant	PARAMS ((rtx));
682 static void record_jump_equiv	PARAMS ((rtx, int));
683 static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
684 					 rtx, rtx, int));
685 static void cse_insn		PARAMS ((rtx, rtx));
686 static int addr_affects_sp_p	PARAMS ((rtx));
687 static void invalidate_from_clobbers PARAMS ((rtx));
688 static rtx cse_process_notes	PARAMS ((rtx, rtx));
689 static void cse_around_loop	PARAMS ((rtx));
690 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
691 static void invalidate_skipped_block PARAMS ((rtx));
692 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
693 static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
694 static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
695 static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
696 static int check_for_label_ref	PARAMS ((rtx *, void *));
697 extern void dump_class          PARAMS ((struct table_elt*));
698 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
699 static int check_dependence	PARAMS ((rtx *, void *));
700 
701 static void flush_hash_table	PARAMS ((void));
702 static bool insn_live_p		PARAMS ((rtx, int *));
703 static bool set_live_p		PARAMS ((rtx, rtx, int *));
704 static bool dead_libcall_p	PARAMS ((rtx, int *));
705 static int cse_change_cc_mode	PARAMS ((rtx *, void *));
706 static void cse_change_cc_mode_insns PARAMS ((rtx, rtx, rtx));
707 static enum machine_mode cse_cc_succs PARAMS ((basic_block, rtx, rtx, int));
708 
709 /* Dump the expressions in the equivalence class indicated by CLASSP.
710    This function is used only for debugging.  */
711 void
dump_class(classp)712 dump_class (classp)
713      struct table_elt *classp;
714 {
715   struct table_elt *elt;
716 
717   fprintf (stderr, "Equivalence chain for ");
718   print_rtl (stderr, classp->exp);
719   fprintf (stderr, ": \n");
720 
721   for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
722     {
723       print_rtl (stderr, elt->exp);
724       fprintf (stderr, "\n");
725     }
726 }
727 
728 /* Subroutine of approx_reg_cost; called through for_each_rtx.  */
729 
730 static int
approx_reg_cost_1(xp,data)731 approx_reg_cost_1 (xp, data)
732      rtx *xp;
733      void *data;
734 {
735   rtx x = *xp;
736   int *cost_p = data;
737 
738   if (x && GET_CODE (x) == REG)
739     {
740       unsigned int regno = REGNO (x);
741 
742       if (! CHEAP_REGNO (regno))
743 	{
744 	  if (regno < FIRST_PSEUDO_REGISTER)
745 	    {
746 	      if (SMALL_REGISTER_CLASSES)
747 		return 1;
748 	      *cost_p += 2;
749 	    }
750 	  else
751 	    *cost_p += 1;
752 	}
753     }
754 
755   return 0;
756 }
757 
758 /* Return an estimate of the cost of the registers used in an rtx.
759    This is mostly the number of different REG expressions in the rtx;
760    however for some exceptions like fixed registers we use a cost of
761    0.  If any other hard register reference occurs, return MAX_COST.  */
762 
763 static int
approx_reg_cost(x)764 approx_reg_cost (x)
765      rtx x;
766 {
767   int cost = 0;
768 
769   if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
770     return MAX_COST;
771 
772   return cost;
773 }
774 
775 /* Return a negative value if an rtx A, whose costs are given by COST_A
776    and REGCOST_A, is more desirable than an rtx B.
777    Return a positive value if A is less desirable, or 0 if the two are
778    equally good.  */
779 static int
preferrable(cost_a,regcost_a,cost_b,regcost_b)780 preferrable (cost_a, regcost_a, cost_b, regcost_b)
781      int cost_a, regcost_a, cost_b, regcost_b;
782 {
783   /* First, get rid of cases involving expressions that are entirely
784      unwanted.  */
785   if (cost_a != cost_b)
786     {
787       if (cost_a == MAX_COST)
788 	return 1;
789       if (cost_b == MAX_COST)
790 	return -1;
791     }
792 
793   /* Avoid extending lifetimes of hardregs.  */
794   if (regcost_a != regcost_b)
795     {
796       if (regcost_a == MAX_COST)
797 	return 1;
798       if (regcost_b == MAX_COST)
799 	return -1;
800     }
801 
802   /* Normal operation costs take precedence.  */
803   if (cost_a != cost_b)
804     return cost_a - cost_b;
805   /* Only if these are identical consider effects on register pressure.  */
806   if (regcost_a != regcost_b)
807     return regcost_a - regcost_b;
808   return 0;
809 }
810 
811 /* Internal function, to compute cost when X is not a register; called
812    from COST macro to keep it simple.  */
813 
814 static int
notreg_cost(x,outer)815 notreg_cost (x, outer)
816      rtx x;
817      enum rtx_code outer;
818 {
819   return ((GET_CODE (x) == SUBREG
820 	   && GET_CODE (SUBREG_REG (x)) == REG
821 	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
822 	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
823 	   && (GET_MODE_SIZE (GET_MODE (x))
824 	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
825 	   && subreg_lowpart_p (x)
826 	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
827 				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
828 	  ? 0
829 	  : rtx_cost (x, outer) * 2);
830 }
831 
832 /* Return an estimate of the cost of computing rtx X.
833    One use is in cse, to decide which expression to keep in the hash table.
834    Another is in rtl generation, to pick the cheapest way to multiply.
835    Other uses like the latter are expected in the future.  */
836 
837 int
rtx_cost(x,outer_code)838 rtx_cost (x, outer_code)
839      rtx x;
840      enum rtx_code outer_code ATTRIBUTE_UNUSED;
841 {
842   int i, j;
843   enum rtx_code code;
844   const char *fmt;
845   int total;
846 
847   if (x == 0)
848     return 0;
849 
850   /* Compute the default costs of certain things.
851      Note that RTX_COSTS can override the defaults.  */
852 
853   code = GET_CODE (x);
854   switch (code)
855     {
856     case MULT:
857       total = COSTS_N_INSNS (5);
858       break;
859     case DIV:
860     case UDIV:
861     case MOD:
862     case UMOD:
863       total = COSTS_N_INSNS (7);
864       break;
865     case USE:
866       /* Used in loop.c and combine.c as a marker.  */
867       total = 0;
868       break;
869     default:
870       total = COSTS_N_INSNS (1);
871     }
872 
873   switch (code)
874     {
875     case REG:
876       return 0;
877 
878     case SUBREG:
879       /* If we can't tie these modes, make this expensive.  The larger
880 	 the mode, the more expensive it is.  */
881       if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
882 	return COSTS_N_INSNS (2
883 			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
884       break;
885 
886 #ifdef RTX_COSTS
887       RTX_COSTS (x, code, outer_code);
888 #endif
889 #ifdef CONST_COSTS
890       CONST_COSTS (x, code, outer_code);
891 #endif
892 
893     default:
894 #ifdef DEFAULT_RTX_COSTS
895       DEFAULT_RTX_COSTS (x, code, outer_code);
896 #endif
897       break;
898     }
899 
900   /* Sum the costs of the sub-rtx's, plus cost of this operation,
901      which is already in total.  */
902 
903   fmt = GET_RTX_FORMAT (code);
904   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
905     if (fmt[i] == 'e')
906       total += rtx_cost (XEXP (x, i), code);
907     else if (fmt[i] == 'E')
908       for (j = 0; j < XVECLEN (x, i); j++)
909 	total += rtx_cost (XVECEXP (x, i, j), code);
910 
911   return total;
912 }
913 
914 /* Return cost of address expression X.
915    Expect that X is properly formed address reference.  */
916 
917 int
address_cost(x,mode)918 address_cost (x, mode)
919      rtx x;
920      enum machine_mode mode;
921 {
922   /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
923      during CSE, such nodes are present.  Using an ADDRESSOF node which
924      refers to the address of a REG is a good thing because we can then
925      turn (MEM (ADDRESSSOF (REG))) into just plain REG.  */
926 
927   if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
928     return -1;
929 
930   /* We may be asked for cost of various unusual addresses, such as operands
931      of push instruction.  It is not worthwhile to complicate writing
932      of ADDRESS_COST macro by such cases.  */
933 
934   if (!memory_address_p (mode, x))
935     return 1000;
936 #ifdef ADDRESS_COST
937   return ADDRESS_COST (x);
938 #else
939   return rtx_cost (x, MEM);
940 #endif
941 }
942 
943 
944 static struct cse_reg_info *
get_cse_reg_info(regno)945 get_cse_reg_info (regno)
946      unsigned int regno;
947 {
948   struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
949   struct cse_reg_info *p;
950 
951   for (p = *hash_head; p != NULL; p = p->hash_next)
952     if (p->regno == regno)
953       break;
954 
955   if (p == NULL)
956     {
957       /* Get a new cse_reg_info structure.  */
958       if (cse_reg_info_free_list)
959 	{
960 	  p = cse_reg_info_free_list;
961 	  cse_reg_info_free_list = p->next;
962 	}
963       else
964 	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
965 
966       /* Insert into hash table.  */
967       p->hash_next = *hash_head;
968       *hash_head = p;
969 
970       /* Initialize it.  */
971       p->reg_tick = 1;
972       p->reg_in_table = -1;
973       p->subreg_ticked = -1;
974       p->reg_qty = regno;
975       p->regno = regno;
976       p->next = cse_reg_info_used_list;
977       cse_reg_info_used_list = p;
978       if (!cse_reg_info_used_list_end)
979 	cse_reg_info_used_list_end = p;
980     }
981 
982   /* Cache this lookup; we tend to be looking up information about the
983      same register several times in a row.  */
984   cached_regno = regno;
985   cached_cse_reg_info = p;
986 
987   return p;
988 }
989 
990 /* Clear the hash table and initialize each register with its own quantity,
991    for a new basic block.  */
992 
993 static void
new_basic_block()994 new_basic_block ()
995 {
996   int i;
997 
998   next_qty = max_reg;
999 
1000   /* Clear out hash table state for this pass.  */
1001 
1002   memset ((char *) reg_hash, 0, sizeof reg_hash);
1003 
1004   if (cse_reg_info_used_list)
1005     {
1006       cse_reg_info_used_list_end->next = cse_reg_info_free_list;
1007       cse_reg_info_free_list = cse_reg_info_used_list;
1008       cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1009     }
1010   cached_cse_reg_info = 0;
1011 
1012   CLEAR_HARD_REG_SET (hard_regs_in_table);
1013 
1014   /* The per-quantity values used to be initialized here, but it is
1015      much faster to initialize each as it is made in `make_new_qty'.  */
1016 
1017   for (i = 0; i < HASH_SIZE; i++)
1018     {
1019       struct table_elt *first;
1020 
1021       first = table[i];
1022       if (first != NULL)
1023 	{
1024 	  struct table_elt *last = first;
1025 
1026 	  table[i] = NULL;
1027 
1028 	  while (last->next_same_hash != NULL)
1029 	    last = last->next_same_hash;
1030 
1031 	  /* Now relink this hash entire chain into
1032 	     the free element list.  */
1033 
1034 	  last->next_same_hash = free_element_chain;
1035 	  free_element_chain = first;
1036 	}
1037     }
1038 
1039   prev_insn = 0;
1040 
1041 #ifdef HAVE_cc0
1042   prev_insn_cc0 = 0;
1043 #endif
1044 }
1045 
1046 /* Say that register REG contains a quantity in mode MODE not in any
1047    register before and initialize that quantity.  */
1048 
1049 static void
make_new_qty(reg,mode)1050 make_new_qty (reg, mode)
1051      unsigned int reg;
1052      enum machine_mode mode;
1053 {
1054   int q;
1055   struct qty_table_elem *ent;
1056   struct reg_eqv_elem *eqv;
1057 
1058   if (next_qty >= max_qty)
1059     abort ();
1060 
1061   q = REG_QTY (reg) = next_qty++;
1062   ent = &qty_table[q];
1063   ent->first_reg = reg;
1064   ent->last_reg = reg;
1065   ent->mode = mode;
1066   ent->const_rtx = ent->const_insn = NULL_RTX;
1067   ent->comparison_code = UNKNOWN;
1068 
1069   eqv = &reg_eqv_table[reg];
1070   eqv->next = eqv->prev = -1;
1071 }
1072 
1073 /* Make reg NEW equivalent to reg OLD.
1074    OLD is not changing; NEW is.  */
1075 
1076 static void
make_regs_eqv(new,old)1077 make_regs_eqv (new, old)
1078      unsigned int new, old;
1079 {
1080   unsigned int lastr, firstr;
1081   int q = REG_QTY (old);
1082   struct qty_table_elem *ent;
1083 
1084   ent = &qty_table[q];
1085 
1086   /* Nothing should become eqv until it has a "non-invalid" qty number.  */
1087   if (! REGNO_QTY_VALID_P (old))
1088     abort ();
1089 
1090   REG_QTY (new) = q;
1091   firstr = ent->first_reg;
1092   lastr = ent->last_reg;
1093 
1094   /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
1095      hard regs.  Among pseudos, if NEW will live longer than any other reg
1096      of the same qty, and that is beyond the current basic block,
1097      make it the new canonical replacement for this qty.  */
1098   if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1099       /* Certain fixed registers might be of the class NO_REGS.  This means
1100 	 that not only can they not be allocated by the compiler, but
1101 	 they cannot be used in substitutions or canonicalizations
1102 	 either.  */
1103       && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1104       && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1105 	  || (new >= FIRST_PSEUDO_REGISTER
1106 	      && (firstr < FIRST_PSEUDO_REGISTER
1107 		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1108 		       || (uid_cuid[REGNO_FIRST_UID (new)]
1109 			   < cse_basic_block_start))
1110 		      && (uid_cuid[REGNO_LAST_UID (new)]
1111 			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1112     {
1113       reg_eqv_table[firstr].prev = new;
1114       reg_eqv_table[new].next = firstr;
1115       reg_eqv_table[new].prev = -1;
1116       ent->first_reg = new;
1117     }
1118   else
1119     {
1120       /* If NEW is a hard reg (known to be non-fixed), insert at end.
1121 	 Otherwise, insert before any non-fixed hard regs that are at the
1122 	 end.  Registers of class NO_REGS cannot be used as an
1123 	 equivalent for anything.  */
1124       while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1125 	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1126 	     && new >= FIRST_PSEUDO_REGISTER)
1127 	lastr = reg_eqv_table[lastr].prev;
1128       reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1129       if (reg_eqv_table[lastr].next >= 0)
1130 	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1131       else
1132 	qty_table[q].last_reg = new;
1133       reg_eqv_table[lastr].next = new;
1134       reg_eqv_table[new].prev = lastr;
1135     }
1136 }
1137 
1138 /* Remove REG from its equivalence class.  */
1139 
1140 static void
delete_reg_equiv(reg)1141 delete_reg_equiv (reg)
1142      unsigned int reg;
1143 {
1144   struct qty_table_elem *ent;
1145   int q = REG_QTY (reg);
1146   int p, n;
1147 
1148   /* If invalid, do nothing.  */
1149   if (q == (int) reg)
1150     return;
1151 
1152   ent = &qty_table[q];
1153 
1154   p = reg_eqv_table[reg].prev;
1155   n = reg_eqv_table[reg].next;
1156 
1157   if (n != -1)
1158     reg_eqv_table[n].prev = p;
1159   else
1160     ent->last_reg = p;
1161   if (p != -1)
1162     reg_eqv_table[p].next = n;
1163   else
1164     ent->first_reg = n;
1165 
1166   REG_QTY (reg) = reg;
1167 }
1168 
1169 /* Remove any invalid expressions from the hash table
1170    that refer to any of the registers contained in expression X.
1171 
1172    Make sure that newly inserted references to those registers
1173    as subexpressions will be considered valid.
1174 
1175    mention_regs is not called when a register itself
1176    is being stored in the table.
1177 
1178    Return 1 if we have done something that may have changed the hash code
1179    of X.  */
1180 
1181 static int
mention_regs(x)1182 mention_regs (x)
1183      rtx x;
1184 {
1185   enum rtx_code code;
1186   int i, j;
1187   const char *fmt;
1188   int changed = 0;
1189 
1190   if (x == 0)
1191     return 0;
1192 
1193   code = GET_CODE (x);
1194   if (code == REG)
1195     {
1196       unsigned int regno = REGNO (x);
1197       unsigned int endregno
1198 	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1199 		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1200       unsigned int i;
1201 
1202       for (i = regno; i < endregno; i++)
1203 	{
1204 	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1205 	    remove_invalid_refs (i);
1206 
1207 	  REG_IN_TABLE (i) = REG_TICK (i);
1208 	  SUBREG_TICKED (i) = -1;
1209 	}
1210 
1211       return 0;
1212     }
1213 
1214   /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1215      pseudo if they don't use overlapping words.  We handle only pseudos
1216      here for simplicity.  */
1217   if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1218       && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1219     {
1220       unsigned int i = REGNO (SUBREG_REG (x));
1221 
1222       if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1223 	{
1224 	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1225 	     the last store to this register really stored into this
1226 	     subreg, then remove the memory of this subreg.
1227 	     Otherwise, remove any memory of the entire register and
1228 	     all its subregs from the table.  */
1229 	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1230 	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1231 	    remove_invalid_refs (i);
1232 	  else
1233 	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1234 	}
1235 
1236       REG_IN_TABLE (i) = REG_TICK (i);
1237       SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1238       return 0;
1239     }
1240 
1241   /* If X is a comparison or a COMPARE and either operand is a register
1242      that does not have a quantity, give it one.  This is so that a later
1243      call to record_jump_equiv won't cause X to be assigned a different
1244      hash code and not found in the table after that call.
1245 
1246      It is not necessary to do this here, since rehash_using_reg can
1247      fix up the table later, but doing this here eliminates the need to
1248      call that expensive function in the most common case where the only
1249      use of the register is in the comparison.  */
1250 
1251   if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1252     {
1253       if (GET_CODE (XEXP (x, 0)) == REG
1254 	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1255 	if (insert_regs (XEXP (x, 0), NULL, 0))
1256 	  {
1257 	    rehash_using_reg (XEXP (x, 0));
1258 	    changed = 1;
1259 	  }
1260 
1261       if (GET_CODE (XEXP (x, 1)) == REG
1262 	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1263 	if (insert_regs (XEXP (x, 1), NULL, 0))
1264 	  {
1265 	    rehash_using_reg (XEXP (x, 1));
1266 	    changed = 1;
1267 	  }
1268     }
1269 
1270   fmt = GET_RTX_FORMAT (code);
1271   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1272     if (fmt[i] == 'e')
1273       changed |= mention_regs (XEXP (x, i));
1274     else if (fmt[i] == 'E')
1275       for (j = 0; j < XVECLEN (x, i); j++)
1276 	changed |= mention_regs (XVECEXP (x, i, j));
1277 
1278   return changed;
1279 }
1280 
1281 /* Update the register quantities for inserting X into the hash table
1282    with a value equivalent to CLASSP.
1283    (If the class does not contain a REG, it is irrelevant.)
1284    If MODIFIED is nonzero, X is a destination; it is being modified.
1285    Note that delete_reg_equiv should be called on a register
1286    before insert_regs is done on that register with MODIFIED != 0.
1287 
1288    Nonzero value means that elements of reg_qty have changed
1289    so X's hash code may be different.  */
1290 
1291 static int
insert_regs(x,classp,modified)1292 insert_regs (x, classp, modified)
1293      rtx x;
1294      struct table_elt *classp;
1295      int modified;
1296 {
1297   if (GET_CODE (x) == REG)
1298     {
1299       unsigned int regno = REGNO (x);
1300       int qty_valid;
1301 
1302       /* If REGNO is in the equivalence table already but is of the
1303 	 wrong mode for that equivalence, don't do anything here.  */
1304 
1305       qty_valid = REGNO_QTY_VALID_P (regno);
1306       if (qty_valid)
1307 	{
1308 	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1309 
1310 	  if (ent->mode != GET_MODE (x))
1311 	    return 0;
1312 	}
1313 
1314       if (modified || ! qty_valid)
1315 	{
1316 	  if (classp)
1317 	    for (classp = classp->first_same_value;
1318 		 classp != 0;
1319 		 classp = classp->next_same_value)
1320 	      if (GET_CODE (classp->exp) == REG
1321 		  && GET_MODE (classp->exp) == GET_MODE (x))
1322 		{
1323 		  make_regs_eqv (regno, REGNO (classp->exp));
1324 		  return 1;
1325 		}
1326 
1327 	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1328 	     than REG_IN_TABLE to find out if there was only a single preceding
1329 	     invalidation - for the SUBREG - or another one, which would be
1330 	     for the full register.  However, if we find here that REG_TICK
1331 	     indicates that the register is invalid, it means that it has
1332 	     been invalidated in a separate operation.  The SUBREG might be used
1333 	     now (then this is a recursive call), or we might use the full REG
1334 	     now and a SUBREG of it later.  So bump up REG_TICK so that
1335 	     mention_regs will do the right thing.  */
1336 	  if (! modified
1337 	      && REG_IN_TABLE (regno) >= 0
1338 	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1339 	    REG_TICK (regno)++;
1340 	  make_new_qty (regno, GET_MODE (x));
1341 	  return 1;
1342 	}
1343 
1344       return 0;
1345     }
1346 
1347   /* If X is a SUBREG, we will likely be inserting the inner register in the
1348      table.  If that register doesn't have an assigned quantity number at
1349      this point but does later, the insertion that we will be doing now will
1350      not be accessible because its hash code will have changed.  So assign
1351      a quantity number now.  */
1352 
1353   else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1354 	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1355     {
1356       insert_regs (SUBREG_REG (x), NULL, 0);
1357       mention_regs (x);
1358       return 1;
1359     }
1360   else
1361     return mention_regs (x);
1362 }
1363 
1364 /* Look in or update the hash table.  */
1365 
1366 /* Remove table element ELT from use in the table.
1367    HASH is its hash code, made using the HASH macro.
1368    It's an argument because often that is known in advance
1369    and we save much time not recomputing it.  */
1370 
1371 static void
remove_from_table(elt,hash)1372 remove_from_table (elt, hash)
1373      struct table_elt *elt;
1374      unsigned hash;
1375 {
1376   if (elt == 0)
1377     return;
1378 
1379   /* Mark this element as removed.  See cse_insn.  */
1380   elt->first_same_value = 0;
1381 
1382   /* Remove the table element from its equivalence class.  */
1383 
1384   {
1385     struct table_elt *prev = elt->prev_same_value;
1386     struct table_elt *next = elt->next_same_value;
1387 
1388     if (next)
1389       next->prev_same_value = prev;
1390 
1391     if (prev)
1392       prev->next_same_value = next;
1393     else
1394       {
1395 	struct table_elt *newfirst = next;
1396 	while (next)
1397 	  {
1398 	    next->first_same_value = newfirst;
1399 	    next = next->next_same_value;
1400 	  }
1401       }
1402   }
1403 
1404   /* Remove the table element from its hash bucket.  */
1405 
1406   {
1407     struct table_elt *prev = elt->prev_same_hash;
1408     struct table_elt *next = elt->next_same_hash;
1409 
1410     if (next)
1411       next->prev_same_hash = prev;
1412 
1413     if (prev)
1414       prev->next_same_hash = next;
1415     else if (table[hash] == elt)
1416       table[hash] = next;
1417     else
1418       {
1419 	/* This entry is not in the proper hash bucket.  This can happen
1420 	   when two classes were merged by `merge_equiv_classes'.  Search
1421 	   for the hash bucket that it heads.  This happens only very
1422 	   rarely, so the cost is acceptable.  */
1423 	for (hash = 0; hash < HASH_SIZE; hash++)
1424 	  if (table[hash] == elt)
1425 	    table[hash] = next;
1426       }
1427   }
1428 
1429   /* Remove the table element from its related-value circular chain.  */
1430 
1431   if (elt->related_value != 0 && elt->related_value != elt)
1432     {
1433       struct table_elt *p = elt->related_value;
1434 
1435       while (p->related_value != elt)
1436 	p = p->related_value;
1437       p->related_value = elt->related_value;
1438       if (p->related_value == p)
1439 	p->related_value = 0;
1440     }
1441 
1442   /* Now add it to the free element chain.  */
1443   elt->next_same_hash = free_element_chain;
1444   free_element_chain = elt;
1445 }
1446 
1447 /* Look up X in the hash table and return its table element,
1448    or 0 if X is not in the table.
1449 
1450    MODE is the machine-mode of X, or if X is an integer constant
1451    with VOIDmode then MODE is the mode with which X will be used.
1452 
1453    Here we are satisfied to find an expression whose tree structure
1454    looks like X.  */
1455 
1456 static struct table_elt *
lookup(x,hash,mode)1457 lookup (x, hash, mode)
1458      rtx x;
1459      unsigned hash;
1460      enum machine_mode mode;
1461 {
1462   struct table_elt *p;
1463 
1464   for (p = table[hash]; p; p = p->next_same_hash)
1465     if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1466 			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1467       return p;
1468 
1469   return 0;
1470 }
1471 
1472 /* Like `lookup' but don't care whether the table element uses invalid regs.
1473    Also ignore discrepancies in the machine mode of a register.  */
1474 
1475 static struct table_elt *
lookup_for_remove(x,hash,mode)1476 lookup_for_remove (x, hash, mode)
1477      rtx x;
1478      unsigned hash;
1479      enum machine_mode mode;
1480 {
1481   struct table_elt *p;
1482 
1483   if (GET_CODE (x) == REG)
1484     {
1485       unsigned int regno = REGNO (x);
1486 
1487       /* Don't check the machine mode when comparing registers;
1488 	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1489       for (p = table[hash]; p; p = p->next_same_hash)
1490 	if (GET_CODE (p->exp) == REG
1491 	    && REGNO (p->exp) == regno)
1492 	  return p;
1493     }
1494   else
1495     {
1496       for (p = table[hash]; p; p = p->next_same_hash)
1497 	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1498 	  return p;
1499     }
1500 
1501   return 0;
1502 }
1503 
1504 /* Look for an expression equivalent to X and with code CODE.
1505    If one is found, return that expression.  */
1506 
1507 static rtx
lookup_as_function(x,code)1508 lookup_as_function (x, code)
1509      rtx x;
1510      enum rtx_code code;
1511 {
1512   struct table_elt *p
1513     = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1514 
1515   /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1516      long as we are narrowing.  So if we looked in vain for a mode narrower
1517      than word_mode before, look for word_mode now.  */
1518   if (p == 0 && code == CONST_INT
1519       && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1520     {
1521       x = copy_rtx (x);
1522       PUT_MODE (x, word_mode);
1523       p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1524     }
1525 
1526   if (p == 0)
1527     return 0;
1528 
1529   for (p = p->first_same_value; p; p = p->next_same_value)
1530     if (GET_CODE (p->exp) == code
1531 	/* Make sure this is a valid entry in the table.  */
1532 	&& exp_equiv_p (p->exp, p->exp, 1, 0))
1533       return p->exp;
1534 
1535   return 0;
1536 }
1537 
1538 /* Insert X in the hash table, assuming HASH is its hash code
1539    and CLASSP is an element of the class it should go in
1540    (or 0 if a new class should be made).
1541    It is inserted at the proper position to keep the class in
1542    the order cheapest first.
1543 
1544    MODE is the machine-mode of X, or if X is an integer constant
1545    with VOIDmode then MODE is the mode with which X will be used.
1546 
1547    For elements of equal cheapness, the most recent one
1548    goes in front, except that the first element in the list
1549    remains first unless a cheaper element is added.  The order of
1550    pseudo-registers does not matter, as canon_reg will be called to
1551    find the cheapest when a register is retrieved from the table.
1552 
1553    The in_memory field in the hash table element is set to 0.
1554    The caller must set it nonzero if appropriate.
1555 
1556    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1557    and if insert_regs returns a nonzero value
1558    you must then recompute its hash code before calling here.
1559 
1560    If necessary, update table showing constant values of quantities.  */
1561 
1562 #define CHEAPER(X, Y) \
1563  (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1564 
1565 static struct table_elt *
insert(x,classp,hash,mode)1566 insert (x, classp, hash, mode)
1567      rtx x;
1568      struct table_elt *classp;
1569      unsigned hash;
1570      enum machine_mode mode;
1571 {
1572   struct table_elt *elt;
1573 
1574   /* If X is a register and we haven't made a quantity for it,
1575      something is wrong.  */
1576   if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1577     abort ();
1578 
1579   /* If X is a hard register, show it is being put in the table.  */
1580   if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1581     {
1582       unsigned int regno = REGNO (x);
1583       unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1584       unsigned int i;
1585 
1586       for (i = regno; i < endregno; i++)
1587 	SET_HARD_REG_BIT (hard_regs_in_table, i);
1588     }
1589 
1590   /* Put an element for X into the right hash bucket.  */
1591 
1592   elt = free_element_chain;
1593   if (elt)
1594     free_element_chain = elt->next_same_hash;
1595   else
1596     {
1597       n_elements_made++;
1598       elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1599     }
1600 
1601   elt->exp = x;
1602   elt->canon_exp = NULL_RTX;
1603   elt->cost = COST (x);
1604   elt->regcost = approx_reg_cost (x);
1605   elt->next_same_value = 0;
1606   elt->prev_same_value = 0;
1607   elt->next_same_hash = table[hash];
1608   elt->prev_same_hash = 0;
1609   elt->related_value = 0;
1610   elt->in_memory = 0;
1611   elt->mode = mode;
1612   elt->is_const = (CONSTANT_P (x)
1613 		   /* GNU C++ takes advantage of this for `this'
1614 		      (and other const values).  */
1615 		   || (GET_CODE (x) == REG
1616 		       && RTX_UNCHANGING_P (x)
1617 		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1618 		   || FIXED_BASE_PLUS_P (x));
1619 
1620   if (table[hash])
1621     table[hash]->prev_same_hash = elt;
1622   table[hash] = elt;
1623 
1624   /* Put it into the proper value-class.  */
1625   if (classp)
1626     {
1627       classp = classp->first_same_value;
1628       if (CHEAPER (elt, classp))
1629 	/* Insert at the head of the class */
1630 	{
1631 	  struct table_elt *p;
1632 	  elt->next_same_value = classp;
1633 	  classp->prev_same_value = elt;
1634 	  elt->first_same_value = elt;
1635 
1636 	  for (p = classp; p; p = p->next_same_value)
1637 	    p->first_same_value = elt;
1638 	}
1639       else
1640 	{
1641 	  /* Insert not at head of the class.  */
1642 	  /* Put it after the last element cheaper than X.  */
1643 	  struct table_elt *p, *next;
1644 
1645 	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1646 	       p = next);
1647 
1648 	  /* Put it after P and before NEXT.  */
1649 	  elt->next_same_value = next;
1650 	  if (next)
1651 	    next->prev_same_value = elt;
1652 
1653 	  elt->prev_same_value = p;
1654 	  p->next_same_value = elt;
1655 	  elt->first_same_value = classp;
1656 	}
1657     }
1658   else
1659     elt->first_same_value = elt;
1660 
1661   /* If this is a constant being set equivalent to a register or a register
1662      being set equivalent to a constant, note the constant equivalence.
1663 
1664      If this is a constant, it cannot be equivalent to a different constant,
1665      and a constant is the only thing that can be cheaper than a register.  So
1666      we know the register is the head of the class (before the constant was
1667      inserted).
1668 
1669      If this is a register that is not already known equivalent to a
1670      constant, we must check the entire class.
1671 
1672      If this is a register that is already known equivalent to an insn,
1673      update the qtys `const_insn' to show that `this_insn' is the latest
1674      insn making that quantity equivalent to the constant.  */
1675 
1676   if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1677       && GET_CODE (x) != REG)
1678     {
1679       int exp_q = REG_QTY (REGNO (classp->exp));
1680       struct qty_table_elem *exp_ent = &qty_table[exp_q];
1681 
1682       exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1683       exp_ent->const_insn = this_insn;
1684     }
1685 
1686   else if (GET_CODE (x) == REG
1687 	   && classp
1688 	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1689 	   && ! elt->is_const)
1690     {
1691       struct table_elt *p;
1692 
1693       for (p = classp; p != 0; p = p->next_same_value)
1694 	{
1695 	  if (p->is_const && GET_CODE (p->exp) != REG)
1696 	    {
1697 	      int x_q = REG_QTY (REGNO (x));
1698 	      struct qty_table_elem *x_ent = &qty_table[x_q];
1699 
1700 	      x_ent->const_rtx
1701 		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1702 	      x_ent->const_insn = this_insn;
1703 	      break;
1704 	    }
1705 	}
1706     }
1707 
1708   else if (GET_CODE (x) == REG
1709 	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1710 	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1711     qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1712 
1713   /* If this is a constant with symbolic value,
1714      and it has a term with an explicit integer value,
1715      link it up with related expressions.  */
1716   if (GET_CODE (x) == CONST)
1717     {
1718       rtx subexp = get_related_value (x);
1719       unsigned subhash;
1720       struct table_elt *subelt, *subelt_prev;
1721 
1722       if (subexp != 0)
1723 	{
1724 	  /* Get the integer-free subexpression in the hash table.  */
1725 	  subhash = safe_hash (subexp, mode) & HASH_MASK;
1726 	  subelt = lookup (subexp, subhash, mode);
1727 	  if (subelt == 0)
1728 	    subelt = insert (subexp, NULL, subhash, mode);
1729 	  /* Initialize SUBELT's circular chain if it has none.  */
1730 	  if (subelt->related_value == 0)
1731 	    subelt->related_value = subelt;
1732 	  /* Find the element in the circular chain that precedes SUBELT.  */
1733 	  subelt_prev = subelt;
1734 	  while (subelt_prev->related_value != subelt)
1735 	    subelt_prev = subelt_prev->related_value;
1736 	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1737 	     This way the element that follows SUBELT is the oldest one.  */
1738 	  elt->related_value = subelt_prev->related_value;
1739 	  subelt_prev->related_value = elt;
1740 	}
1741     }
1742 
1743   return elt;
1744 }
1745 
1746 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1747    CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1748    the two classes equivalent.
1749 
1750    CLASS1 will be the surviving class; CLASS2 should not be used after this
1751    call.
1752 
1753    Any invalid entries in CLASS2 will not be copied.  */
1754 
1755 static void
merge_equiv_classes(class1,class2)1756 merge_equiv_classes (class1, class2)
1757      struct table_elt *class1, *class2;
1758 {
1759   struct table_elt *elt, *next, *new;
1760 
1761   /* Ensure we start with the head of the classes.  */
1762   class1 = class1->first_same_value;
1763   class2 = class2->first_same_value;
1764 
1765   /* If they were already equal, forget it.  */
1766   if (class1 == class2)
1767     return;
1768 
1769   for (elt = class2; elt; elt = next)
1770     {
1771       unsigned int hash;
1772       rtx exp = elt->exp;
1773       enum machine_mode mode = elt->mode;
1774 
1775       next = elt->next_same_value;
1776 
1777       /* Remove old entry, make a new one in CLASS1's class.
1778 	 Don't do this for invalid entries as we cannot find their
1779 	 hash code (it also isn't necessary).  */
1780       if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1781 	{
1782 	  hash_arg_in_memory = 0;
1783 	  hash = HASH (exp, mode);
1784 
1785 	  if (GET_CODE (exp) == REG)
1786 	    delete_reg_equiv (REGNO (exp));
1787 
1788 	  remove_from_table (elt, hash);
1789 
1790 	  if (insert_regs (exp, class1, 0))
1791 	    {
1792 	      rehash_using_reg (exp);
1793 	      hash = HASH (exp, mode);
1794 	    }
1795 	  new = insert (exp, class1, hash, mode);
1796 	  new->in_memory = hash_arg_in_memory;
1797 	}
1798     }
1799 }
1800 
1801 /* Flush the entire hash table.  */
1802 
1803 static void
flush_hash_table()1804 flush_hash_table ()
1805 {
1806   int i;
1807   struct table_elt *p;
1808 
1809   for (i = 0; i < HASH_SIZE; i++)
1810     for (p = table[i]; p; p = table[i])
1811       {
1812 	/* Note that invalidate can remove elements
1813 	   after P in the current hash chain.  */
1814 	if (GET_CODE (p->exp) == REG)
1815 	  invalidate (p->exp, p->mode);
1816 	else
1817 	  remove_from_table (p, i);
1818       }
1819 }
1820 
1821 /* Function called for each rtx to check whether true dependence exist.  */
1822 struct check_dependence_data
1823 {
1824   enum machine_mode mode;
1825   rtx exp;
1826 };
1827 
1828 static int
check_dependence(x,data)1829 check_dependence (x, data)
1830      rtx *x;
1831      void *data;
1832 {
1833   struct check_dependence_data *d = (struct check_dependence_data *) data;
1834   if (*x && GET_CODE (*x) == MEM)
1835     return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1836   else
1837     return 0;
1838 }
1839 
1840 /* Remove from the hash table, or mark as invalid, all expressions whose
1841    values could be altered by storing in X.  X is a register, a subreg, or
1842    a memory reference with nonvarying address (because, when a memory
1843    reference with a varying address is stored in, all memory references are
1844    removed by invalidate_memory so specific invalidation is superfluous).
1845    FULL_MODE, if not VOIDmode, indicates that this much should be
1846    invalidated instead of just the amount indicated by the mode of X.  This
1847    is only used for bitfield stores into memory.
1848 
1849    A nonvarying address may be just a register or just a symbol reference,
1850    or it may be either of those plus a numeric offset.  */
1851 
1852 static void
invalidate(x,full_mode)1853 invalidate (x, full_mode)
1854      rtx x;
1855      enum machine_mode full_mode;
1856 {
1857   int i;
1858   struct table_elt *p;
1859 
1860   switch (GET_CODE (x))
1861     {
1862     case REG:
1863       {
1864 	/* If X is a register, dependencies on its contents are recorded
1865 	   through the qty number mechanism.  Just change the qty number of
1866 	   the register, mark it as invalid for expressions that refer to it,
1867 	   and remove it itself.  */
1868 	unsigned int regno = REGNO (x);
1869 	unsigned int hash = HASH (x, GET_MODE (x));
1870 
1871 	/* Remove REGNO from any quantity list it might be on and indicate
1872 	   that its value might have changed.  If it is a pseudo, remove its
1873 	   entry from the hash table.
1874 
1875 	   For a hard register, we do the first two actions above for any
1876 	   additional hard registers corresponding to X.  Then, if any of these
1877 	   registers are in the table, we must remove any REG entries that
1878 	   overlap these registers.  */
1879 
1880 	delete_reg_equiv (regno);
1881 	REG_TICK (regno)++;
1882 	SUBREG_TICKED (regno) = -1;
1883 
1884 	if (regno >= FIRST_PSEUDO_REGISTER)
1885 	  {
1886 	    /* Because a register can be referenced in more than one mode,
1887 	       we might have to remove more than one table entry.  */
1888 	    struct table_elt *elt;
1889 
1890 	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1891 	      remove_from_table (elt, hash);
1892 	  }
1893 	else
1894 	  {
1895 	    HOST_WIDE_INT in_table
1896 	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1897 	    unsigned int endregno
1898 	      = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1899 	    unsigned int tregno, tendregno, rn;
1900 	    struct table_elt *p, *next;
1901 
1902 	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1903 
1904 	    for (rn = regno + 1; rn < endregno; rn++)
1905 	      {
1906 		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1907 		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1908 		delete_reg_equiv (rn);
1909 		REG_TICK (rn)++;
1910 		SUBREG_TICKED (rn) = -1;
1911 	      }
1912 
1913 	    if (in_table)
1914 	      for (hash = 0; hash < HASH_SIZE; hash++)
1915 		for (p = table[hash]; p; p = next)
1916 		  {
1917 		    next = p->next_same_hash;
1918 
1919 		    if (GET_CODE (p->exp) != REG
1920 			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1921 		      continue;
1922 
1923 		    tregno = REGNO (p->exp);
1924 		    tendregno
1925 		      = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1926 		    if (tendregno > regno && tregno < endregno)
1927 		      remove_from_table (p, hash);
1928 		  }
1929 	  }
1930       }
1931       return;
1932 
1933     case SUBREG:
1934       invalidate (SUBREG_REG (x), VOIDmode);
1935       return;
1936 
1937     case PARALLEL:
1938       for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1939 	invalidate (XVECEXP (x, 0, i), VOIDmode);
1940       return;
1941 
1942     case EXPR_LIST:
1943       /* This is part of a disjoint return value; extract the location in
1944 	 question ignoring the offset.  */
1945       invalidate (XEXP (x, 0), VOIDmode);
1946       return;
1947 
1948     case MEM:
1949       /* Calculate the canonical version of X here so that
1950 	 true_dependence doesn't generate new RTL for X on each call.  */
1951       x = canon_rtx (x);
1952 
1953       /* Remove all hash table elements that refer to overlapping pieces of
1954 	 memory.  */
1955       if (full_mode == VOIDmode)
1956 	full_mode = GET_MODE (x);
1957 
1958       for (i = 0; i < HASH_SIZE; i++)
1959 	{
1960 	  struct table_elt *next;
1961 
1962 	  for (p = table[i]; p; p = next)
1963 	    {
1964 	      next = p->next_same_hash;
1965 	      if (p->in_memory)
1966 		{
1967 		  struct check_dependence_data d;
1968 
1969 		  /* Just canonicalize the expression once;
1970 		     otherwise each time we call invalidate
1971 		     true_dependence will canonicalize the
1972 		     expression again.  */
1973 		  if (!p->canon_exp)
1974 		    p->canon_exp = canon_rtx (p->exp);
1975 		  d.exp = x;
1976 		  d.mode = full_mode;
1977 		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1978 		    remove_from_table (p, i);
1979 		}
1980 	    }
1981 	}
1982       return;
1983 
1984     default:
1985       abort ();
1986     }
1987 }
1988 
1989 /* Remove all expressions that refer to register REGNO,
1990    since they are already invalid, and we are about to
1991    mark that register valid again and don't want the old
1992    expressions to reappear as valid.  */
1993 
1994 static void
remove_invalid_refs(regno)1995 remove_invalid_refs (regno)
1996      unsigned int regno;
1997 {
1998   unsigned int i;
1999   struct table_elt *p, *next;
2000 
2001   for (i = 0; i < HASH_SIZE; i++)
2002     for (p = table[i]; p; p = next)
2003       {
2004 	next = p->next_same_hash;
2005 	if (GET_CODE (p->exp) != REG
2006 	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2007 	  remove_from_table (p, i);
2008       }
2009 }
2010 
2011 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2012    and mode MODE.  */
2013 static void
remove_invalid_subreg_refs(regno,offset,mode)2014 remove_invalid_subreg_refs (regno, offset, mode)
2015      unsigned int regno;
2016      unsigned int offset;
2017      enum machine_mode mode;
2018 {
2019   unsigned int i;
2020   struct table_elt *p, *next;
2021   unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2022 
2023   for (i = 0; i < HASH_SIZE; i++)
2024     for (p = table[i]; p; p = next)
2025       {
2026 	rtx exp = p->exp;
2027 	next = p->next_same_hash;
2028 
2029 	if (GET_CODE (exp) != REG
2030 	    && (GET_CODE (exp) != SUBREG
2031 		|| GET_CODE (SUBREG_REG (exp)) != REG
2032 		|| REGNO (SUBREG_REG (exp)) != regno
2033 		|| (((SUBREG_BYTE (exp)
2034 		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2035 		    && SUBREG_BYTE (exp) <= end))
2036 	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2037 	  remove_from_table (p, i);
2038       }
2039 }
2040 
2041 /* Recompute the hash codes of any valid entries in the hash table that
2042    reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2043 
2044    This is called when we make a jump equivalence.  */
2045 
2046 static void
rehash_using_reg(x)2047 rehash_using_reg (x)
2048      rtx x;
2049 {
2050   unsigned int i;
2051   struct table_elt *p, *next;
2052   unsigned hash;
2053 
2054   if (GET_CODE (x) == SUBREG)
2055     x = SUBREG_REG (x);
2056 
2057   /* If X is not a register or if the register is known not to be in any
2058      valid entries in the table, we have no work to do.  */
2059 
2060   if (GET_CODE (x) != REG
2061       || REG_IN_TABLE (REGNO (x)) < 0
2062       || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2063     return;
2064 
2065   /* Scan all hash chains looking for valid entries that mention X.
2066      If we find one and it is in the wrong hash chain, move it.  We can skip
2067      objects that are registers, since they are handled specially.  */
2068 
2069   for (i = 0; i < HASH_SIZE; i++)
2070     for (p = table[i]; p; p = next)
2071       {
2072 	next = p->next_same_hash;
2073 	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2074 	    && exp_equiv_p (p->exp, p->exp, 1, 0)
2075 	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2076 	  {
2077 	    if (p->next_same_hash)
2078 	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2079 
2080 	    if (p->prev_same_hash)
2081 	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2082 	    else
2083 	      table[i] = p->next_same_hash;
2084 
2085 	    p->next_same_hash = table[hash];
2086 	    p->prev_same_hash = 0;
2087 	    if (table[hash])
2088 	      table[hash]->prev_same_hash = p;
2089 	    table[hash] = p;
2090 	  }
2091       }
2092 }
2093 
2094 /* Remove from the hash table any expression that is a call-clobbered
2095    register.  Also update their TICK values.  */
2096 
2097 static void
invalidate_for_call()2098 invalidate_for_call ()
2099 {
2100   unsigned int regno, endregno;
2101   unsigned int i;
2102   unsigned hash;
2103   struct table_elt *p, *next;
2104   int in_table = 0;
2105 
2106   /* Go through all the hard registers.  For each that is clobbered in
2107      a CALL_INSN, remove the register from quantity chains and update
2108      reg_tick if defined.  Also see if any of these registers is currently
2109      in the table.  */
2110 
2111   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2112     if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2113       {
2114 	delete_reg_equiv (regno);
2115 	if (REG_TICK (regno) >= 0)
2116 	  {
2117 	    REG_TICK (regno)++;
2118 	    SUBREG_TICKED (regno) = -1;
2119 	  }
2120 
2121 	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2122       }
2123 
2124   /* In the case where we have no call-clobbered hard registers in the
2125      table, we are done.  Otherwise, scan the table and remove any
2126      entry that overlaps a call-clobbered register.  */
2127 
2128   if (in_table)
2129     for (hash = 0; hash < HASH_SIZE; hash++)
2130       for (p = table[hash]; p; p = next)
2131 	{
2132 	  next = p->next_same_hash;
2133 
2134 	  if (GET_CODE (p->exp) != REG
2135 	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2136 	    continue;
2137 
2138 	  regno = REGNO (p->exp);
2139 	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2140 
2141 	  for (i = regno; i < endregno; i++)
2142 	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2143 	      {
2144 		remove_from_table (p, hash);
2145 		break;
2146 	      }
2147 	}
2148 }
2149 
2150 /* Given an expression X of type CONST,
2151    and ELT which is its table entry (or 0 if it
2152    is not in the hash table),
2153    return an alternate expression for X as a register plus integer.
2154    If none can be found, return 0.  */
2155 
2156 static rtx
use_related_value(x,elt)2157 use_related_value (x, elt)
2158      rtx x;
2159      struct table_elt *elt;
2160 {
2161   struct table_elt *relt = 0;
2162   struct table_elt *p, *q;
2163   HOST_WIDE_INT offset;
2164 
2165   /* First, is there anything related known?
2166      If we have a table element, we can tell from that.
2167      Otherwise, must look it up.  */
2168 
2169   if (elt != 0 && elt->related_value != 0)
2170     relt = elt;
2171   else if (elt == 0 && GET_CODE (x) == CONST)
2172     {
2173       rtx subexp = get_related_value (x);
2174       if (subexp != 0)
2175 	relt = lookup (subexp,
2176 		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2177 		       GET_MODE (subexp));
2178     }
2179 
2180   if (relt == 0)
2181     return 0;
2182 
2183   /* Search all related table entries for one that has an
2184      equivalent register.  */
2185 
2186   p = relt;
2187   while (1)
2188     {
2189       /* This loop is strange in that it is executed in two different cases.
2190 	 The first is when X is already in the table.  Then it is searching
2191 	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2192 	 X is not in the table.  Then RELT points to a class for the related
2193 	 value.
2194 
2195 	 Ensure that, whatever case we are in, that we ignore classes that have
2196 	 the same value as X.  */
2197 
2198       if (rtx_equal_p (x, p->exp))
2199 	q = 0;
2200       else
2201 	for (q = p->first_same_value; q; q = q->next_same_value)
2202 	  if (GET_CODE (q->exp) == REG)
2203 	    break;
2204 
2205       if (q)
2206 	break;
2207 
2208       p = p->related_value;
2209 
2210       /* We went all the way around, so there is nothing to be found.
2211 	 Alternatively, perhaps RELT was in the table for some other reason
2212 	 and it has no related values recorded.  */
2213       if (p == relt || p == 0)
2214 	break;
2215     }
2216 
2217   if (q == 0)
2218     return 0;
2219 
2220   offset = (get_integer_term (x) - get_integer_term (p->exp));
2221   /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
2222   return plus_constant (q->exp, offset);
2223 }
2224 
2225 /* Hash a string.  Just add its bytes up.  */
2226 static inline unsigned
canon_hash_string(ps)2227 canon_hash_string (ps)
2228      const char *ps;
2229 {
2230   unsigned hash = 0;
2231   const unsigned char *p = (const unsigned char *) ps;
2232 
2233   if (p)
2234     while (*p)
2235       hash += *p++;
2236 
2237   return hash;
2238 }
2239 
2240 /* Hash an rtx.  We are careful to make sure the value is never negative.
2241    Equivalent registers hash identically.
2242    MODE is used in hashing for CONST_INTs only;
2243    otherwise the mode of X is used.
2244 
2245    Store 1 in do_not_record if any subexpression is volatile.
2246 
2247    Store 1 in hash_arg_in_memory if X contains a MEM rtx
2248    which does not have the RTX_UNCHANGING_P bit set.
2249 
2250    Note that cse_insn knows that the hash code of a MEM expression
2251    is just (int) MEM plus the hash code of the address.  */
2252 
2253 static unsigned
canon_hash(x,mode)2254 canon_hash (x, mode)
2255      rtx x;
2256      enum machine_mode mode;
2257 {
2258   int i, j;
2259   unsigned hash = 0;
2260   enum rtx_code code;
2261   const char *fmt;
2262 
2263   /* repeat is used to turn tail-recursion into iteration.  */
2264  repeat:
2265   if (x == 0)
2266     return hash;
2267 
2268   code = GET_CODE (x);
2269   switch (code)
2270     {
2271     case REG:
2272       {
2273 	unsigned int regno = REGNO (x);
2274 	bool record;
2275 
2276 	/* On some machines, we can't record any non-fixed hard register,
2277 	   because extending its life will cause reload problems.  We
2278 	   consider ap, fp, sp, gp to be fixed for this purpose.
2279 
2280 	   We also consider CCmode registers to be fixed for this purpose;
2281 	   failure to do so leads to failure to simplify 0<100 type of
2282 	   conditionals.
2283 
2284 	   On all machines, we can't record any global registers.
2285 	   Nor should we record any register that is in a small
2286 	   class, as defined by CLASS_LIKELY_SPILLED_P.  */
2287 
2288 	if (regno >= FIRST_PSEUDO_REGISTER)
2289 	  record = true;
2290 	else if (x == frame_pointer_rtx
2291 		 || x == hard_frame_pointer_rtx
2292 		 || x == arg_pointer_rtx
2293 		 || x == stack_pointer_rtx
2294 		 || x == pic_offset_table_rtx)
2295 	  record = true;
2296 	else if (global_regs[regno])
2297 	  record = false;
2298 	else if (fixed_regs[regno])
2299 	  record = true;
2300 	else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2301 	  record = true;
2302 	else if (SMALL_REGISTER_CLASSES)
2303 	  record = false;
2304 	else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2305 	  record = false;
2306 	else
2307 	  record = true;
2308 
2309 	if (!record)
2310 	  {
2311 	    do_not_record = 1;
2312 	    return 0;
2313 	  }
2314 
2315 	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2316 	return hash;
2317       }
2318 
2319     /* We handle SUBREG of a REG specially because the underlying
2320        reg changes its hash value with every value change; we don't
2321        want to have to forget unrelated subregs when one subreg changes.  */
2322     case SUBREG:
2323       {
2324 	if (GET_CODE (SUBREG_REG (x)) == REG)
2325 	  {
2326 	    hash += (((unsigned) SUBREG << 7)
2327 		     + REGNO (SUBREG_REG (x))
2328 		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2329 	    return hash;
2330 	  }
2331 	break;
2332       }
2333 
2334     case CONST_INT:
2335       {
2336 	unsigned HOST_WIDE_INT tem = INTVAL (x);
2337 	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2338 	return hash;
2339       }
2340 
2341     case CONST_DOUBLE:
2342       /* This is like the general case, except that it only counts
2343 	 the integers representing the constant.  */
2344       hash += (unsigned) code + (unsigned) GET_MODE (x);
2345       if (GET_MODE (x) != VOIDmode)
2346 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2347       else
2348 	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2349 		 + (unsigned) CONST_DOUBLE_HIGH (x));
2350       return hash;
2351 
2352     case CONST_VECTOR:
2353       {
2354 	int units;
2355 	rtx elt;
2356 
2357 	units = CONST_VECTOR_NUNITS (x);
2358 
2359 	for (i = 0; i < units; ++i)
2360 	  {
2361 	    elt = CONST_VECTOR_ELT (x, i);
2362 	    hash += canon_hash (elt, GET_MODE (elt));
2363 	  }
2364 
2365 	return hash;
2366       }
2367 
2368       /* Assume there is only one rtx object for any given label.  */
2369     case LABEL_REF:
2370       hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2371       return hash;
2372 
2373     case SYMBOL_REF:
2374       hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2375       return hash;
2376 
2377     case MEM:
2378       /* We don't record if marked volatile or if BLKmode since we don't
2379 	 know the size of the move.  */
2380       if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2381 	{
2382 	  do_not_record = 1;
2383 	  return 0;
2384 	}
2385       if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2386 	{
2387 	  hash_arg_in_memory = 1;
2388 	}
2389       /* Now that we have already found this special case,
2390 	 might as well speed it up as much as possible.  */
2391       hash += (unsigned) MEM;
2392       x = XEXP (x, 0);
2393       goto repeat;
2394 
2395     case USE:
2396       /* A USE that mentions non-volatile memory needs special
2397 	 handling since the MEM may be BLKmode which normally
2398 	 prevents an entry from being made.  Pure calls are
2399 	 marked by a USE which mentions BLKmode memory.  */
2400       if (GET_CODE (XEXP (x, 0)) == MEM
2401 	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2402 	{
2403 	  hash += (unsigned) USE;
2404 	  x = XEXP (x, 0);
2405 
2406 	  if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2407 	    hash_arg_in_memory = 1;
2408 
2409 	  /* Now that we have already found this special case,
2410 	     might as well speed it up as much as possible.  */
2411 	  hash += (unsigned) MEM;
2412 	  x = XEXP (x, 0);
2413 	  goto repeat;
2414 	}
2415       break;
2416 
2417     case PRE_DEC:
2418     case PRE_INC:
2419     case POST_DEC:
2420     case POST_INC:
2421     case PRE_MODIFY:
2422     case POST_MODIFY:
2423     case PC:
2424     case CC0:
2425     case CALL:
2426     case UNSPEC_VOLATILE:
2427       do_not_record = 1;
2428       return 0;
2429 
2430     case ASM_OPERANDS:
2431       if (MEM_VOLATILE_P (x))
2432 	{
2433 	  do_not_record = 1;
2434 	  return 0;
2435 	}
2436       else
2437 	{
2438 	  /* We don't want to take the filename and line into account.  */
2439 	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2440 	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2441 	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2442 	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2443 
2444 	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2445 	    {
2446 	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2447 		{
2448 		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2449 				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2450 			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2451 						(x, i)));
2452 		}
2453 
2454 	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2455 	      x = ASM_OPERANDS_INPUT (x, 0);
2456 	      mode = GET_MODE (x);
2457 	      goto repeat;
2458 	    }
2459 
2460 	  return hash;
2461 	}
2462       break;
2463 
2464     default:
2465       break;
2466     }
2467 
2468   i = GET_RTX_LENGTH (code) - 1;
2469   hash += (unsigned) code + (unsigned) GET_MODE (x);
2470   fmt = GET_RTX_FORMAT (code);
2471   for (; i >= 0; i--)
2472     {
2473       if (fmt[i] == 'e')
2474 	{
2475 	  rtx tem = XEXP (x, i);
2476 
2477 	  /* If we are about to do the last recursive call
2478 	     needed at this level, change it into iteration.
2479 	     This function  is called enough to be worth it.  */
2480 	  if (i == 0)
2481 	    {
2482 	      x = tem;
2483 	      goto repeat;
2484 	    }
2485 	  hash += canon_hash (tem, 0);
2486 	}
2487       else if (fmt[i] == 'E')
2488 	for (j = 0; j < XVECLEN (x, i); j++)
2489 	  hash += canon_hash (XVECEXP (x, i, j), 0);
2490       else if (fmt[i] == 's')
2491 	hash += canon_hash_string (XSTR (x, i));
2492       else if (fmt[i] == 'i')
2493 	{
2494 	  unsigned tem = XINT (x, i);
2495 	  hash += tem;
2496 	}
2497       else if (fmt[i] == '0' || fmt[i] == 't')
2498 	/* Unused.  */
2499 	;
2500       else
2501 	abort ();
2502     }
2503   return hash;
2504 }
2505 
2506 /* Like canon_hash but with no side effects.  */
2507 
2508 static unsigned
safe_hash(x,mode)2509 safe_hash (x, mode)
2510      rtx x;
2511      enum machine_mode mode;
2512 {
2513   int save_do_not_record = do_not_record;
2514   int save_hash_arg_in_memory = hash_arg_in_memory;
2515   unsigned hash = canon_hash (x, mode);
2516   hash_arg_in_memory = save_hash_arg_in_memory;
2517   do_not_record = save_do_not_record;
2518   return hash;
2519 }
2520 
2521 /* Return 1 iff X and Y would canonicalize into the same thing,
2522    without actually constructing the canonicalization of either one.
2523    If VALIDATE is nonzero,
2524    we assume X is an expression being processed from the rtl
2525    and Y was found in the hash table.  We check register refs
2526    in Y for being marked as valid.
2527 
2528    If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2529    that is known to be in the register.  Ordinarily, we don't allow them
2530    to match, because letting them match would cause unpredictable results
2531    in all the places that search a hash table chain for an equivalent
2532    for a given value.  A possible equivalent that has different structure
2533    has its hash code computed from different data.  Whether the hash code
2534    is the same as that of the given value is pure luck.  */
2535 
2536 static int
exp_equiv_p(x,y,validate,equal_values)2537 exp_equiv_p (x, y, validate, equal_values)
2538      rtx x, y;
2539      int validate;
2540      int equal_values;
2541 {
2542   int i, j;
2543   enum rtx_code code;
2544   const char *fmt;
2545 
2546   /* Note: it is incorrect to assume an expression is equivalent to itself
2547      if VALIDATE is nonzero.  */
2548   if (x == y && !validate)
2549     return 1;
2550   if (x == 0 || y == 0)
2551     return x == y;
2552 
2553   code = GET_CODE (x);
2554   if (code != GET_CODE (y))
2555     {
2556       if (!equal_values)
2557 	return 0;
2558 
2559       /* If X is a constant and Y is a register or vice versa, they may be
2560 	 equivalent.  We only have to validate if Y is a register.  */
2561       if (CONSTANT_P (x) && GET_CODE (y) == REG
2562 	  && REGNO_QTY_VALID_P (REGNO (y)))
2563 	{
2564 	  int y_q = REG_QTY (REGNO (y));
2565 	  struct qty_table_elem *y_ent = &qty_table[y_q];
2566 
2567 	  if (GET_MODE (y) == y_ent->mode
2568 	      && rtx_equal_p (x, y_ent->const_rtx)
2569 	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2570 	    return 1;
2571 	}
2572 
2573       if (CONSTANT_P (y) && code == REG
2574 	  && REGNO_QTY_VALID_P (REGNO (x)))
2575 	{
2576 	  int x_q = REG_QTY (REGNO (x));
2577 	  struct qty_table_elem *x_ent = &qty_table[x_q];
2578 
2579 	  if (GET_MODE (x) == x_ent->mode
2580 	      && rtx_equal_p (y, x_ent->const_rtx))
2581 	    return 1;
2582 	}
2583 
2584       return 0;
2585     }
2586 
2587   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2588   if (GET_MODE (x) != GET_MODE (y))
2589     return 0;
2590 
2591   switch (code)
2592     {
2593     case PC:
2594     case CC0:
2595     case CONST_INT:
2596       return x == y;
2597 
2598     case LABEL_REF:
2599       return XEXP (x, 0) == XEXP (y, 0);
2600 
2601     case SYMBOL_REF:
2602       return XSTR (x, 0) == XSTR (y, 0);
2603 
2604     case REG:
2605       {
2606 	unsigned int regno = REGNO (y);
2607 	unsigned int endregno
2608 	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2609 		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2610 	unsigned int i;
2611 
2612 	/* If the quantities are not the same, the expressions are not
2613 	   equivalent.  If there are and we are not to validate, they
2614 	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2615 
2616 	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2617 	  return 0;
2618 
2619 	if (! validate)
2620 	  return 1;
2621 
2622 	for (i = regno; i < endregno; i++)
2623 	  if (REG_IN_TABLE (i) != REG_TICK (i))
2624 	    return 0;
2625 
2626 	return 1;
2627       }
2628 
2629     /*  For commutative operations, check both orders.  */
2630     case PLUS:
2631     case MULT:
2632     case AND:
2633     case IOR:
2634     case XOR:
2635     case NE:
2636     case EQ:
2637       return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2638 	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2639 			       validate, equal_values))
2640 	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2641 			       validate, equal_values)
2642 		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2643 				  validate, equal_values)));
2644 
2645     case ASM_OPERANDS:
2646       /* We don't use the generic code below because we want to
2647 	 disregard filename and line numbers.  */
2648 
2649       /* A volatile asm isn't equivalent to any other.  */
2650       if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2651 	return 0;
2652 
2653       if (GET_MODE (x) != GET_MODE (y)
2654 	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2655 	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2656 		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2657 	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2658 	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2659 	return 0;
2660 
2661       if (ASM_OPERANDS_INPUT_LENGTH (x))
2662 	{
2663 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2664 	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2665 			       ASM_OPERANDS_INPUT (y, i),
2666 			       validate, equal_values)
2667 		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2668 			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2669 	      return 0;
2670 	}
2671 
2672       return 1;
2673 
2674     default:
2675       break;
2676     }
2677 
2678   /* Compare the elements.  If any pair of corresponding elements
2679      fail to match, return 0 for the whole things.  */
2680 
2681   fmt = GET_RTX_FORMAT (code);
2682   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2683     {
2684       switch (fmt[i])
2685 	{
2686 	case 'e':
2687 	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2688 	    return 0;
2689 	  break;
2690 
2691 	case 'E':
2692 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2693 	    return 0;
2694 	  for (j = 0; j < XVECLEN (x, i); j++)
2695 	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2696 			       validate, equal_values))
2697 	      return 0;
2698 	  break;
2699 
2700 	case 's':
2701 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2702 	    return 0;
2703 	  break;
2704 
2705 	case 'i':
2706 	  if (XINT (x, i) != XINT (y, i))
2707 	    return 0;
2708 	  break;
2709 
2710 	case 'w':
2711 	  if (XWINT (x, i) != XWINT (y, i))
2712 	    return 0;
2713 	  break;
2714 
2715 	case '0':
2716 	case 't':
2717 	  break;
2718 
2719 	default:
2720 	  abort ();
2721 	}
2722     }
2723 
2724   return 1;
2725 }
2726 
2727 /* Return 1 if X has a value that can vary even between two
2728    executions of the program.  0 means X can be compared reliably
2729    against certain constants or near-constants.  */
2730 
2731 static int
cse_rtx_varies_p(x,from_alias)2732 cse_rtx_varies_p (x, from_alias)
2733      rtx x;
2734      int from_alias;
2735 {
2736   /* We need not check for X and the equivalence class being of the same
2737      mode because if X is equivalent to a constant in some mode, it
2738      doesn't vary in any mode.  */
2739 
2740   if (GET_CODE (x) == REG
2741       && REGNO_QTY_VALID_P (REGNO (x)))
2742     {
2743       int x_q = REG_QTY (REGNO (x));
2744       struct qty_table_elem *x_ent = &qty_table[x_q];
2745 
2746       if (GET_MODE (x) == x_ent->mode
2747 	  && x_ent->const_rtx != NULL_RTX)
2748 	return 0;
2749     }
2750 
2751   if (GET_CODE (x) == PLUS
2752       && GET_CODE (XEXP (x, 1)) == CONST_INT
2753       && GET_CODE (XEXP (x, 0)) == REG
2754       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2755     {
2756       int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2757       struct qty_table_elem *x0_ent = &qty_table[x0_q];
2758 
2759       if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2760 	  && x0_ent->const_rtx != NULL_RTX)
2761 	return 0;
2762     }
2763 
2764   /* This can happen as the result of virtual register instantiation, if
2765      the initial constant is too large to be a valid address.  This gives
2766      us a three instruction sequence, load large offset into a register,
2767      load fp minus a constant into a register, then a MEM which is the
2768      sum of the two `constant' registers.  */
2769   if (GET_CODE (x) == PLUS
2770       && GET_CODE (XEXP (x, 0)) == REG
2771       && GET_CODE (XEXP (x, 1)) == REG
2772       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2773       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2774     {
2775       int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2776       int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2777       struct qty_table_elem *x0_ent = &qty_table[x0_q];
2778       struct qty_table_elem *x1_ent = &qty_table[x1_q];
2779 
2780       if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2781 	  && x0_ent->const_rtx != NULL_RTX
2782 	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2783 	  && x1_ent->const_rtx != NULL_RTX)
2784 	return 0;
2785     }
2786 
2787   return rtx_varies_p (x, from_alias);
2788 }
2789 
2790 /* Canonicalize an expression:
2791    replace each register reference inside it
2792    with the "oldest" equivalent register.
2793 
2794    If INSN is nonzero and we are replacing a pseudo with a hard register
2795    or vice versa, validate_change is used to ensure that INSN remains valid
2796    after we make our substitution.  The calls are made with IN_GROUP nonzero
2797    so apply_change_group must be called upon the outermost return from this
2798    function (unless INSN is zero).  The result of apply_change_group can
2799    generally be discarded since the changes we are making are optional.  */
2800 
2801 static rtx
canon_reg(x,insn)2802 canon_reg (x, insn)
2803      rtx x;
2804      rtx insn;
2805 {
2806   int i;
2807   enum rtx_code code;
2808   const char *fmt;
2809 
2810   if (x == 0)
2811     return x;
2812 
2813   code = GET_CODE (x);
2814   switch (code)
2815     {
2816     case PC:
2817     case CC0:
2818     case CONST:
2819     case CONST_INT:
2820     case CONST_DOUBLE:
2821     case CONST_VECTOR:
2822     case SYMBOL_REF:
2823     case LABEL_REF:
2824     case ADDR_VEC:
2825     case ADDR_DIFF_VEC:
2826       return x;
2827 
2828     case REG:
2829       {
2830 	int first;
2831 	int q;
2832 	struct qty_table_elem *ent;
2833 
2834 	/* Never replace a hard reg, because hard regs can appear
2835 	   in more than one machine mode, and we must preserve the mode
2836 	   of each occurrence.  Also, some hard regs appear in
2837 	   MEMs that are shared and mustn't be altered.  Don't try to
2838 	   replace any reg that maps to a reg of class NO_REGS.  */
2839 	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2840 	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2841 	  return x;
2842 
2843 	q = REG_QTY (REGNO (x));
2844 	ent = &qty_table[q];
2845 	first = ent->first_reg;
2846 	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2847 		: REGNO_REG_CLASS (first) == NO_REGS ? x
2848 		: gen_rtx_REG (ent->mode, first));
2849       }
2850 
2851     default:
2852       break;
2853     }
2854 
2855   fmt = GET_RTX_FORMAT (code);
2856   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2857     {
2858       int j;
2859 
2860       if (fmt[i] == 'e')
2861 	{
2862 	  rtx new = canon_reg (XEXP (x, i), insn);
2863 	  int insn_code;
2864 
2865 	  /* If replacing pseudo with hard reg or vice versa, ensure the
2866 	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2867 	  if (insn != 0 && new != 0
2868 	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2869 	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2870 		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2871 		  || (insn_code = recog_memoized (insn)) < 0
2872 		  || insn_data[insn_code].n_dups > 0))
2873 	    validate_change (insn, &XEXP (x, i), new, 1);
2874 	  else
2875 	    XEXP (x, i) = new;
2876 	}
2877       else if (fmt[i] == 'E')
2878 	for (j = 0; j < XVECLEN (x, i); j++)
2879 	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2880     }
2881 
2882   return x;
2883 }
2884 
2885 /* LOC is a location within INSN that is an operand address (the contents of
2886    a MEM).  Find the best equivalent address to use that is valid for this
2887    insn.
2888 
2889    On most CISC machines, complicated address modes are costly, and rtx_cost
2890    is a good approximation for that cost.  However, most RISC machines have
2891    only a few (usually only one) memory reference formats.  If an address is
2892    valid at all, it is often just as cheap as any other address.  Hence, for
2893    RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2894    costs of various addresses.  For two addresses of equal cost, choose the one
2895    with the highest `rtx_cost' value as that has the potential of eliminating
2896    the most insns.  For equal costs, we choose the first in the equivalence
2897    class.  Note that we ignore the fact that pseudo registers are cheaper
2898    than hard registers here because we would also prefer the pseudo registers.
2899   */
2900 
2901 static void
find_best_addr(insn,loc,mode)2902 find_best_addr (insn, loc, mode)
2903      rtx insn;
2904      rtx *loc;
2905      enum machine_mode mode;
2906 {
2907   struct table_elt *elt;
2908   rtx addr = *loc;
2909 #ifdef ADDRESS_COST
2910   struct table_elt *p;
2911   int found_better = 1;
2912 #endif
2913   int save_do_not_record = do_not_record;
2914   int save_hash_arg_in_memory = hash_arg_in_memory;
2915   int addr_volatile;
2916   int regno;
2917   unsigned hash;
2918 
2919   /* Do not try to replace constant addresses or addresses of local and
2920      argument slots.  These MEM expressions are made only once and inserted
2921      in many instructions, as well as being used to control symbol table
2922      output.  It is not safe to clobber them.
2923 
2924      There are some uncommon cases where the address is already in a register
2925      for some reason, but we cannot take advantage of that because we have
2926      no easy way to unshare the MEM.  In addition, looking up all stack
2927      addresses is costly.  */
2928   if ((GET_CODE (addr) == PLUS
2929        && GET_CODE (XEXP (addr, 0)) == REG
2930        && GET_CODE (XEXP (addr, 1)) == CONST_INT
2931        && (regno = REGNO (XEXP (addr, 0)),
2932 	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2933 	   || regno == ARG_POINTER_REGNUM))
2934       || (GET_CODE (addr) == REG
2935 	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2936 	      || regno == HARD_FRAME_POINTER_REGNUM
2937 	      || regno == ARG_POINTER_REGNUM))
2938       || GET_CODE (addr) == ADDRESSOF
2939       || CONSTANT_ADDRESS_P (addr))
2940     return;
2941 
2942   /* If this address is not simply a register, try to fold it.  This will
2943      sometimes simplify the expression.  Many simplifications
2944      will not be valid, but some, usually applying the associative rule, will
2945      be valid and produce better code.  */
2946   if (GET_CODE (addr) != REG)
2947     {
2948       rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2949       int addr_folded_cost = address_cost (folded, mode);
2950       int addr_cost = address_cost (addr, mode);
2951 
2952       if ((addr_folded_cost < addr_cost
2953 	   || (addr_folded_cost == addr_cost
2954 	       /* ??? The rtx_cost comparison is left over from an older
2955 		  version of this code.  It is probably no longer helpful.  */
2956 	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2957 		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
2958 	  && validate_change (insn, loc, folded, 0))
2959 	addr = folded;
2960     }
2961 
2962   /* If this address is not in the hash table, we can't look for equivalences
2963      of the whole address.  Also, ignore if volatile.  */
2964 
2965   do_not_record = 0;
2966   hash = HASH (addr, Pmode);
2967   addr_volatile = do_not_record;
2968   do_not_record = save_do_not_record;
2969   hash_arg_in_memory = save_hash_arg_in_memory;
2970 
2971   if (addr_volatile)
2972     return;
2973 
2974   elt = lookup (addr, hash, Pmode);
2975 
2976 #ifndef ADDRESS_COST
2977   if (elt)
2978     {
2979       int our_cost = elt->cost;
2980 
2981       /* Find the lowest cost below ours that works.  */
2982       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2983 	if (elt->cost < our_cost
2984 	    && (GET_CODE (elt->exp) == REG
2985 		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2986 	    && validate_change (insn, loc,
2987 				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2988 	  return;
2989     }
2990 #else
2991 
2992   if (elt)
2993     {
2994       /* We need to find the best (under the criteria documented above) entry
2995 	 in the class that is valid.  We use the `flag' field to indicate
2996 	 choices that were invalid and iterate until we can't find a better
2997 	 one that hasn't already been tried.  */
2998 
2999       for (p = elt->first_same_value; p; p = p->next_same_value)
3000 	p->flag = 0;
3001 
3002       while (found_better)
3003 	{
3004 	  int best_addr_cost = address_cost (*loc, mode);
3005 	  int best_rtx_cost = (elt->cost + 1) >> 1;
3006 	  int exp_cost;
3007 	  struct table_elt *best_elt = elt;
3008 
3009 	  found_better = 0;
3010 	  for (p = elt->first_same_value; p; p = p->next_same_value)
3011 	    if (! p->flag)
3012 	      {
3013 		if ((GET_CODE (p->exp) == REG
3014 		     || exp_equiv_p (p->exp, p->exp, 1, 0))
3015 		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
3016 			|| (exp_cost == best_addr_cost
3017 			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
3018 		  {
3019 		    found_better = 1;
3020 		    best_addr_cost = exp_cost;
3021 		    best_rtx_cost = (p->cost + 1) >> 1;
3022 		    best_elt = p;
3023 		  }
3024 	      }
3025 
3026 	  if (found_better)
3027 	    {
3028 	      if (validate_change (insn, loc,
3029 				   canon_reg (copy_rtx (best_elt->exp),
3030 					      NULL_RTX), 0))
3031 		return;
3032 	      else
3033 		best_elt->flag = 1;
3034 	    }
3035 	}
3036     }
3037 
3038   /* If the address is a binary operation with the first operand a register
3039      and the second a constant, do the same as above, but looking for
3040      equivalences of the register.  Then try to simplify before checking for
3041      the best address to use.  This catches a few cases:  First is when we
3042      have REG+const and the register is another REG+const.  We can often merge
3043      the constants and eliminate one insn and one register.  It may also be
3044      that a machine has a cheap REG+REG+const.  Finally, this improves the
3045      code on the Alpha for unaligned byte stores.  */
3046 
3047   if (flag_expensive_optimizations
3048       && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3049 	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3050       && GET_CODE (XEXP (*loc, 0)) == REG
3051       && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3052     {
3053       rtx c = XEXP (*loc, 1);
3054 
3055       do_not_record = 0;
3056       hash = HASH (XEXP (*loc, 0), Pmode);
3057       do_not_record = save_do_not_record;
3058       hash_arg_in_memory = save_hash_arg_in_memory;
3059 
3060       elt = lookup (XEXP (*loc, 0), hash, Pmode);
3061       if (elt == 0)
3062 	return;
3063 
3064       /* We need to find the best (under the criteria documented above) entry
3065 	 in the class that is valid.  We use the `flag' field to indicate
3066 	 choices that were invalid and iterate until we can't find a better
3067 	 one that hasn't already been tried.  */
3068 
3069       for (p = elt->first_same_value; p; p = p->next_same_value)
3070 	p->flag = 0;
3071 
3072       while (found_better)
3073 	{
3074 	  int best_addr_cost = address_cost (*loc, mode);
3075 	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
3076 	  struct table_elt *best_elt = elt;
3077 	  rtx best_rtx = *loc;
3078 	  int count;
3079 
3080 	  /* This is at worst case an O(n^2) algorithm, so limit our search
3081 	     to the first 32 elements on the list.  This avoids trouble
3082 	     compiling code with very long basic blocks that can easily
3083 	     call simplify_gen_binary so many times that we run out of
3084 	     memory.  */
3085 
3086 	  found_better = 0;
3087 	  for (p = elt->first_same_value, count = 0;
3088 	       p && count < 32;
3089 	       p = p->next_same_value, count++)
3090 	    if (! p->flag
3091 		&& (GET_CODE (p->exp) == REG
3092 		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3093 	      {
3094 		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3095 					       p->exp, c);
3096 		int new_cost;
3097 		new_cost = address_cost (new, mode);
3098 
3099 		if (new_cost < best_addr_cost
3100 		    || (new_cost == best_addr_cost
3101 			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3102 		  {
3103 		    found_better = 1;
3104 		    best_addr_cost = new_cost;
3105 		    best_rtx_cost = (COST (new) + 1) >> 1;
3106 		    best_elt = p;
3107 		    best_rtx = new;
3108 		  }
3109 	      }
3110 
3111 	  if (found_better)
3112 	    {
3113 	      if (validate_change (insn, loc,
3114 				   canon_reg (copy_rtx (best_rtx),
3115 					      NULL_RTX), 0))
3116 		return;
3117 	      else
3118 		best_elt->flag = 1;
3119 	    }
3120 	}
3121     }
3122 #endif
3123 }
3124 
3125 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3126    operation (EQ, NE, GT, etc.), follow it back through the hash table and
3127    what values are being compared.
3128 
3129    *PARG1 and *PARG2 are updated to contain the rtx representing the values
3130    actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3131    was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3132    compared to produce cc0.
3133 
3134    The return value is the comparison operator and is either the code of
3135    A or the code corresponding to the inverse of the comparison.  */
3136 
3137 static enum rtx_code
find_comparison_args(code,parg1,parg2,pmode1,pmode2)3138 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3139      enum rtx_code code;
3140      rtx *parg1, *parg2;
3141      enum machine_mode *pmode1, *pmode2;
3142 {
3143   rtx arg1, arg2;
3144 
3145   arg1 = *parg1, arg2 = *parg2;
3146 
3147   /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3148 
3149   while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3150     {
3151       /* Set nonzero when we find something of interest.  */
3152       rtx x = 0;
3153       int reverse_code = 0;
3154       struct table_elt *p = 0;
3155 
3156       /* If arg1 is a COMPARE, extract the comparison arguments from it.
3157 	 On machines with CC0, this is the only case that can occur, since
3158 	 fold_rtx will return the COMPARE or item being compared with zero
3159 	 when given CC0.  */
3160 
3161       if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3162 	x = arg1;
3163 
3164       /* If ARG1 is a comparison operator and CODE is testing for
3165 	 STORE_FLAG_VALUE, get the inner arguments.  */
3166 
3167       else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3168 	{
3169 #ifdef FLOAT_STORE_FLAG_VALUE
3170 	  REAL_VALUE_TYPE fsfv;
3171 #endif
3172 
3173 	  if (code == NE
3174 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3175 		  && code == LT && STORE_FLAG_VALUE == -1)
3176 #ifdef FLOAT_STORE_FLAG_VALUE
3177 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3178 		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3179 		      REAL_VALUE_NEGATIVE (fsfv)))
3180 #endif
3181 	      )
3182 	    x = arg1;
3183 	  else if (code == EQ
3184 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3185 		       && code == GE && STORE_FLAG_VALUE == -1)
3186 #ifdef FLOAT_STORE_FLAG_VALUE
3187 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3188 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3189 			   REAL_VALUE_NEGATIVE (fsfv)))
3190 #endif
3191 		   )
3192 	    x = arg1, reverse_code = 1;
3193 	}
3194 
3195       /* ??? We could also check for
3196 
3197 	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3198 
3199 	 and related forms, but let's wait until we see them occurring.  */
3200 
3201       if (x == 0)
3202 	/* Look up ARG1 in the hash table and see if it has an equivalence
3203 	   that lets us see what is being compared.  */
3204 	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3205 		    GET_MODE (arg1));
3206       if (p)
3207 	{
3208 	  p = p->first_same_value;
3209 
3210 	  /* If what we compare is already known to be constant, that is as
3211 	     good as it gets.
3212 	     We need to break the loop in this case, because otherwise we
3213 	     can have an infinite loop when looking at a reg that is known
3214 	     to be a constant which is the same as a comparison of a reg
3215 	     against zero which appears later in the insn stream, which in
3216 	     turn is constant and the same as the comparison of the first reg
3217 	     against zero...  */
3218 	  if (p->is_const)
3219 	    break;
3220 	}
3221 
3222       for (; p; p = p->next_same_value)
3223 	{
3224 	  enum machine_mode inner_mode = GET_MODE (p->exp);
3225 #ifdef FLOAT_STORE_FLAG_VALUE
3226 	  REAL_VALUE_TYPE fsfv;
3227 #endif
3228 
3229 	  /* If the entry isn't valid, skip it.  */
3230 	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3231 	    continue;
3232 
3233 	  if (GET_CODE (p->exp) == COMPARE
3234 	      /* Another possibility is that this machine has a compare insn
3235 		 that includes the comparison code.  In that case, ARG1 would
3236 		 be equivalent to a comparison operation that would set ARG1 to
3237 		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3238 		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3239 		 we must reverse ORIG_CODE.  On machine with a negative value
3240 		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3241 	      || ((code == NE
3242 		   || (code == LT
3243 		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3244 		       && (GET_MODE_BITSIZE (inner_mode)
3245 			   <= HOST_BITS_PER_WIDE_INT)
3246 		       && (STORE_FLAG_VALUE
3247 			   & ((HOST_WIDE_INT) 1
3248 			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3249 #ifdef FLOAT_STORE_FLAG_VALUE
3250 		   || (code == LT
3251 		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3252 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3253 			   REAL_VALUE_NEGATIVE (fsfv)))
3254 #endif
3255 		   )
3256 		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3257 	    {
3258 	      x = p->exp;
3259 	      break;
3260 	    }
3261 	  else if ((code == EQ
3262 		    || (code == GE
3263 			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3264 			&& (GET_MODE_BITSIZE (inner_mode)
3265 			    <= HOST_BITS_PER_WIDE_INT)
3266 			&& (STORE_FLAG_VALUE
3267 			    & ((HOST_WIDE_INT) 1
3268 			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3269 #ifdef FLOAT_STORE_FLAG_VALUE
3270 		    || (code == GE
3271 			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3272 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3273 			    REAL_VALUE_NEGATIVE (fsfv)))
3274 #endif
3275 		    )
3276 		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3277 	    {
3278 	      reverse_code = 1;
3279 	      x = p->exp;
3280 	      break;
3281 	    }
3282 
3283 	  /* If this is fp + constant, the equivalent is a better operand since
3284 	     it may let us predict the value of the comparison.  */
3285 	  else if (NONZERO_BASE_PLUS_P (p->exp))
3286 	    {
3287 	      arg1 = p->exp;
3288 	      continue;
3289 	    }
3290 	}
3291 
3292       /* If we didn't find a useful equivalence for ARG1, we are done.
3293 	 Otherwise, set up for the next iteration.  */
3294       if (x == 0)
3295 	break;
3296 
3297       /* If we need to reverse the comparison, make sure that that is
3298 	 possible -- we can't necessarily infer the value of GE from LT
3299 	 with floating-point operands.  */
3300       if (reverse_code)
3301 	{
3302 	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3303 	  if (reversed == UNKNOWN)
3304 	    break;
3305 	  else
3306 	    code = reversed;
3307 	}
3308       else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3309 	code = GET_CODE (x);
3310       arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3311     }
3312 
3313   /* Return our results.  Return the modes from before fold_rtx
3314      because fold_rtx might produce const_int, and then it's too late.  */
3315   *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3316   *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3317 
3318   return code;
3319 }
3320 
3321 /* If X is a nontrivial arithmetic operation on an argument
3322    for which a constant value can be determined, return
3323    the result of operating on that value, as a constant.
3324    Otherwise, return X, possibly with one or more operands
3325    modified by recursive calls to this function.
3326 
3327    If X is a register whose contents are known, we do NOT
3328    return those contents here.  equiv_constant is called to
3329    perform that task.
3330 
3331    INSN is the insn that we may be modifying.  If it is 0, make a copy
3332    of X before modifying it.  */
3333 
3334 static rtx
fold_rtx(x,insn)3335 fold_rtx (x, insn)
3336      rtx x;
3337      rtx insn;
3338 {
3339   enum rtx_code code;
3340   enum machine_mode mode;
3341   const char *fmt;
3342   int i;
3343   rtx new = 0;
3344   int copied = 0;
3345   int must_swap = 0;
3346 
3347   /* Folded equivalents of first two operands of X.  */
3348   rtx folded_arg0;
3349   rtx folded_arg1;
3350 
3351   /* Constant equivalents of first three operands of X;
3352      0 when no such equivalent is known.  */
3353   rtx const_arg0;
3354   rtx const_arg1;
3355   rtx const_arg2;
3356 
3357   /* The mode of the first operand of X.  We need this for sign and zero
3358      extends.  */
3359   enum machine_mode mode_arg0;
3360 
3361   if (x == 0)
3362     return x;
3363 
3364   mode = GET_MODE (x);
3365   code = GET_CODE (x);
3366   switch (code)
3367     {
3368     case CONST:
3369     case CONST_INT:
3370     case CONST_DOUBLE:
3371     case CONST_VECTOR:
3372     case SYMBOL_REF:
3373     case LABEL_REF:
3374     case REG:
3375       /* No use simplifying an EXPR_LIST
3376 	 since they are used only for lists of args
3377 	 in a function call's REG_EQUAL note.  */
3378     case EXPR_LIST:
3379       /* Changing anything inside an ADDRESSOF is incorrect; we don't
3380 	 want to (e.g.,) make (addressof (const_int 0)) just because
3381 	 the location is known to be zero.  */
3382     case ADDRESSOF:
3383       return x;
3384 
3385 #ifdef HAVE_cc0
3386     case CC0:
3387       return prev_insn_cc0;
3388 #endif
3389 
3390     case PC:
3391       /* If the next insn is a CODE_LABEL followed by a jump table,
3392 	 PC's value is a LABEL_REF pointing to that label.  That
3393 	 lets us fold switch statements on the VAX.  */
3394       if (insn && GET_CODE (insn) == JUMP_INSN)
3395 	{
3396 	  rtx next = next_nonnote_insn (insn);
3397 
3398 	  if (next && GET_CODE (next) == CODE_LABEL
3399 	      && NEXT_INSN (next) != 0
3400 	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3401 	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3402 		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3403 	    return gen_rtx_LABEL_REF (Pmode, next);
3404 	}
3405       break;
3406 
3407     case SUBREG:
3408       /* See if we previously assigned a constant value to this SUBREG.  */
3409       if ((new = lookup_as_function (x, CONST_INT)) != 0
3410 	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3411 	return new;
3412 
3413       /* If this is a paradoxical SUBREG, we have no idea what value the
3414 	 extra bits would have.  However, if the operand is equivalent
3415 	 to a SUBREG whose operand is the same as our mode, and all the
3416 	 modes are within a word, we can just use the inner operand
3417 	 because these SUBREGs just say how to treat the register.
3418 
3419 	 Similarly if we find an integer constant.  */
3420 
3421       if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3422 	{
3423 	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3424 	  struct table_elt *elt;
3425 
3426 	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3427 	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3428 	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3429 				imode)) != 0)
3430 	    for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3431 	      {
3432 		if (CONSTANT_P (elt->exp)
3433 		    && GET_MODE (elt->exp) == VOIDmode)
3434 		  return elt->exp;
3435 
3436 		if (GET_CODE (elt->exp) == SUBREG
3437 		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
3438 		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3439 		  return copy_rtx (SUBREG_REG (elt->exp));
3440 	      }
3441 
3442 	  return x;
3443 	}
3444 
3445       /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
3446 	 We might be able to if the SUBREG is extracting a single word in an
3447 	 integral mode or extracting the low part.  */
3448 
3449       folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3450       const_arg0 = equiv_constant (folded_arg0);
3451       if (const_arg0)
3452 	folded_arg0 = const_arg0;
3453 
3454       if (folded_arg0 != SUBREG_REG (x))
3455 	{
3456 	  new = simplify_subreg (mode, folded_arg0,
3457 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3458 	  if (new)
3459 	    return new;
3460 	}
3461 
3462       /* If this is a narrowing SUBREG and our operand is a REG, see if
3463 	 we can find an equivalence for REG that is an arithmetic operation
3464 	 in a wider mode where both operands are paradoxical SUBREGs
3465 	 from objects of our result mode.  In that case, we couldn't report
3466 	 an equivalent value for that operation, since we don't know what the
3467 	 extra bits will be.  But we can find an equivalence for this SUBREG
3468 	 by folding that operation is the narrow mode.  This allows us to
3469 	 fold arithmetic in narrow modes when the machine only supports
3470 	 word-sized arithmetic.
3471 
3472 	 Also look for a case where we have a SUBREG whose operand is the
3473 	 same as our result.  If both modes are smaller than a word, we
3474 	 are simply interpreting a register in different modes and we
3475 	 can use the inner value.  */
3476 
3477       if (GET_CODE (folded_arg0) == REG
3478 	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3479 	  && subreg_lowpart_p (x))
3480 	{
3481 	  struct table_elt *elt;
3482 
3483 	  /* We can use HASH here since we know that canon_hash won't be
3484 	     called.  */
3485 	  elt = lookup (folded_arg0,
3486 			HASH (folded_arg0, GET_MODE (folded_arg0)),
3487 			GET_MODE (folded_arg0));
3488 
3489 	  if (elt)
3490 	    elt = elt->first_same_value;
3491 
3492 	  for (; elt; elt = elt->next_same_value)
3493 	    {
3494 	      enum rtx_code eltcode = GET_CODE (elt->exp);
3495 
3496 	      /* Just check for unary and binary operations.  */
3497 	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3498 		  && GET_CODE (elt->exp) != SIGN_EXTEND
3499 		  && GET_CODE (elt->exp) != ZERO_EXTEND
3500 		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3501 		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3502 		  && (GET_MODE_CLASS (mode)
3503 		      == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3504 		{
3505 		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3506 
3507 		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3508 		    op0 = fold_rtx (op0, NULL_RTX);
3509 
3510 		  op0 = equiv_constant (op0);
3511 		  if (op0)
3512 		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3513 						    op0, mode);
3514 		}
3515 	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3516 			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3517 		       && eltcode != DIV && eltcode != MOD
3518 		       && eltcode != UDIV && eltcode != UMOD
3519 		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3520 		       && eltcode != ROTATE && eltcode != ROTATERT
3521 		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3522 			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3523 				== mode))
3524 			   || CONSTANT_P (XEXP (elt->exp, 0)))
3525 		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3526 			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3527 				== mode))
3528 			   || CONSTANT_P (XEXP (elt->exp, 1))))
3529 		{
3530 		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3531 		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3532 
3533 		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3534 		    op0 = fold_rtx (op0, NULL_RTX);
3535 
3536 		  if (op0)
3537 		    op0 = equiv_constant (op0);
3538 
3539 		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3540 		    op1 = fold_rtx (op1, NULL_RTX);
3541 
3542 		  if (op1)
3543 		    op1 = equiv_constant (op1);
3544 
3545 		  /* If we are looking for the low SImode part of
3546 		     (ashift:DI c (const_int 32)), it doesn't work
3547 		     to compute that in SImode, because a 32-bit shift
3548 		     in SImode is unpredictable.  We know the value is 0.  */
3549 		  if (op0 && op1
3550 		      && GET_CODE (elt->exp) == ASHIFT
3551 		      && GET_CODE (op1) == CONST_INT
3552 		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3553 		    {
3554 		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3555 
3556 			/* If the count fits in the inner mode's width,
3557 			   but exceeds the outer mode's width,
3558 			   the value will get truncated to 0
3559 			   by the subreg.  */
3560 			new = const0_rtx;
3561 		      else
3562 			/* If the count exceeds even the inner mode's width,
3563 			   don't fold this expression.  */
3564 			new = 0;
3565 		    }
3566 		  else if (op0 && op1)
3567 		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3568 						     op0, op1);
3569 		}
3570 
3571 	      else if (GET_CODE (elt->exp) == SUBREG
3572 		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
3573 		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3574 			   <= UNITS_PER_WORD)
3575 		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3576 		new = copy_rtx (SUBREG_REG (elt->exp));
3577 
3578 	      if (new)
3579 		return new;
3580 	    }
3581 	}
3582 
3583       return x;
3584 
3585     case NOT:
3586     case NEG:
3587       /* If we have (NOT Y), see if Y is known to be (NOT Z).
3588 	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3589       new = lookup_as_function (XEXP (x, 0), code);
3590       if (new)
3591 	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3592       break;
3593 
3594     case MEM:
3595       /* If we are not actually processing an insn, don't try to find the
3596 	 best address.  Not only don't we care, but we could modify the
3597 	 MEM in an invalid way since we have no insn to validate against.  */
3598       if (insn != 0)
3599 	find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3600 
3601       {
3602 	/* Even if we don't fold in the insn itself,
3603 	   we can safely do so here, in hopes of getting a constant.  */
3604 	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3605 	rtx base = 0;
3606 	HOST_WIDE_INT offset = 0;
3607 
3608 	if (GET_CODE (addr) == REG
3609 	    && REGNO_QTY_VALID_P (REGNO (addr)))
3610 	  {
3611 	    int addr_q = REG_QTY (REGNO (addr));
3612 	    struct qty_table_elem *addr_ent = &qty_table[addr_q];
3613 
3614 	    if (GET_MODE (addr) == addr_ent->mode
3615 		&& addr_ent->const_rtx != NULL_RTX)
3616 	      addr = addr_ent->const_rtx;
3617 	  }
3618 
3619 	/* If address is constant, split it into a base and integer offset.  */
3620 	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3621 	  base = addr;
3622 	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3623 		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3624 	  {
3625 	    base = XEXP (XEXP (addr, 0), 0);
3626 	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3627 	  }
3628 	else if (GET_CODE (addr) == LO_SUM
3629 		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3630 	  base = XEXP (addr, 1);
3631 	else if (GET_CODE (addr) == ADDRESSOF)
3632 	  return change_address (x, VOIDmode, addr);
3633 
3634 	/* If this is a constant pool reference, we can fold it into its
3635 	   constant to allow better value tracking.  */
3636 	if (base && GET_CODE (base) == SYMBOL_REF
3637 	    && CONSTANT_POOL_ADDRESS_P (base))
3638 	  {
3639 	    rtx constant = get_pool_constant (base);
3640 	    enum machine_mode const_mode = get_pool_mode (base);
3641 	    rtx new;
3642 
3643 	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3644 	      constant_pool_entries_cost = COST (constant);
3645 
3646 	    /* If we are loading the full constant, we have an equivalence.  */
3647 	    if (offset == 0 && mode == const_mode)
3648 	      return constant;
3649 
3650 	    /* If this actually isn't a constant (weird!), we can't do
3651 	       anything.  Otherwise, handle the two most common cases:
3652 	       extracting a word from a multi-word constant, and extracting
3653 	       the low-order bits.  Other cases don't seem common enough to
3654 	       worry about.  */
3655 	    if (! CONSTANT_P (constant))
3656 	      return x;
3657 
3658 	    if (GET_MODE_CLASS (mode) == MODE_INT
3659 		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
3660 		&& offset % UNITS_PER_WORD == 0
3661 		&& (new = operand_subword (constant,
3662 					   offset / UNITS_PER_WORD,
3663 					   0, const_mode)) != 0)
3664 	      return new;
3665 
3666 	    if (((BYTES_BIG_ENDIAN
3667 		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3668 		 || (! BYTES_BIG_ENDIAN && offset == 0))
3669 		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
3670 	      return new;
3671 	  }
3672 
3673 	/* If this is a reference to a label at a known position in a jump
3674 	   table, we also know its value.  */
3675 	if (base && GET_CODE (base) == LABEL_REF)
3676 	  {
3677 	    rtx label = XEXP (base, 0);
3678 	    rtx table_insn = NEXT_INSN (label);
3679 
3680 	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3681 		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3682 	      {
3683 		rtx table = PATTERN (table_insn);
3684 
3685 		if (offset >= 0
3686 		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3687 			< XVECLEN (table, 0)))
3688 		  return XVECEXP (table, 0,
3689 				  offset / GET_MODE_SIZE (GET_MODE (table)));
3690 	      }
3691 	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3692 		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3693 	      {
3694 		rtx table = PATTERN (table_insn);
3695 
3696 		if (offset >= 0
3697 		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3698 			< XVECLEN (table, 1)))
3699 		  {
3700 		    offset /= GET_MODE_SIZE (GET_MODE (table));
3701 		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3702 					 XEXP (table, 0));
3703 
3704 		    if (GET_MODE (table) != Pmode)
3705 		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3706 
3707 		    /* Indicate this is a constant.  This isn't a
3708 		       valid form of CONST, but it will only be used
3709 		       to fold the next insns and then discarded, so
3710 		       it should be safe.
3711 
3712 		       Note this expression must be explicitly discarded,
3713 		       by cse_insn, else it may end up in a REG_EQUAL note
3714 		       and "escape" to cause problems elsewhere.  */
3715 		    return gen_rtx_CONST (GET_MODE (new), new);
3716 		  }
3717 	      }
3718 	  }
3719 
3720 	return x;
3721       }
3722 
3723 #ifdef NO_FUNCTION_CSE
3724     case CALL:
3725       if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3726 	return x;
3727       break;
3728 #endif
3729 
3730     case ASM_OPERANDS:
3731       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3732 	validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3733 			 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3734       break;
3735 
3736     default:
3737       break;
3738     }
3739 
3740   const_arg0 = 0;
3741   const_arg1 = 0;
3742   const_arg2 = 0;
3743   mode_arg0 = VOIDmode;
3744 
3745   /* Try folding our operands.
3746      Then see which ones have constant values known.  */
3747 
3748   fmt = GET_RTX_FORMAT (code);
3749   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3750     if (fmt[i] == 'e')
3751       {
3752 	rtx arg = XEXP (x, i);
3753 	rtx folded_arg = arg, const_arg = 0;
3754 	enum machine_mode mode_arg = GET_MODE (arg);
3755 	rtx cheap_arg, expensive_arg;
3756 	rtx replacements[2];
3757 	int j;
3758 	int old_cost = COST_IN (XEXP (x, i), code);
3759 
3760 	/* Most arguments are cheap, so handle them specially.  */
3761 	switch (GET_CODE (arg))
3762 	  {
3763 	  case REG:
3764 	    /* This is the same as calling equiv_constant; it is duplicated
3765 	       here for speed.  */
3766 	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3767 	      {
3768 		int arg_q = REG_QTY (REGNO (arg));
3769 		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3770 
3771 		if (arg_ent->const_rtx != NULL_RTX
3772 		    && GET_CODE (arg_ent->const_rtx) != REG
3773 		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3774 		  const_arg
3775 		    = gen_lowpart_if_possible (GET_MODE (arg),
3776 					       arg_ent->const_rtx);
3777 	      }
3778 	    break;
3779 
3780 	  case CONST:
3781 	  case CONST_INT:
3782 	  case SYMBOL_REF:
3783 	  case LABEL_REF:
3784 	  case CONST_DOUBLE:
3785 	  case CONST_VECTOR:
3786 	    const_arg = arg;
3787 	    break;
3788 
3789 #ifdef HAVE_cc0
3790 	  case CC0:
3791 	    folded_arg = prev_insn_cc0;
3792 	    mode_arg = prev_insn_cc0_mode;
3793 	    const_arg = equiv_constant (folded_arg);
3794 	    break;
3795 #endif
3796 
3797 	  default:
3798 	    folded_arg = fold_rtx (arg, insn);
3799 	    const_arg = equiv_constant (folded_arg);
3800 	  }
3801 
3802 	/* For the first three operands, see if the operand
3803 	   is constant or equivalent to a constant.  */
3804 	switch (i)
3805 	  {
3806 	  case 0:
3807 	    folded_arg0 = folded_arg;
3808 	    const_arg0 = const_arg;
3809 	    mode_arg0 = mode_arg;
3810 	    break;
3811 	  case 1:
3812 	    folded_arg1 = folded_arg;
3813 	    const_arg1 = const_arg;
3814 	    break;
3815 	  case 2:
3816 	    const_arg2 = const_arg;
3817 	    break;
3818 	  }
3819 
3820 	/* Pick the least expensive of the folded argument and an
3821 	   equivalent constant argument.  */
3822 	if (const_arg == 0 || const_arg == folded_arg
3823 	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3824 	  cheap_arg = folded_arg, expensive_arg = const_arg;
3825 	else
3826 	  cheap_arg = const_arg, expensive_arg = folded_arg;
3827 
3828 	/* Try to replace the operand with the cheapest of the two
3829 	   possibilities.  If it doesn't work and this is either of the first
3830 	   two operands of a commutative operation, try swapping them.
3831 	   If THAT fails, try the more expensive, provided it is cheaper
3832 	   than what is already there.  */
3833 
3834 	if (cheap_arg == XEXP (x, i))
3835 	  continue;
3836 
3837 	if (insn == 0 && ! copied)
3838 	  {
3839 	    x = copy_rtx (x);
3840 	    copied = 1;
3841 	  }
3842 
3843 	/* Order the replacements from cheapest to most expensive.  */
3844 	replacements[0] = cheap_arg;
3845 	replacements[1] = expensive_arg;
3846 
3847 	for (j = 0; j < 2 && replacements[j]; j++)
3848 	  {
3849 	    int new_cost = COST_IN (replacements[j], code);
3850 
3851 	    /* Stop if what existed before was cheaper.  Prefer constants
3852 	       in the case of a tie.  */
3853 	    if (new_cost > old_cost
3854 		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3855 	      break;
3856 
3857 	    /* It's not safe to substitute the operand of a conversion
3858 	       operator with a constant, as the conversion's identity
3859 	       depends upon the mode of it's operand.  This optimization
3860 	       is handled by the call to simplify_unary_operation.  */
3861 	    if (GET_RTX_CLASS (code) == '1'
3862 		&& GET_MODE (replacements[j]) != mode_arg0
3863 		&& (code == ZERO_EXTEND
3864 		    || code == SIGN_EXTEND
3865 		    || code == TRUNCATE
3866 		    || code == FLOAT_TRUNCATE
3867 		    || code == FLOAT_EXTEND
3868 		    || code == FLOAT
3869 		    || code == FIX
3870 		    || code == UNSIGNED_FLOAT
3871 		    || code == UNSIGNED_FIX))
3872 	      continue;
3873 
3874 	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3875 	      break;
3876 
3877 	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3878 		|| code == LTGT || code == UNEQ || code == ORDERED
3879 		|| code == UNORDERED)
3880 	      {
3881 		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3882 		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3883 
3884 		if (apply_change_group ())
3885 		  {
3886 		    /* Swap them back to be invalid so that this loop can
3887 		       continue and flag them to be swapped back later.  */
3888 		    rtx tem;
3889 
3890 		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3891 				       XEXP (x, 1) = tem;
3892 		    must_swap = 1;
3893 		    break;
3894 		  }
3895 	      }
3896 	  }
3897       }
3898 
3899     else
3900       {
3901 	if (fmt[i] == 'E')
3902 	  /* Don't try to fold inside of a vector of expressions.
3903 	     Doing nothing is harmless.  */
3904 	  {;}
3905       }
3906 
3907   /* If a commutative operation, place a constant integer as the second
3908      operand unless the first operand is also a constant integer.  Otherwise,
3909      place any constant second unless the first operand is also a constant.  */
3910 
3911   if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3912       || code == LTGT || code == UNEQ || code == ORDERED
3913       || code == UNORDERED)
3914     {
3915       if (must_swap || (const_arg0
3916 	  		&& (const_arg1 == 0
3917 	      		    || (GET_CODE (const_arg0) == CONST_INT
3918 			        && GET_CODE (const_arg1) != CONST_INT))))
3919 	{
3920 	  rtx tem = XEXP (x, 0);
3921 
3922 	  if (insn == 0 && ! copied)
3923 	    {
3924 	      x = copy_rtx (x);
3925 	      copied = 1;
3926 	    }
3927 
3928 	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3929 	  validate_change (insn, &XEXP (x, 1), tem, 1);
3930 	  if (apply_change_group ())
3931 	    {
3932 	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3933 	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3934 	    }
3935 	}
3936     }
3937 
3938   /* If X is an arithmetic operation, see if we can simplify it.  */
3939 
3940   switch (GET_RTX_CLASS (code))
3941     {
3942     case '1':
3943       {
3944 	int is_const = 0;
3945 
3946 	/* We can't simplify extension ops unless we know the
3947 	   original mode.  */
3948 	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3949 	    && mode_arg0 == VOIDmode)
3950 	  break;
3951 
3952 	/* If we had a CONST, strip it off and put it back later if we
3953 	   fold.  */
3954 	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3955 	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3956 
3957 	new = simplify_unary_operation (code, mode,
3958 					const_arg0 ? const_arg0 : folded_arg0,
3959 					mode_arg0);
3960 	if (new != 0 && is_const)
3961 	  new = gen_rtx_CONST (mode, new);
3962       }
3963       break;
3964 
3965     case '<':
3966       /* See what items are actually being compared and set FOLDED_ARG[01]
3967 	 to those values and CODE to the actual comparison code.  If any are
3968 	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3969 	 do anything if both operands are already known to be constant.  */
3970 
3971       if (const_arg0 == 0 || const_arg1 == 0)
3972 	{
3973 	  struct table_elt *p0, *p1;
3974 	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3975 	  enum machine_mode mode_arg1;
3976 
3977 #ifdef FLOAT_STORE_FLAG_VALUE
3978 	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3979 	    {
3980 	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3981 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3982 	      false_rtx = CONST0_RTX (mode);
3983 	    }
3984 #endif
3985 
3986 	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3987 				       &mode_arg0, &mode_arg1);
3988 	  const_arg0 = equiv_constant (folded_arg0);
3989 	  const_arg1 = equiv_constant (folded_arg1);
3990 
3991 	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3992 	     what kinds of things are being compared, so we can't do
3993 	     anything with this comparison.  */
3994 
3995 	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3996 	    break;
3997 
3998 	  /* If we do not now have two constants being compared, see
3999 	     if we can nevertheless deduce some things about the
4000 	     comparison.  */
4001 	  if (const_arg0 == 0 || const_arg1 == 0)
4002 	    {
4003 	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or
4004 		 non-explicit constant?  These aren't zero, but we
4005 		 don't know their sign.  */
4006 	      if (const_arg1 == const0_rtx
4007 		  && (NONZERO_BASE_PLUS_P (folded_arg0)
4008 #if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
4009 	  come out as 0.  */
4010 		      || GET_CODE (folded_arg0) == SYMBOL_REF
4011 #endif
4012 		      || GET_CODE (folded_arg0) == LABEL_REF
4013 		      || GET_CODE (folded_arg0) == CONST))
4014 		{
4015 		  if (code == EQ)
4016 		    return false_rtx;
4017 		  else if (code == NE)
4018 		    return true_rtx;
4019 		}
4020 
4021 	      /* See if the two operands are the same.  */
4022 
4023 	      if (folded_arg0 == folded_arg1
4024 		  || (GET_CODE (folded_arg0) == REG
4025 		      && GET_CODE (folded_arg1) == REG
4026 		      && (REG_QTY (REGNO (folded_arg0))
4027 			  == REG_QTY (REGNO (folded_arg1))))
4028 		  || ((p0 = lookup (folded_arg0,
4029 				    (safe_hash (folded_arg0, mode_arg0)
4030 				     & HASH_MASK), mode_arg0))
4031 		      && (p1 = lookup (folded_arg1,
4032 				       (safe_hash (folded_arg1, mode_arg0)
4033 					& HASH_MASK), mode_arg0))
4034 		      && p0->first_same_value == p1->first_same_value))
4035 		{
4036 		  /* Sadly two equal NaNs are not equivalent.  */
4037 		  if (!HONOR_NANS (mode_arg0))
4038 		    return ((code == EQ || code == LE || code == GE
4039 			     || code == LEU || code == GEU || code == UNEQ
4040 			     || code == UNLE || code == UNGE
4041 			     || code == ORDERED)
4042 			    ? true_rtx : false_rtx);
4043 		  /* Take care for the FP compares we can resolve.  */
4044 		  if (code == UNEQ || code == UNLE || code == UNGE)
4045 		    return true_rtx;
4046 		  if (code == LTGT || code == LT || code == GT)
4047 		    return false_rtx;
4048 		}
4049 
4050 	      /* If FOLDED_ARG0 is a register, see if the comparison we are
4051 		 doing now is either the same as we did before or the reverse
4052 		 (we only check the reverse if not floating-point).  */
4053 	      else if (GET_CODE (folded_arg0) == REG)
4054 		{
4055 		  int qty = REG_QTY (REGNO (folded_arg0));
4056 
4057 		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4058 		    {
4059 		      struct qty_table_elem *ent = &qty_table[qty];
4060 
4061 		      if ((comparison_dominates_p (ent->comparison_code, code)
4062 			   || (! FLOAT_MODE_P (mode_arg0)
4063 			       && comparison_dominates_p (ent->comparison_code,
4064 						          reverse_condition (code))))
4065 			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
4066 			      || (const_arg1
4067 				  && rtx_equal_p (ent->comparison_const,
4068 						  const_arg1))
4069 			      || (GET_CODE (folded_arg1) == REG
4070 				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4071 			return (comparison_dominates_p (ent->comparison_code, code)
4072 				? true_rtx : false_rtx);
4073 		    }
4074 		}
4075 	    }
4076 	}
4077 
4078       /* If we are comparing against zero, see if the first operand is
4079 	 equivalent to an IOR with a constant.  If so, we may be able to
4080 	 determine the result of this comparison.  */
4081 
4082       if (const_arg1 == const0_rtx)
4083 	{
4084 	  rtx y = lookup_as_function (folded_arg0, IOR);
4085 	  rtx inner_const;
4086 
4087 	  if (y != 0
4088 	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4089 	      && GET_CODE (inner_const) == CONST_INT
4090 	      && INTVAL (inner_const) != 0)
4091 	    {
4092 	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4093 	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4094 			      && (INTVAL (inner_const)
4095 				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4096 	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4097 
4098 #ifdef FLOAT_STORE_FLAG_VALUE
4099 	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4100 		{
4101 		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4102 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4103 		  false_rtx = CONST0_RTX (mode);
4104 		}
4105 #endif
4106 
4107 	      switch (code)
4108 		{
4109 		case EQ:
4110 		  return false_rtx;
4111 		case NE:
4112 		  return true_rtx;
4113 		case LT:  case LE:
4114 		  if (has_sign)
4115 		    return true_rtx;
4116 		  break;
4117 		case GT:  case GE:
4118 		  if (has_sign)
4119 		    return false_rtx;
4120 		  break;
4121 		default:
4122 		  break;
4123 		}
4124 	    }
4125 	}
4126 
4127       new = simplify_relational_operation (code,
4128 					   (mode_arg0 != VOIDmode
4129 					    ? mode_arg0
4130 					    : (GET_MODE (const_arg0
4131 							 ? const_arg0
4132 							 : folded_arg0)
4133 					       != VOIDmode)
4134 					    ? GET_MODE (const_arg0
4135 							? const_arg0
4136 							: folded_arg0)
4137 					    : GET_MODE (const_arg1
4138 							? const_arg1
4139 							: folded_arg1)),
4140 					   const_arg0 ? const_arg0 : folded_arg0,
4141 					   const_arg1 ? const_arg1 : folded_arg1);
4142 #ifdef FLOAT_STORE_FLAG_VALUE
4143       if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4144 	{
4145 	  if (new == const0_rtx)
4146 	    new = CONST0_RTX (mode);
4147 	  else
4148 	    new = (CONST_DOUBLE_FROM_REAL_VALUE
4149 		   (FLOAT_STORE_FLAG_VALUE (mode), mode));
4150 	}
4151 #endif
4152       break;
4153 
4154     case '2':
4155     case 'c':
4156       switch (code)
4157 	{
4158 	case PLUS:
4159 	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4160 	     with that LABEL_REF as its second operand.  If so, the result is
4161 	     the first operand of that MINUS.  This handles switches with an
4162 	     ADDR_DIFF_VEC table.  */
4163 	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4164 	    {
4165 	      rtx y
4166 		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4167 		: lookup_as_function (folded_arg0, MINUS);
4168 
4169 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4170 		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4171 		return XEXP (y, 0);
4172 
4173 	      /* Now try for a CONST of a MINUS like the above.  */
4174 	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4175 			: lookup_as_function (folded_arg0, CONST))) != 0
4176 		  && GET_CODE (XEXP (y, 0)) == MINUS
4177 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4178 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4179 		return XEXP (XEXP (y, 0), 0);
4180 	    }
4181 
4182 	  /* Likewise if the operands are in the other order.  */
4183 	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4184 	    {
4185 	      rtx y
4186 		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4187 		: lookup_as_function (folded_arg1, MINUS);
4188 
4189 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4190 		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4191 		return XEXP (y, 0);
4192 
4193 	      /* Now try for a CONST of a MINUS like the above.  */
4194 	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4195 			: lookup_as_function (folded_arg1, CONST))) != 0
4196 		  && GET_CODE (XEXP (y, 0)) == MINUS
4197 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4198 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4199 		return XEXP (XEXP (y, 0), 0);
4200 	    }
4201 
4202 	  /* If second operand is a register equivalent to a negative
4203 	     CONST_INT, see if we can find a register equivalent to the
4204 	     positive constant.  Make a MINUS if so.  Don't do this for
4205 	     a non-negative constant since we might then alternate between
4206 	     choosing positive and negative constants.  Having the positive
4207 	     constant previously-used is the more common case.  Be sure
4208 	     the resulting constant is non-negative; if const_arg1 were
4209 	     the smallest negative number this would overflow: depending
4210 	     on the mode, this would either just be the same value (and
4211 	     hence not save anything) or be incorrect.  */
4212 	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4213 	      && INTVAL (const_arg1) < 0
4214 	      /* This used to test
4215 
4216 	         -INTVAL (const_arg1) >= 0
4217 
4218 		 But The Sun V5.0 compilers mis-compiled that test.  So
4219 		 instead we test for the problematic value in a more direct
4220 		 manner and hope the Sun compilers get it correct.  */
4221 	      && INTVAL (const_arg1) !=
4222 	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4223 	      && GET_CODE (folded_arg1) == REG)
4224 	    {
4225 	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4226 	      struct table_elt *p
4227 		= lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4228 			  mode);
4229 
4230 	      if (p)
4231 		for (p = p->first_same_value; p; p = p->next_same_value)
4232 		  if (GET_CODE (p->exp) == REG)
4233 		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4234 						canon_reg (p->exp, NULL_RTX));
4235 	    }
4236 	  goto from_plus;
4237 
4238 	case MINUS:
4239 	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4240 	     If so, produce (PLUS Z C2-C).  */
4241 	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4242 	    {
4243 	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4244 	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4245 		return fold_rtx (plus_constant (copy_rtx (y),
4246 						-INTVAL (const_arg1)),
4247 				 NULL_RTX);
4248 	    }
4249 
4250 	  /* Fall through.  */
4251 
4252 	from_plus:
4253 	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4254 	case IOR:     case AND:       case XOR:
4255 	case MULT:
4256 	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4257 	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4258 	     is known to be of similar form, we may be able to replace the
4259 	     operation with a combined operation.  This may eliminate the
4260 	     intermediate operation if every use is simplified in this way.
4261 	     Note that the similar optimization done by combine.c only works
4262 	     if the intermediate operation's result has only one reference.  */
4263 
4264 	  if (GET_CODE (folded_arg0) == REG
4265 	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4266 	    {
4267 	      int is_shift
4268 		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4269 	      rtx y = lookup_as_function (folded_arg0, code);
4270 	      rtx inner_const;
4271 	      enum rtx_code associate_code;
4272 	      rtx new_const;
4273 
4274 	      if (y == 0
4275 		  || 0 == (inner_const
4276 			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4277 		  || GET_CODE (inner_const) != CONST_INT
4278 		  /* If we have compiled a statement like
4279 		     "if (x == (x & mask1))", and now are looking at
4280 		     "x & mask2", we will have a case where the first operand
4281 		     of Y is the same as our first operand.  Unless we detect
4282 		     this case, an infinite loop will result.  */
4283 		  || XEXP (y, 0) == folded_arg0)
4284 		break;
4285 
4286 	      /* Don't associate these operations if they are a PLUS with the
4287 		 same constant and it is a power of two.  These might be doable
4288 		 with a pre- or post-increment.  Similarly for two subtracts of
4289 		 identical powers of two with post decrement.  */
4290 
4291 	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4292 		  && ((HAVE_PRE_INCREMENT
4293 			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4294 		      || (HAVE_POST_INCREMENT
4295 			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4296 		      || (HAVE_PRE_DECREMENT
4297 			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4298 		      || (HAVE_POST_DECREMENT
4299 			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4300 		break;
4301 
4302 	      /* Compute the code used to compose the constants.  For example,
4303 		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4304 
4305 	      associate_code = (is_shift || code == MINUS ? PLUS : code);
4306 
4307 	      new_const = simplify_binary_operation (associate_code, mode,
4308 						     const_arg1, inner_const);
4309 
4310 	      if (new_const == 0)
4311 		break;
4312 #ifndef FRAME_GROWS_DOWNWARD
4313 	      if (flag_propolice_protection
4314 		  && GET_CODE (y) == PLUS
4315 		  && XEXP (y, 0) == frame_pointer_rtx
4316 		  && INTVAL (inner_const) > 0
4317 		  && INTVAL (new_const) <= 0)
4318 		break;
4319 #endif
4320 	      /* If we are associating shift operations, don't let this
4321 		 produce a shift of the size of the object or larger.
4322 		 This could occur when we follow a sign-extend by a right
4323 		 shift on a machine that does a sign-extend as a pair
4324 		 of shifts.  */
4325 
4326 	      if (is_shift && GET_CODE (new_const) == CONST_INT
4327 		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4328 		{
4329 		  /* As an exception, we can turn an ASHIFTRT of this
4330 		     form into a shift of the number of bits - 1.  */
4331 		  if (code == ASHIFTRT)
4332 		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4333 		  else
4334 		    break;
4335 		}
4336 
4337 	      y = copy_rtx (XEXP (y, 0));
4338 
4339 	      /* If Y contains our first operand (the most common way this
4340 		 can happen is if Y is a MEM), we would do into an infinite
4341 		 loop if we tried to fold it.  So don't in that case.  */
4342 
4343 	      if (! reg_mentioned_p (folded_arg0, y))
4344 		y = fold_rtx (y, insn);
4345 
4346 	      return simplify_gen_binary (code, mode, y, new_const);
4347 	    }
4348 	  break;
4349 
4350 	case DIV:       case UDIV:
4351 	  /* ??? The associative optimization performed immediately above is
4352 	     also possible for DIV and UDIV using associate_code of MULT.
4353 	     However, we would need extra code to verify that the
4354 	     multiplication does not overflow, that is, there is no overflow
4355 	     in the calculation of new_const.  */
4356 	  break;
4357 
4358 	default:
4359 	  break;
4360 	}
4361 
4362       new = simplify_binary_operation (code, mode,
4363 				       const_arg0 ? const_arg0 : folded_arg0,
4364 				       const_arg1 ? const_arg1 : folded_arg1);
4365       break;
4366 
4367     case 'o':
4368       /* (lo_sum (high X) X) is simply X.  */
4369       if (code == LO_SUM && const_arg0 != 0
4370 	  && GET_CODE (const_arg0) == HIGH
4371 	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4372 	return const_arg1;
4373       break;
4374 
4375     case '3':
4376     case 'b':
4377       new = simplify_ternary_operation (code, mode, mode_arg0,
4378 					const_arg0 ? const_arg0 : folded_arg0,
4379 					const_arg1 ? const_arg1 : folded_arg1,
4380 					const_arg2 ? const_arg2 : XEXP (x, 2));
4381       break;
4382 
4383     case 'x':
4384       /* Always eliminate CONSTANT_P_RTX at this stage.  */
4385       if (code == CONSTANT_P_RTX)
4386 	return (const_arg0 ? const1_rtx : const0_rtx);
4387       break;
4388     }
4389 
4390   return new ? new : x;
4391 }
4392 
4393 /* Return a constant value currently equivalent to X.
4394    Return 0 if we don't know one.  */
4395 
4396 static rtx
equiv_constant(x)4397 equiv_constant (x)
4398      rtx x;
4399 {
4400   if (GET_CODE (x) == REG
4401       && REGNO_QTY_VALID_P (REGNO (x)))
4402     {
4403       int x_q = REG_QTY (REGNO (x));
4404       struct qty_table_elem *x_ent = &qty_table[x_q];
4405 
4406       if (x_ent->const_rtx)
4407 	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4408     }
4409 
4410   if (x == 0 || CONSTANT_P (x))
4411     return x;
4412 
4413   /* If X is a MEM, try to fold it outside the context of any insn to see if
4414      it might be equivalent to a constant.  That handles the case where it
4415      is a constant-pool reference.  Then try to look it up in the hash table
4416      in case it is something whose value we have seen before.  */
4417 
4418   if (GET_CODE (x) == MEM)
4419     {
4420       struct table_elt *elt;
4421 
4422       x = fold_rtx (x, NULL_RTX);
4423       if (CONSTANT_P (x))
4424 	return x;
4425 
4426       elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4427       if (elt == 0)
4428 	return 0;
4429 
4430       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4431 	if (elt->is_const && CONSTANT_P (elt->exp))
4432 	  return elt->exp;
4433     }
4434 
4435   return 0;
4436 }
4437 
4438 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4439    number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4440    least-significant part of X.
4441    MODE specifies how big a part of X to return.
4442 
4443    If the requested operation cannot be done, 0 is returned.
4444 
4445    This is similar to gen_lowpart in emit-rtl.c.  */
4446 
4447 rtx
gen_lowpart_if_possible(mode,x)4448 gen_lowpart_if_possible (mode, x)
4449      enum machine_mode mode;
4450      rtx x;
4451 {
4452   rtx result = gen_lowpart_common (mode, x);
4453 
4454   if (result)
4455     return result;
4456   else if (GET_CODE (x) == MEM)
4457     {
4458       /* This is the only other case we handle.  */
4459       int offset = 0;
4460       rtx new;
4461 
4462       if (WORDS_BIG_ENDIAN)
4463 	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4464 		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4465       if (BYTES_BIG_ENDIAN)
4466 	/* Adjust the address so that the address-after-the-data is
4467 	   unchanged.  */
4468 	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4469 		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4470 
4471       new = adjust_address_nv (x, mode, offset);
4472       if (! memory_address_p (mode, XEXP (new, 0)))
4473 	return 0;
4474 
4475       return new;
4476     }
4477   else
4478     return 0;
4479 }
4480 
4481 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4482    branch.  It will be zero if not.
4483 
4484    In certain cases, this can cause us to add an equivalence.  For example,
4485    if we are following the taken case of
4486    	if (i == 2)
4487    we can add the fact that `i' and '2' are now equivalent.
4488 
4489    In any case, we can record that this comparison was passed.  If the same
4490    comparison is seen later, we will know its value.  */
4491 
4492 static void
record_jump_equiv(insn,taken)4493 record_jump_equiv (insn, taken)
4494      rtx insn;
4495      int taken;
4496 {
4497   int cond_known_true;
4498   rtx op0, op1;
4499   rtx set;
4500   enum machine_mode mode, mode0, mode1;
4501   int reversed_nonequality = 0;
4502   enum rtx_code code;
4503 
4504   /* Ensure this is the right kind of insn.  */
4505   if (! any_condjump_p (insn))
4506     return;
4507   set = pc_set (insn);
4508 
4509   /* See if this jump condition is known true or false.  */
4510   if (taken)
4511     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4512   else
4513     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4514 
4515   /* Get the type of comparison being done and the operands being compared.
4516      If we had to reverse a non-equality condition, record that fact so we
4517      know that it isn't valid for floating-point.  */
4518   code = GET_CODE (XEXP (SET_SRC (set), 0));
4519   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4520   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4521 
4522   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4523   if (! cond_known_true)
4524     {
4525       code = reversed_comparison_code_parts (code, op0, op1, insn);
4526 
4527       /* Don't remember if we can't find the inverse.  */
4528       if (code == UNKNOWN)
4529 	return;
4530     }
4531 
4532   /* The mode is the mode of the non-constant.  */
4533   mode = mode0;
4534   if (mode1 != VOIDmode)
4535     mode = mode1;
4536 
4537   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4538 }
4539 
4540 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4541    REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4542    Make any useful entries we can with that information.  Called from
4543    above function and called recursively.  */
4544 
4545 static void
record_jump_cond(code,mode,op0,op1,reversed_nonequality)4546 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4547      enum rtx_code code;
4548      enum machine_mode mode;
4549      rtx op0, op1;
4550      int reversed_nonequality;
4551 {
4552   unsigned op0_hash, op1_hash;
4553   int op0_in_memory, op1_in_memory;
4554   struct table_elt *op0_elt, *op1_elt;
4555 
4556   /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4557      we know that they are also equal in the smaller mode (this is also
4558      true for all smaller modes whether or not there is a SUBREG, but
4559      is not worth testing for with no SUBREG).  */
4560 
4561   /* Note that GET_MODE (op0) may not equal MODE.  */
4562   if (code == EQ && GET_CODE (op0) == SUBREG
4563       && (GET_MODE_SIZE (GET_MODE (op0))
4564 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4565     {
4566       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4567       rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4568 
4569       record_jump_cond (code, mode, SUBREG_REG (op0),
4570 			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4571 			reversed_nonequality);
4572     }
4573 
4574   if (code == EQ && GET_CODE (op1) == SUBREG
4575       && (GET_MODE_SIZE (GET_MODE (op1))
4576 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4577     {
4578       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4579       rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4580 
4581       record_jump_cond (code, mode, SUBREG_REG (op1),
4582 			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4583 			reversed_nonequality);
4584     }
4585 
4586   /* Similarly, if this is an NE comparison, and either is a SUBREG
4587      making a smaller mode, we know the whole thing is also NE.  */
4588 
4589   /* Note that GET_MODE (op0) may not equal MODE;
4590      if we test MODE instead, we can get an infinite recursion
4591      alternating between two modes each wider than MODE.  */
4592 
4593   if (code == NE && GET_CODE (op0) == SUBREG
4594       && subreg_lowpart_p (op0)
4595       && (GET_MODE_SIZE (GET_MODE (op0))
4596 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4597     {
4598       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4599       rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4600 
4601       record_jump_cond (code, mode, SUBREG_REG (op0),
4602 			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4603 			reversed_nonequality);
4604     }
4605 
4606   if (code == NE && GET_CODE (op1) == SUBREG
4607       && subreg_lowpart_p (op1)
4608       && (GET_MODE_SIZE (GET_MODE (op1))
4609 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4610     {
4611       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4612       rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4613 
4614       record_jump_cond (code, mode, SUBREG_REG (op1),
4615 			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4616 			reversed_nonequality);
4617     }
4618 
4619   /* Hash both operands.  */
4620 
4621   do_not_record = 0;
4622   hash_arg_in_memory = 0;
4623   op0_hash = HASH (op0, mode);
4624   op0_in_memory = hash_arg_in_memory;
4625 
4626   if (do_not_record)
4627     return;
4628 
4629   do_not_record = 0;
4630   hash_arg_in_memory = 0;
4631   op1_hash = HASH (op1, mode);
4632   op1_in_memory = hash_arg_in_memory;
4633 
4634   if (do_not_record)
4635     return;
4636 
4637   /* Look up both operands.  */
4638   op0_elt = lookup (op0, op0_hash, mode);
4639   op1_elt = lookup (op1, op1_hash, mode);
4640 
4641   /* If both operands are already equivalent or if they are not in the
4642      table but are identical, do nothing.  */
4643   if ((op0_elt != 0 && op1_elt != 0
4644        && op0_elt->first_same_value == op1_elt->first_same_value)
4645       || op0 == op1 || rtx_equal_p (op0, op1))
4646     return;
4647 
4648   /* If we aren't setting two things equal all we can do is save this
4649      comparison.   Similarly if this is floating-point.  In the latter
4650      case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4651      If we record the equality, we might inadvertently delete code
4652      whose intent was to change -0 to +0.  */
4653 
4654   if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4655     {
4656       struct qty_table_elem *ent;
4657       int qty;
4658 
4659       /* If we reversed a floating-point comparison, if OP0 is not a
4660 	 register, or if OP1 is neither a register or constant, we can't
4661 	 do anything.  */
4662 
4663       if (GET_CODE (op1) != REG)
4664 	op1 = equiv_constant (op1);
4665 
4666       if ((reversed_nonequality && FLOAT_MODE_P (mode))
4667 	  || GET_CODE (op0) != REG || op1 == 0)
4668 	return;
4669 
4670       /* Put OP0 in the hash table if it isn't already.  This gives it a
4671 	 new quantity number.  */
4672       if (op0_elt == 0)
4673 	{
4674 	  if (insert_regs (op0, NULL, 0))
4675 	    {
4676 	      rehash_using_reg (op0);
4677 	      op0_hash = HASH (op0, mode);
4678 
4679 	      /* If OP0 is contained in OP1, this changes its hash code
4680 		 as well.  Faster to rehash than to check, except
4681 		 for the simple case of a constant.  */
4682 	      if (! CONSTANT_P (op1))
4683 		op1_hash = HASH (op1,mode);
4684 	    }
4685 
4686 	  op0_elt = insert (op0, NULL, op0_hash, mode);
4687 	  op0_elt->in_memory = op0_in_memory;
4688 	}
4689 
4690       qty = REG_QTY (REGNO (op0));
4691       ent = &qty_table[qty];
4692 
4693       ent->comparison_code = code;
4694       if (GET_CODE (op1) == REG)
4695 	{
4696 	  /* Look it up again--in case op0 and op1 are the same.  */
4697 	  op1_elt = lookup (op1, op1_hash, mode);
4698 
4699 	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4700 	  if (op1_elt == 0)
4701 	    {
4702 	      if (insert_regs (op1, NULL, 0))
4703 		{
4704 		  rehash_using_reg (op1);
4705 		  op1_hash = HASH (op1, mode);
4706 		}
4707 
4708 	      op1_elt = insert (op1, NULL, op1_hash, mode);
4709 	      op1_elt->in_memory = op1_in_memory;
4710 	    }
4711 
4712 	  ent->comparison_const = NULL_RTX;
4713 	  ent->comparison_qty = REG_QTY (REGNO (op1));
4714 	}
4715       else
4716 	{
4717 	  ent->comparison_const = op1;
4718 	  ent->comparison_qty = -1;
4719 	}
4720 
4721       return;
4722     }
4723 
4724   /* If either side is still missing an equivalence, make it now,
4725      then merge the equivalences.  */
4726 
4727   if (op0_elt == 0)
4728     {
4729       if (insert_regs (op0, NULL, 0))
4730 	{
4731 	  rehash_using_reg (op0);
4732 	  op0_hash = HASH (op0, mode);
4733 	}
4734 
4735       op0_elt = insert (op0, NULL, op0_hash, mode);
4736       op0_elt->in_memory = op0_in_memory;
4737     }
4738 
4739   if (op1_elt == 0)
4740     {
4741       if (insert_regs (op1, NULL, 0))
4742 	{
4743 	  rehash_using_reg (op1);
4744 	  op1_hash = HASH (op1, mode);
4745 	}
4746 
4747       op1_elt = insert (op1, NULL, op1_hash, mode);
4748       op1_elt->in_memory = op1_in_memory;
4749     }
4750 
4751   merge_equiv_classes (op0_elt, op1_elt);
4752   last_jump_equiv_class = op0_elt;
4753 }
4754 
4755 /* CSE processing for one instruction.
4756    First simplify sources and addresses of all assignments
4757    in the instruction, using previously-computed equivalents values.
4758    Then install the new sources and destinations in the table
4759    of available values.
4760 
4761    If LIBCALL_INSN is nonzero, don't record any equivalence made in
4762    the insn.  It means that INSN is inside libcall block.  In this
4763    case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4764 
4765 /* Data on one SET contained in the instruction.  */
4766 
4767 struct set
4768 {
4769   /* The SET rtx itself.  */
4770   rtx rtl;
4771   /* The SET_SRC of the rtx (the original value, if it is changing).  */
4772   rtx src;
4773   /* The hash-table element for the SET_SRC of the SET.  */
4774   struct table_elt *src_elt;
4775   /* Hash value for the SET_SRC.  */
4776   unsigned src_hash;
4777   /* Hash value for the SET_DEST.  */
4778   unsigned dest_hash;
4779   /* The SET_DEST, with SUBREG, etc., stripped.  */
4780   rtx inner_dest;
4781   /* Nonzero if the SET_SRC is in memory.  */
4782   char src_in_memory;
4783   /* Nonzero if the SET_SRC contains something
4784      whose value cannot be predicted and understood.  */
4785   char src_volatile;
4786   /* Original machine mode, in case it becomes a CONST_INT.  */
4787   enum machine_mode mode;
4788   /* A constant equivalent for SET_SRC, if any.  */
4789   rtx src_const;
4790   /* Original SET_SRC value used for libcall notes.  */
4791   rtx orig_src;
4792   /* Hash value of constant equivalent for SET_SRC.  */
4793   unsigned src_const_hash;
4794   /* Table entry for constant equivalent for SET_SRC, if any.  */
4795   struct table_elt *src_const_elt;
4796 };
4797 
4798 static void
cse_insn(insn,libcall_insn)4799 cse_insn (insn, libcall_insn)
4800      rtx insn;
4801      rtx libcall_insn;
4802 {
4803   rtx x = PATTERN (insn);
4804   int i;
4805   rtx tem;
4806   int n_sets = 0;
4807 
4808 #ifdef HAVE_cc0
4809   /* Records what this insn does to set CC0.  */
4810   rtx this_insn_cc0 = 0;
4811   enum machine_mode this_insn_cc0_mode = VOIDmode;
4812 #endif
4813 
4814   rtx src_eqv = 0;
4815   struct table_elt *src_eqv_elt = 0;
4816   int src_eqv_volatile = 0;
4817   int src_eqv_in_memory = 0;
4818   unsigned src_eqv_hash = 0;
4819 
4820   struct set *sets = (struct set *) 0;
4821 
4822   this_insn = insn;
4823 
4824   /* Find all the SETs and CLOBBERs in this instruction.
4825      Record all the SETs in the array `set' and count them.
4826      Also determine whether there is a CLOBBER that invalidates
4827      all memory references, or all references at varying addresses.  */
4828 
4829   if (GET_CODE (insn) == CALL_INSN)
4830     {
4831       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4832 	{
4833 	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4834 	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4835 	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4836 	}
4837     }
4838 
4839   if (GET_CODE (x) == SET)
4840     {
4841       sets = (struct set *) alloca (sizeof (struct set));
4842       sets[0].rtl = x;
4843 
4844       /* Ignore SETs that are unconditional jumps.
4845 	 They never need cse processing, so this does not hurt.
4846 	 The reason is not efficiency but rather
4847 	 so that we can test at the end for instructions
4848 	 that have been simplified to unconditional jumps
4849 	 and not be misled by unchanged instructions
4850 	 that were unconditional jumps to begin with.  */
4851       if (SET_DEST (x) == pc_rtx
4852 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4853 	;
4854       /* cut the reg propagation of stack-protected argument */
4855       else if (x->volatil) {
4856 	rtx x1 = SET_DEST (x);
4857 	if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
4858 	  x1 = SUBREG_REG (x1);
4859 	if (! REGNO_QTY_VALID_P(REGNO (x1)))
4860 	  make_new_qty (REGNO (x1), GET_MODE (x1));
4861       }
4862 
4863       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4864 	 The hard function value register is used only once, to copy to
4865 	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4866 	 Ensure we invalidate the destination register.  On the 80386 no
4867 	 other code would invalidate it since it is a fixed_reg.
4868 	 We need not check the return of apply_change_group; see canon_reg.  */
4869 
4870       else if (GET_CODE (SET_SRC (x)) == CALL)
4871 	{
4872 	  canon_reg (SET_SRC (x), insn);
4873 	  apply_change_group ();
4874 	  fold_rtx (SET_SRC (x), insn);
4875 	  invalidate (SET_DEST (x), VOIDmode);
4876 	}
4877       else
4878 	n_sets = 1;
4879     }
4880   else if (GET_CODE (x) == PARALLEL)
4881     {
4882       int lim = XVECLEN (x, 0);
4883 
4884       sets = (struct set *) alloca (lim * sizeof (struct set));
4885 
4886       /* Find all regs explicitly clobbered in this insn,
4887 	 and ensure they are not replaced with any other regs
4888 	 elsewhere in this insn.
4889 	 When a reg that is clobbered is also used for input,
4890 	 we should presume that that is for a reason,
4891 	 and we should not substitute some other register
4892 	 which is not supposed to be clobbered.
4893 	 Therefore, this loop cannot be merged into the one below
4894 	 because a CALL may precede a CLOBBER and refer to the
4895 	 value clobbered.  We must not let a canonicalization do
4896 	 anything in that case.  */
4897       for (i = 0; i < lim; i++)
4898 	{
4899 	  rtx y = XVECEXP (x, 0, i);
4900 	  if (GET_CODE (y) == CLOBBER)
4901 	    {
4902 	      rtx clobbered = XEXP (y, 0);
4903 
4904 	      if (GET_CODE (clobbered) == REG
4905 		  || GET_CODE (clobbered) == SUBREG)
4906 		invalidate (clobbered, VOIDmode);
4907 	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4908 		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4909 		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4910 	    }
4911 	}
4912 
4913       for (i = 0; i < lim; i++)
4914 	{
4915 	  rtx y = XVECEXP (x, 0, i);
4916 	  if (GET_CODE (y) == SET)
4917 	    {
4918 	      /* As above, we ignore unconditional jumps and call-insns and
4919 		 ignore the result of apply_change_group.  */
4920 	      if (GET_CODE (SET_SRC (y)) == CALL)
4921 		{
4922 		  canon_reg (SET_SRC (y), insn);
4923 		  apply_change_group ();
4924 		  fold_rtx (SET_SRC (y), insn);
4925 		  invalidate (SET_DEST (y), VOIDmode);
4926 		}
4927 	      else if (SET_DEST (y) == pc_rtx
4928 		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4929 		;
4930 	      else
4931 		sets[n_sets++].rtl = y;
4932 	    }
4933 	  else if (GET_CODE (y) == CLOBBER)
4934 	    {
4935 	      /* If we clobber memory, canon the address.
4936 		 This does nothing when a register is clobbered
4937 		 because we have already invalidated the reg.  */
4938 	      if (GET_CODE (XEXP (y, 0)) == MEM)
4939 		canon_reg (XEXP (y, 0), NULL_RTX);
4940 	    }
4941 	  else if (GET_CODE (y) == USE
4942 		   && ! (GET_CODE (XEXP (y, 0)) == REG
4943 			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4944 	    canon_reg (y, NULL_RTX);
4945 	  else if (GET_CODE (y) == CALL)
4946 	    {
4947 	      /* The result of apply_change_group can be ignored; see
4948 		 canon_reg.  */
4949 	      canon_reg (y, insn);
4950 	      apply_change_group ();
4951 	      fold_rtx (y, insn);
4952 	    }
4953 	}
4954     }
4955   else if (GET_CODE (x) == CLOBBER)
4956     {
4957       if (GET_CODE (XEXP (x, 0)) == MEM)
4958 	canon_reg (XEXP (x, 0), NULL_RTX);
4959     }
4960 
4961   /* Canonicalize a USE of a pseudo register or memory location.  */
4962   else if (GET_CODE (x) == USE
4963 	   && ! (GET_CODE (XEXP (x, 0)) == REG
4964 		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4965     canon_reg (XEXP (x, 0), NULL_RTX);
4966   else if (GET_CODE (x) == CALL)
4967     {
4968       /* The result of apply_change_group can be ignored; see canon_reg.  */
4969       canon_reg (x, insn);
4970       apply_change_group ();
4971       fold_rtx (x, insn);
4972     }
4973 
4974   /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4975      is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4976      is handled specially for this case, and if it isn't set, then there will
4977      be no equivalence for the destination.  */
4978   if (n_sets == 1 && REG_NOTES (insn) != 0
4979       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4980       && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4981 	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4982     {
4983       src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4984       XEXP (tem, 0) = src_eqv;
4985     }
4986 
4987   /* Canonicalize sources and addresses of destinations.
4988      We do this in a separate pass to avoid problems when a MATCH_DUP is
4989      present in the insn pattern.  In that case, we want to ensure that
4990      we don't break the duplicate nature of the pattern.  So we will replace
4991      both operands at the same time.  Otherwise, we would fail to find an
4992      equivalent substitution in the loop calling validate_change below.
4993 
4994      We used to suppress canonicalization of DEST if it appears in SRC,
4995      but we don't do this any more.  */
4996 
4997   for (i = 0; i < n_sets; i++)
4998     {
4999       rtx dest = SET_DEST (sets[i].rtl);
5000       rtx src = SET_SRC (sets[i].rtl);
5001       rtx new = canon_reg (src, insn);
5002       int insn_code;
5003 
5004       sets[i].orig_src = src;
5005       if ((GET_CODE (new) == REG && GET_CODE (src) == REG
5006 	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
5007 	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
5008 	  || (insn_code = recog_memoized (insn)) < 0
5009 	  || insn_data[insn_code].n_dups > 0)
5010 	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5011       else
5012 	SET_SRC (sets[i].rtl) = new;
5013 
5014       if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5015 	{
5016 	  validate_change (insn, &XEXP (dest, 1),
5017 			   canon_reg (XEXP (dest, 1), insn), 1);
5018 	  validate_change (insn, &XEXP (dest, 2),
5019 			   canon_reg (XEXP (dest, 2), insn), 1);
5020 	}
5021 
5022       while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5023 	     || GET_CODE (dest) == ZERO_EXTRACT
5024 	     || GET_CODE (dest) == SIGN_EXTRACT)
5025 	dest = XEXP (dest, 0);
5026 
5027       if (GET_CODE (dest) == MEM)
5028 	canon_reg (dest, insn);
5029     }
5030 
5031   /* Now that we have done all the replacements, we can apply the change
5032      group and see if they all work.  Note that this will cause some
5033      canonicalizations that would have worked individually not to be applied
5034      because some other canonicalization didn't work, but this should not
5035      occur often.
5036 
5037      The result of apply_change_group can be ignored; see canon_reg.  */
5038 
5039   apply_change_group ();
5040 
5041   /* Set sets[i].src_elt to the class each source belongs to.
5042      Detect assignments from or to volatile things
5043      and set set[i] to zero so they will be ignored
5044      in the rest of this function.
5045 
5046      Nothing in this loop changes the hash table or the register chains.  */
5047 
5048   for (i = 0; i < n_sets; i++)
5049     {
5050       rtx src, dest;
5051       rtx src_folded;
5052       struct table_elt *elt = 0, *p;
5053       enum machine_mode mode;
5054       rtx src_eqv_here;
5055       rtx src_const = 0;
5056       rtx src_related = 0;
5057       struct table_elt *src_const_elt = 0;
5058       int src_cost = MAX_COST;
5059       int src_eqv_cost = MAX_COST;
5060       int src_folded_cost = MAX_COST;
5061       int src_related_cost = MAX_COST;
5062       int src_elt_cost = MAX_COST;
5063       int src_regcost = MAX_COST;
5064       int src_eqv_regcost = MAX_COST;
5065       int src_folded_regcost = MAX_COST;
5066       int src_related_regcost = MAX_COST;
5067       int src_elt_regcost = MAX_COST;
5068       /* Set nonzero if we need to call force_const_mem on with the
5069 	 contents of src_folded before using it.  */
5070       int src_folded_force_flag = 0;
5071 
5072       dest = SET_DEST (sets[i].rtl);
5073       src = SET_SRC (sets[i].rtl);
5074 
5075       /* If SRC is a constant that has no machine mode,
5076 	 hash it with the destination's machine mode.
5077 	 This way we can keep different modes separate.  */
5078 
5079       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5080       sets[i].mode = mode;
5081 
5082       if (src_eqv)
5083 	{
5084 	  enum machine_mode eqvmode = mode;
5085 	  if (GET_CODE (dest) == STRICT_LOW_PART)
5086 	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5087 	  do_not_record = 0;
5088 	  hash_arg_in_memory = 0;
5089 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5090 
5091 	  /* Find the equivalence class for the equivalent expression.  */
5092 
5093 	  if (!do_not_record)
5094 	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5095 
5096 	  src_eqv_volatile = do_not_record;
5097 	  src_eqv_in_memory = hash_arg_in_memory;
5098 	}
5099 
5100       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5101 	 value of the INNER register, not the destination.  So it is not
5102 	 a valid substitution for the source.  But save it for later.  */
5103       if (GET_CODE (dest) == STRICT_LOW_PART)
5104 	src_eqv_here = 0;
5105       else
5106 	src_eqv_here = src_eqv;
5107 
5108       /* Simplify and foldable subexpressions in SRC.  Then get the fully-
5109 	 simplified result, which may not necessarily be valid.  */
5110       src_folded = fold_rtx (src, insn);
5111 
5112 #if 0
5113       /* ??? This caused bad code to be generated for the m68k port with -O2.
5114 	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5115 	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5116 	 At the end we will add src and src_const to the same equivalence
5117 	 class.  We now have 3 and -1 on the same equivalence class.  This
5118 	 causes later instructions to be mis-optimized.  */
5119       /* If storing a constant in a bitfield, pre-truncate the constant
5120 	 so we will be able to record it later.  */
5121       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5122 	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5123 	{
5124 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5125 
5126 	  if (GET_CODE (src) == CONST_INT
5127 	      && GET_CODE (width) == CONST_INT
5128 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5129 	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5130 	    src_folded
5131 	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5132 					  << INTVAL (width)) - 1));
5133 	}
5134 #endif
5135 
5136       /* Compute SRC's hash code, and also notice if it
5137 	 should not be recorded at all.  In that case,
5138 	 prevent any further processing of this assignment.  */
5139       do_not_record = 0;
5140       hash_arg_in_memory = 0;
5141 
5142       sets[i].src = src;
5143       sets[i].src_hash = HASH (src, mode);
5144       sets[i].src_volatile = do_not_record;
5145       sets[i].src_in_memory = hash_arg_in_memory;
5146 
5147       /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5148 	 a pseudo, do not record SRC.  Using SRC as a replacement for
5149 	 anything else will be incorrect in that situation.  Note that
5150 	 this usually occurs only for stack slots, in which case all the
5151 	 RTL would be referring to SRC, so we don't lose any optimization
5152 	 opportunities by not having SRC in the hash table.  */
5153 
5154       if (GET_CODE (src) == MEM
5155 	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5156 	  && GET_CODE (dest) == REG
5157 	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5158 	sets[i].src_volatile = 1;
5159 
5160 #if 0
5161       /* It is no longer clear why we used to do this, but it doesn't
5162 	 appear to still be needed.  So let's try without it since this
5163 	 code hurts cse'ing widened ops.  */
5164       /* If source is a perverse subreg (such as QI treated as an SI),
5165 	 treat it as volatile.  It may do the work of an SI in one context
5166 	 where the extra bits are not being used, but cannot replace an SI
5167 	 in general.  */
5168       if (GET_CODE (src) == SUBREG
5169 	  && (GET_MODE_SIZE (GET_MODE (src))
5170 	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5171 	sets[i].src_volatile = 1;
5172 #endif
5173 
5174       /* Locate all possible equivalent forms for SRC.  Try to replace
5175          SRC in the insn with each cheaper equivalent.
5176 
5177          We have the following types of equivalents: SRC itself, a folded
5178          version, a value given in a REG_EQUAL note, or a value related
5179 	 to a constant.
5180 
5181          Each of these equivalents may be part of an additional class
5182          of equivalents (if more than one is in the table, they must be in
5183          the same class; we check for this).
5184 
5185 	 If the source is volatile, we don't do any table lookups.
5186 
5187          We note any constant equivalent for possible later use in a
5188          REG_NOTE.  */
5189 
5190       if (!sets[i].src_volatile)
5191 	elt = lookup (src, sets[i].src_hash, mode);
5192 
5193       sets[i].src_elt = elt;
5194 
5195       if (elt && src_eqv_here && src_eqv_elt)
5196 	{
5197 	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5198 	    {
5199 	      /* The REG_EQUAL is indicating that two formerly distinct
5200 		 classes are now equivalent.  So merge them.  */
5201 	      merge_equiv_classes (elt, src_eqv_elt);
5202 	      src_eqv_hash = HASH (src_eqv, elt->mode);
5203 	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5204 	    }
5205 
5206 	  src_eqv_here = 0;
5207 	}
5208 
5209       else if (src_eqv_elt)
5210 	elt = src_eqv_elt;
5211 
5212       /* Try to find a constant somewhere and record it in `src_const'.
5213 	 Record its table element, if any, in `src_const_elt'.  Look in
5214 	 any known equivalences first.  (If the constant is not in the
5215 	 table, also set `sets[i].src_const_hash').  */
5216       if (elt)
5217 	for (p = elt->first_same_value; p; p = p->next_same_value)
5218 	  if (p->is_const)
5219 	    {
5220 	      src_const = p->exp;
5221 	      src_const_elt = elt;
5222 	      break;
5223 	    }
5224 
5225       if (src_const == 0
5226 	  && (CONSTANT_P (src_folded)
5227 	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5228 		 "constant" here so we will record it. This allows us
5229 		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5230 	      || (GET_CODE (src_folded) == MINUS
5231 		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5232 		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5233 	src_const = src_folded, src_const_elt = elt;
5234       else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5235 	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5236 
5237       /* If we don't know if the constant is in the table, get its
5238 	 hash code and look it up.  */
5239       if (src_const && src_const_elt == 0)
5240 	{
5241 	  sets[i].src_const_hash = HASH (src_const, mode);
5242 	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5243 	}
5244 
5245       sets[i].src_const = src_const;
5246       sets[i].src_const_elt = src_const_elt;
5247 
5248       /* If the constant and our source are both in the table, mark them as
5249 	 equivalent.  Otherwise, if a constant is in the table but the source
5250 	 isn't, set ELT to it.  */
5251       if (src_const_elt && elt
5252 	  && src_const_elt->first_same_value != elt->first_same_value)
5253 	merge_equiv_classes (elt, src_const_elt);
5254       else if (src_const_elt && elt == 0)
5255 	elt = src_const_elt;
5256 
5257       /* See if there is a register linearly related to a constant
5258          equivalent of SRC.  */
5259       if (src_const
5260 	  && (GET_CODE (src_const) == CONST
5261 	      || (src_const_elt && src_const_elt->related_value != 0)))
5262 	{
5263 	  src_related = use_related_value (src_const, src_const_elt);
5264 	  if (src_related)
5265 	    {
5266 	      struct table_elt *src_related_elt
5267 		= lookup (src_related, HASH (src_related, mode), mode);
5268 	      if (src_related_elt && elt)
5269 		{
5270 		  if (elt->first_same_value
5271 		      != src_related_elt->first_same_value)
5272 		    /* This can occur when we previously saw a CONST
5273 		       involving a SYMBOL_REF and then see the SYMBOL_REF
5274 		       twice.  Merge the involved classes.  */
5275 		    merge_equiv_classes (elt, src_related_elt);
5276 
5277 		  src_related = 0;
5278 		  src_related_elt = 0;
5279 		}
5280 	      else if (src_related_elt && elt == 0)
5281 		elt = src_related_elt;
5282 	    }
5283 	}
5284 
5285       /* See if we have a CONST_INT that is already in a register in a
5286 	 wider mode.  */
5287 
5288       if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5289 	  && GET_MODE_CLASS (mode) == MODE_INT
5290 	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5291 	{
5292 	  enum machine_mode wider_mode;
5293 
5294 	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5295 	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5296 	       && src_related == 0;
5297 	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5298 	    {
5299 	      struct table_elt *const_elt
5300 		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5301 
5302 	      if (const_elt == 0)
5303 		continue;
5304 
5305 	      for (const_elt = const_elt->first_same_value;
5306 		   const_elt; const_elt = const_elt->next_same_value)
5307 		if (GET_CODE (const_elt->exp) == REG)
5308 		  {
5309 		    src_related = gen_lowpart_if_possible (mode,
5310 							   const_elt->exp);
5311 		    break;
5312 		  }
5313 	    }
5314 	}
5315 
5316       /* Another possibility is that we have an AND with a constant in
5317 	 a mode narrower than a word.  If so, it might have been generated
5318 	 as part of an "if" which would narrow the AND.  If we already
5319 	 have done the AND in a wider mode, we can use a SUBREG of that
5320 	 value.  */
5321 
5322       if (flag_expensive_optimizations && ! src_related
5323 	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5324 	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5325 	{
5326 	  enum machine_mode tmode;
5327 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5328 
5329 	  for (tmode = GET_MODE_WIDER_MODE (mode);
5330 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5331 	       tmode = GET_MODE_WIDER_MODE (tmode))
5332 	    {
5333 	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5334 	      struct table_elt *larger_elt;
5335 
5336 	      if (inner)
5337 		{
5338 		  PUT_MODE (new_and, tmode);
5339 		  XEXP (new_and, 0) = inner;
5340 		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5341 		  if (larger_elt == 0)
5342 		    continue;
5343 
5344 		  for (larger_elt = larger_elt->first_same_value;
5345 		       larger_elt; larger_elt = larger_elt->next_same_value)
5346 		    if (GET_CODE (larger_elt->exp) == REG)
5347 		      {
5348 			src_related
5349 			  = gen_lowpart_if_possible (mode, larger_elt->exp);
5350 			break;
5351 		      }
5352 
5353 		  if (src_related)
5354 		    break;
5355 		}
5356 	    }
5357 	}
5358 
5359 #ifdef LOAD_EXTEND_OP
5360       /* See if a MEM has already been loaded with a widening operation;
5361 	 if it has, we can use a subreg of that.  Many CISC machines
5362 	 also have such operations, but this is only likely to be
5363 	 beneficial these machines.  */
5364 
5365       if (flag_expensive_optimizations && src_related == 0
5366 	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5367 	  && GET_MODE_CLASS (mode) == MODE_INT
5368 	  && GET_CODE (src) == MEM && ! do_not_record
5369 	  && LOAD_EXTEND_OP (mode) != NIL)
5370 	{
5371 	  enum machine_mode tmode;
5372 
5373 	  /* Set what we are trying to extend and the operation it might
5374 	     have been extended with.  */
5375 	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5376 	  XEXP (memory_extend_rtx, 0) = src;
5377 
5378 	  for (tmode = GET_MODE_WIDER_MODE (mode);
5379 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5380 	       tmode = GET_MODE_WIDER_MODE (tmode))
5381 	    {
5382 	      struct table_elt *larger_elt;
5383 
5384 	      PUT_MODE (memory_extend_rtx, tmode);
5385 	      larger_elt = lookup (memory_extend_rtx,
5386 				   HASH (memory_extend_rtx, tmode), tmode);
5387 	      if (larger_elt == 0)
5388 		continue;
5389 
5390 	      for (larger_elt = larger_elt->first_same_value;
5391 		   larger_elt; larger_elt = larger_elt->next_same_value)
5392 		if (GET_CODE (larger_elt->exp) == REG)
5393 		  {
5394 		    src_related = gen_lowpart_if_possible (mode,
5395 							   larger_elt->exp);
5396 		    break;
5397 		  }
5398 
5399 	      if (src_related)
5400 		break;
5401 	    }
5402 	}
5403 #endif /* LOAD_EXTEND_OP */
5404 
5405       if (src == src_folded)
5406 	src_folded = 0;
5407 
5408       /* At this point, ELT, if nonzero, points to a class of expressions
5409          equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5410 	 and SRC_RELATED, if nonzero, each contain additional equivalent
5411 	 expressions.  Prune these latter expressions by deleting expressions
5412 	 already in the equivalence class.
5413 
5414 	 Check for an equivalent identical to the destination.  If found,
5415 	 this is the preferred equivalent since it will likely lead to
5416 	 elimination of the insn.  Indicate this by placing it in
5417 	 `src_related'.  */
5418 
5419       if (elt)
5420 	elt = elt->first_same_value;
5421       for (p = elt; p; p = p->next_same_value)
5422 	{
5423 	  enum rtx_code code = GET_CODE (p->exp);
5424 
5425 	  /* If the expression is not valid, ignore it.  Then we do not
5426 	     have to check for validity below.  In most cases, we can use
5427 	     `rtx_equal_p', since canonicalization has already been done.  */
5428 	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5429 	    continue;
5430 
5431 	  /* Also skip paradoxical subregs, unless that's what we're
5432 	     looking for.  */
5433 	  if (code == SUBREG
5434 	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5435 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5436 	      && ! (src != 0
5437 		    && GET_CODE (src) == SUBREG
5438 		    && GET_MODE (src) == GET_MODE (p->exp)
5439 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5440 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5441 	    continue;
5442 
5443 	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5444 	    src = 0;
5445 	  else if (src_folded && GET_CODE (src_folded) == code
5446 		   && rtx_equal_p (src_folded, p->exp))
5447 	    src_folded = 0;
5448 	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5449 		   && rtx_equal_p (src_eqv_here, p->exp))
5450 	    src_eqv_here = 0;
5451 	  else if (src_related && GET_CODE (src_related) == code
5452 		   && rtx_equal_p (src_related, p->exp))
5453 	    src_related = 0;
5454 
5455 	  /* This is the same as the destination of the insns, we want
5456 	     to prefer it.  Copy it to src_related.  The code below will
5457 	     then give it a negative cost.  */
5458 	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5459 	    src_related = dest;
5460 	}
5461 
5462       /* Find the cheapest valid equivalent, trying all the available
5463          possibilities.  Prefer items not in the hash table to ones
5464          that are when they are equal cost.  Note that we can never
5465          worsen an insn as the current contents will also succeed.
5466 	 If we find an equivalent identical to the destination, use it as best,
5467 	 since this insn will probably be eliminated in that case.  */
5468       if (src)
5469 	{
5470 	  if (rtx_equal_p (src, dest))
5471 	    src_cost = src_regcost = -1;
5472 	  else
5473 	    {
5474 	      src_cost = COST (src);
5475 	      src_regcost = approx_reg_cost (src);
5476 	    }
5477 	}
5478 
5479       if (src_eqv_here)
5480 	{
5481 	  if (rtx_equal_p (src_eqv_here, dest))
5482 	    src_eqv_cost = src_eqv_regcost = -1;
5483 	  else
5484 	    {
5485 	      src_eqv_cost = COST (src_eqv_here);
5486 	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5487 	    }
5488 	}
5489 
5490       if (src_folded)
5491 	{
5492 	  if (rtx_equal_p (src_folded, dest))
5493 	    src_folded_cost = src_folded_regcost = -1;
5494 	  else
5495 	    {
5496 	      src_folded_cost = COST (src_folded);
5497 	      src_folded_regcost = approx_reg_cost (src_folded);
5498 	    }
5499 	}
5500 
5501       if (src_related)
5502 	{
5503 	  if (rtx_equal_p (src_related, dest))
5504 	    src_related_cost = src_related_regcost = -1;
5505 	  else
5506 	    {
5507 	      src_related_cost = COST (src_related);
5508 	      src_related_regcost = approx_reg_cost (src_related);
5509 	    }
5510 	}
5511 
5512       /* If this was an indirect jump insn, a known label will really be
5513 	 cheaper even though it looks more expensive.  */
5514       if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5515 	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5516 
5517       /* Terminate loop when replacement made.  This must terminate since
5518          the current contents will be tested and will always be valid.  */
5519       while (1)
5520 	{
5521 	  rtx trial;
5522 
5523 	  /* Skip invalid entries.  */
5524 	  while (elt && GET_CODE (elt->exp) != REG
5525 		 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5526 	    elt = elt->next_same_value;
5527 
5528 	  /* A paradoxical subreg would be bad here: it'll be the right
5529 	     size, but later may be adjusted so that the upper bits aren't
5530 	     what we want.  So reject it.  */
5531 	  if (elt != 0
5532 	      && GET_CODE (elt->exp) == SUBREG
5533 	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5534 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5535 	      /* It is okay, though, if the rtx we're trying to match
5536 		 will ignore any of the bits we can't predict.  */
5537 	      && ! (src != 0
5538 		    && GET_CODE (src) == SUBREG
5539 		    && GET_MODE (src) == GET_MODE (elt->exp)
5540 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5541 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5542 	    {
5543 	      elt = elt->next_same_value;
5544 	      continue;
5545 	    }
5546 
5547 	  if (elt)
5548 	    {
5549 	      src_elt_cost = elt->cost;
5550 	      src_elt_regcost = elt->regcost;
5551 	    }
5552 
5553 	  /* Find cheapest and skip it for the next time.   For items
5554 	     of equal cost, use this order:
5555 	     src_folded, src, src_eqv, src_related and hash table entry.  */
5556 	  if (src_folded
5557 	      && preferrable (src_folded_cost, src_folded_regcost,
5558 			      src_cost, src_regcost) <= 0
5559 	      && preferrable (src_folded_cost, src_folded_regcost,
5560 			      src_eqv_cost, src_eqv_regcost) <= 0
5561 	      && preferrable (src_folded_cost, src_folded_regcost,
5562 			      src_related_cost, src_related_regcost) <= 0
5563 	      && preferrable (src_folded_cost, src_folded_regcost,
5564 			      src_elt_cost, src_elt_regcost) <= 0)
5565 	    {
5566 	      trial = src_folded, src_folded_cost = MAX_COST;
5567 	      if (src_folded_force_flag)
5568 		trial = force_const_mem (mode, trial);
5569 	    }
5570 	  else if (src
5571 		   && preferrable (src_cost, src_regcost,
5572 				   src_eqv_cost, src_eqv_regcost) <= 0
5573 		   && preferrable (src_cost, src_regcost,
5574 				   src_related_cost, src_related_regcost) <= 0
5575 		   && preferrable (src_cost, src_regcost,
5576 				   src_elt_cost, src_elt_regcost) <= 0)
5577 	    trial = src, src_cost = MAX_COST;
5578 	  else if (src_eqv_here
5579 		   && preferrable (src_eqv_cost, src_eqv_regcost,
5580 				   src_related_cost, src_related_regcost) <= 0
5581 		   && preferrable (src_eqv_cost, src_eqv_regcost,
5582 				   src_elt_cost, src_elt_regcost) <= 0)
5583 	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5584 	  else if (src_related
5585 		   && preferrable (src_related_cost, src_related_regcost,
5586 				   src_elt_cost, src_elt_regcost) <= 0)
5587 	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5588 	  else
5589 	    {
5590 	      trial = copy_rtx (elt->exp);
5591 	      elt = elt->next_same_value;
5592 	      src_elt_cost = MAX_COST;
5593 	    }
5594 
5595 	  /* We don't normally have an insn matching (set (pc) (pc)), so
5596 	     check for this separately here.  We will delete such an
5597 	     insn below.
5598 
5599 	     For other cases such as a table jump or conditional jump
5600 	     where we know the ultimate target, go ahead and replace the
5601 	     operand.  While that may not make a valid insn, we will
5602 	     reemit the jump below (and also insert any necessary
5603 	     barriers).  */
5604 	  if (n_sets == 1 && dest == pc_rtx
5605 	      && (trial == pc_rtx
5606 		  || (GET_CODE (trial) == LABEL_REF
5607 		      && ! condjump_p (insn))))
5608 	    {
5609 	      SET_SRC (sets[i].rtl) = trial;
5610 	      cse_jumps_altered = 1;
5611 	      break;
5612 	    }
5613 
5614 	  /* Look for a substitution that makes a valid insn.  */
5615 	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5616 	    {
5617 	      /* If we just made a substitution inside a libcall, then we
5618 		 need to make the same substitution in any notes attached
5619 		 to the RETVAL insn.  */
5620 	      if (libcall_insn
5621 		  && (GET_CODE (sets[i].orig_src) == REG
5622 		      || GET_CODE (sets[i].orig_src) == SUBREG
5623 		      || GET_CODE (sets[i].orig_src) == MEM))
5624 		replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5625 			     canon_reg (SET_SRC (sets[i].rtl), insn));
5626 
5627 	      /* The result of apply_change_group can be ignored; see
5628 		 canon_reg.  */
5629 
5630 	      validate_change (insn, &SET_SRC (sets[i].rtl),
5631 			       canon_reg (SET_SRC (sets[i].rtl), insn),
5632 			       1);
5633 	      apply_change_group ();
5634 	      break;
5635 	    }
5636 
5637 	  /* If we previously found constant pool entries for
5638 	     constants and this is a constant, try making a
5639 	     pool entry.  Put it in src_folded unless we already have done
5640 	     this since that is where it likely came from.  */
5641 
5642 	  else if (constant_pool_entries_cost
5643 		   && CONSTANT_P (trial)
5644 		   /* Reject cases that will abort in decode_rtx_const.
5645 		      On the alpha when simplifying a switch, we get
5646 		      (const (truncate (minus (label_ref) (label_ref)))).  */
5647 		   && ! (GET_CODE (trial) == CONST
5648 			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5649 		   /* Likewise on IA-64, except without the truncate.  */
5650 		   && ! (GET_CODE (trial) == CONST
5651 			 && GET_CODE (XEXP (trial, 0)) == MINUS
5652 			 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5653 			 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5654 		   && (src_folded == 0
5655 		       || (GET_CODE (src_folded) != MEM
5656 			   && ! src_folded_force_flag))
5657 		   && GET_MODE_CLASS (mode) != MODE_CC
5658 		   && mode != VOIDmode)
5659 	    {
5660 	      src_folded_force_flag = 1;
5661 	      src_folded = trial;
5662 	      src_folded_cost = constant_pool_entries_cost;
5663 	    }
5664 	}
5665 
5666       src = SET_SRC (sets[i].rtl);
5667 
5668       /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5669 	 However, there is an important exception:  If both are registers
5670 	 that are not the head of their equivalence class, replace SET_SRC
5671 	 with the head of the class.  If we do not do this, we will have
5672 	 both registers live over a portion of the basic block.  This way,
5673 	 their lifetimes will likely abut instead of overlapping.  */
5674       if (GET_CODE (dest) == REG
5675 	  && REGNO_QTY_VALID_P (REGNO (dest)))
5676 	{
5677 	  int dest_q = REG_QTY (REGNO (dest));
5678 	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5679 
5680 	  if (dest_ent->mode == GET_MODE (dest)
5681 	      && dest_ent->first_reg != REGNO (dest)
5682 	      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5683 	      /* Don't do this if the original insn had a hard reg as
5684 		 SET_SRC or SET_DEST.  */
5685 	      && (GET_CODE (sets[i].src) != REG
5686 		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5687 	      && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5688 	    /* We can't call canon_reg here because it won't do anything if
5689 	       SRC is a hard register.  */
5690 	    {
5691 	      int src_q = REG_QTY (REGNO (src));
5692 	      struct qty_table_elem *src_ent = &qty_table[src_q];
5693 	      int first = src_ent->first_reg;
5694 	      rtx new_src
5695 		= (first >= FIRST_PSEUDO_REGISTER
5696 		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5697 
5698 	      /* We must use validate-change even for this, because this
5699 		 might be a special no-op instruction, suitable only to
5700 		 tag notes onto.  */
5701 	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5702 		{
5703 		  src = new_src;
5704 		  /* If we had a constant that is cheaper than what we are now
5705 		     setting SRC to, use that constant.  We ignored it when we
5706 		     thought we could make this into a no-op.  */
5707 		  if (src_const && COST (src_const) < COST (src)
5708 		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5709 					  src_const, 0))
5710 		    src = src_const;
5711 		}
5712 	    }
5713 	}
5714 
5715       /* If we made a change, recompute SRC values.  */
5716       if (src != sets[i].src)
5717 	{
5718 	  cse_altered = 1;
5719 	  do_not_record = 0;
5720 	  hash_arg_in_memory = 0;
5721 	  sets[i].src = src;
5722 	  sets[i].src_hash = HASH (src, mode);
5723 	  sets[i].src_volatile = do_not_record;
5724 	  sets[i].src_in_memory = hash_arg_in_memory;
5725 	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5726 	}
5727 
5728       /* If this is a single SET, we are setting a register, and we have an
5729 	 equivalent constant, we want to add a REG_NOTE.   We don't want
5730 	 to write a REG_EQUAL note for a constant pseudo since verifying that
5731 	 that pseudo hasn't been eliminated is a pain.  Such a note also
5732 	 won't help anything.
5733 
5734 	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5735 	 which can be created for a reference to a compile time computable
5736 	 entry in a jump table.  */
5737 
5738       if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5739 	  && GET_CODE (src_const) != REG
5740 	  && ! (GET_CODE (src_const) == CONST
5741 		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5742 		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5743 		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5744 	{
5745 	  /* Make sure that the rtx is not shared with any other insn.  */
5746 	  src_const = copy_rtx (src_const);
5747 
5748 	  /* Record the actual constant value in a REG_EQUAL note, making
5749 	     a new one if one does not already exist.  */
5750 	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5751 
5752 	  /* If storing a constant value in a register that
5753 	     previously held the constant value 0,
5754 	     record this fact with a REG_WAS_0 note on this insn.
5755 
5756 	     Note that the *register* is required to have previously held 0,
5757 	     not just any register in the quantity and we must point to the
5758 	     insn that set that register to zero.
5759 
5760 	     Rather than track each register individually, we just see if
5761 	     the last set for this quantity was for this register.  */
5762 
5763 	  if (REGNO_QTY_VALID_P (REGNO (dest)))
5764 	    {
5765 	      int dest_q = REG_QTY (REGNO (dest));
5766 	      struct qty_table_elem *dest_ent = &qty_table[dest_q];
5767 
5768 	      if (dest_ent->const_rtx == const0_rtx)
5769 		{
5770 		  /* See if we previously had a REG_WAS_0 note.  */
5771 		  rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5772 		  rtx const_insn = dest_ent->const_insn;
5773 
5774 		  if ((tem = single_set (const_insn)) != 0
5775 		      && rtx_equal_p (SET_DEST (tem), dest))
5776 		    {
5777 		      if (note)
5778 			XEXP (note, 0) = const_insn;
5779 		      else
5780 			REG_NOTES (insn)
5781 			  = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5782 					       REG_NOTES (insn));
5783 		    }
5784 		}
5785 	    }
5786 	}
5787 
5788       /* Now deal with the destination.  */
5789       do_not_record = 0;
5790 
5791       /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5792 	 to the MEM or REG within it.  */
5793       while (GET_CODE (dest) == SIGN_EXTRACT
5794 	     || GET_CODE (dest) == ZERO_EXTRACT
5795 	     || GET_CODE (dest) == SUBREG
5796 	     || GET_CODE (dest) == STRICT_LOW_PART)
5797 	dest = XEXP (dest, 0);
5798 
5799       sets[i].inner_dest = dest;
5800 
5801       if (GET_CODE (dest) == MEM)
5802 	{
5803 #ifdef PUSH_ROUNDING
5804 	  /* Stack pushes invalidate the stack pointer.  */
5805 	  rtx addr = XEXP (dest, 0);
5806 	  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5807 	      && XEXP (addr, 0) == stack_pointer_rtx)
5808 	    invalidate (stack_pointer_rtx, Pmode);
5809 #endif
5810 	  dest = fold_rtx (dest, insn);
5811 	}
5812 
5813       /* Compute the hash code of the destination now,
5814 	 before the effects of this instruction are recorded,
5815 	 since the register values used in the address computation
5816 	 are those before this instruction.  */
5817       sets[i].dest_hash = HASH (dest, mode);
5818 
5819       /* Don't enter a bit-field in the hash table
5820 	 because the value in it after the store
5821 	 may not equal what was stored, due to truncation.  */
5822 
5823       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5824 	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5825 	{
5826 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5827 
5828 	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5829 	      && GET_CODE (width) == CONST_INT
5830 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5831 	      && ! (INTVAL (src_const)
5832 		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5833 	    /* Exception: if the value is constant,
5834 	       and it won't be truncated, record it.  */
5835 	    ;
5836 	  else
5837 	    {
5838 	      /* This is chosen so that the destination will be invalidated
5839 		 but no new value will be recorded.
5840 		 We must invalidate because sometimes constant
5841 		 values can be recorded for bitfields.  */
5842 	      sets[i].src_elt = 0;
5843 	      sets[i].src_volatile = 1;
5844 	      src_eqv = 0;
5845 	      src_eqv_elt = 0;
5846 	    }
5847 	}
5848 
5849       /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5850 	 the insn.  */
5851       else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5852 	{
5853 	  /* One less use of the label this insn used to jump to.  */
5854 	  delete_insn (insn);
5855 	  cse_jumps_altered = 1;
5856 	  /* No more processing for this set.  */
5857 	  sets[i].rtl = 0;
5858 	}
5859 
5860       /* If this SET is now setting PC to a label, we know it used to
5861 	 be a conditional or computed branch.  */
5862       else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5863 	{
5864 	  /* Now emit a BARRIER after the unconditional jump.  */
5865 	  if (NEXT_INSN (insn) == 0
5866 	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5867 	    emit_barrier_after (insn);
5868 
5869 	  /* We reemit the jump in as many cases as possible just in
5870 	     case the form of an unconditional jump is significantly
5871 	     different than a computed jump or conditional jump.
5872 
5873 	     If this insn has multiple sets, then reemitting the
5874 	     jump is nontrivial.  So instead we just force rerecognition
5875 	     and hope for the best.  */
5876 	  if (n_sets == 1)
5877 	    {
5878 	      rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5879 
5880 	      JUMP_LABEL (new) = XEXP (src, 0);
5881 	      LABEL_NUSES (XEXP (src, 0))++;
5882 	      delete_insn (insn);
5883 	      insn = new;
5884 
5885 	      /* Now emit a BARRIER after the unconditional jump.  */
5886 	      if (NEXT_INSN (insn) == 0
5887 		  || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5888 		emit_barrier_after (insn);
5889 	    }
5890 	  else
5891 	    INSN_CODE (insn) = -1;
5892 
5893 	  never_reached_warning (insn, NULL);
5894 
5895 	  /* Do not bother deleting any unreachable code,
5896 	     let jump/flow do that.  */
5897 
5898 	  cse_jumps_altered = 1;
5899 	  sets[i].rtl = 0;
5900 	}
5901 
5902       /* If destination is volatile, invalidate it and then do no further
5903 	 processing for this assignment.  */
5904 
5905       else if (do_not_record)
5906 	{
5907 	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5908 	    invalidate (dest, VOIDmode);
5909 	  else if (GET_CODE (dest) == MEM)
5910 	    {
5911 	      /* Outgoing arguments for a libcall don't
5912 		 affect any recorded expressions.  */
5913 	      if (! libcall_insn || insn == libcall_insn)
5914 		invalidate (dest, VOIDmode);
5915 	    }
5916 	  else if (GET_CODE (dest) == STRICT_LOW_PART
5917 		   || GET_CODE (dest) == ZERO_EXTRACT)
5918 	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5919 	  sets[i].rtl = 0;
5920 	}
5921 
5922       if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5923 	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5924 
5925 #ifdef HAVE_cc0
5926       /* If setting CC0, record what it was set to, or a constant, if it
5927 	 is equivalent to a constant.  If it is being set to a floating-point
5928 	 value, make a COMPARE with the appropriate constant of 0.  If we
5929 	 don't do this, later code can interpret this as a test against
5930 	 const0_rtx, which can cause problems if we try to put it into an
5931 	 insn as a floating-point operand.  */
5932       if (dest == cc0_rtx)
5933 	{
5934 	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5935 	  this_insn_cc0_mode = mode;
5936 	  if (FLOAT_MODE_P (mode))
5937 	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5938 					     CONST0_RTX (mode));
5939 	}
5940 #endif
5941     }
5942 
5943   /* Now enter all non-volatile source expressions in the hash table
5944      if they are not already present.
5945      Record their equivalence classes in src_elt.
5946      This way we can insert the corresponding destinations into
5947      the same classes even if the actual sources are no longer in them
5948      (having been invalidated).  */
5949 
5950   if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5951       && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5952     {
5953       struct table_elt *elt;
5954       struct table_elt *classp = sets[0].src_elt;
5955       rtx dest = SET_DEST (sets[0].rtl);
5956       enum machine_mode eqvmode = GET_MODE (dest);
5957 
5958       if (GET_CODE (dest) == STRICT_LOW_PART)
5959 	{
5960 	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5961 	  classp = 0;
5962 	}
5963       if (insert_regs (src_eqv, classp, 0))
5964 	{
5965 	  rehash_using_reg (src_eqv);
5966 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5967 	}
5968       elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5969       elt->in_memory = src_eqv_in_memory;
5970       src_eqv_elt = elt;
5971 
5972       /* Check to see if src_eqv_elt is the same as a set source which
5973 	 does not yet have an elt, and if so set the elt of the set source
5974 	 to src_eqv_elt.  */
5975       for (i = 0; i < n_sets; i++)
5976 	if (sets[i].rtl && sets[i].src_elt == 0
5977 	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5978 	  sets[i].src_elt = src_eqv_elt;
5979     }
5980 
5981   for (i = 0; i < n_sets; i++)
5982     if (sets[i].rtl && ! sets[i].src_volatile
5983 	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5984       {
5985 	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5986 	  {
5987 	    /* REG_EQUAL in setting a STRICT_LOW_PART
5988 	       gives an equivalent for the entire destination register,
5989 	       not just for the subreg being stored in now.
5990 	       This is a more interesting equivalence, so we arrange later
5991 	       to treat the entire reg as the destination.  */
5992 	    sets[i].src_elt = src_eqv_elt;
5993 	    sets[i].src_hash = src_eqv_hash;
5994 	  }
5995 	else
5996 	  {
5997 	    /* Insert source and constant equivalent into hash table, if not
5998 	       already present.  */
5999 	    struct table_elt *classp = src_eqv_elt;
6000 	    rtx src = sets[i].src;
6001 	    rtx dest = SET_DEST (sets[i].rtl);
6002 	    enum machine_mode mode
6003 	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6004 
6005 	    if (sets[i].src_elt == 0)
6006 	      {
6007 		/* Don't put a hard register source into the table if this is
6008 		   the last insn of a libcall.  In this case, we only need
6009 		   to put src_eqv_elt in src_elt.  */
6010 		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6011 		  {
6012 		    struct table_elt *elt;
6013 
6014 		    /* Note that these insert_regs calls cannot remove
6015 		       any of the src_elt's, because they would have failed to
6016 		       match if not still valid.  */
6017 		    if (insert_regs (src, classp, 0))
6018 		      {
6019 			rehash_using_reg (src);
6020 			sets[i].src_hash = HASH (src, mode);
6021 		      }
6022 		    elt = insert (src, classp, sets[i].src_hash, mode);
6023 		    elt->in_memory = sets[i].src_in_memory;
6024 		    sets[i].src_elt = classp = elt;
6025 		  }
6026 		else
6027 		  sets[i].src_elt = classp;
6028 	      }
6029 	    if (sets[i].src_const && sets[i].src_const_elt == 0
6030 		&& src != sets[i].src_const
6031 		&& ! rtx_equal_p (sets[i].src_const, src))
6032 	      sets[i].src_elt = insert (sets[i].src_const, classp,
6033 					sets[i].src_const_hash, mode);
6034 	  }
6035       }
6036     else if (sets[i].src_elt == 0)
6037       /* If we did not insert the source into the hash table (e.g., it was
6038 	 volatile), note the equivalence class for the REG_EQUAL value, if any,
6039 	 so that the destination goes into that class.  */
6040       sets[i].src_elt = src_eqv_elt;
6041 
6042   invalidate_from_clobbers (x);
6043 
6044   /* Some registers are invalidated by subroutine calls.  Memory is
6045      invalidated by non-constant calls.  */
6046 
6047   if (GET_CODE (insn) == CALL_INSN)
6048     {
6049       if (! CONST_OR_PURE_CALL_P (insn))
6050 	invalidate_memory ();
6051       invalidate_for_call ();
6052     }
6053 
6054   /* Now invalidate everything set by this instruction.
6055      If a SUBREG or other funny destination is being set,
6056      sets[i].rtl is still nonzero, so here we invalidate the reg
6057      a part of which is being set.  */
6058 
6059   for (i = 0; i < n_sets; i++)
6060     if (sets[i].rtl)
6061       {
6062 	/* We can't use the inner dest, because the mode associated with
6063 	   a ZERO_EXTRACT is significant.  */
6064 	rtx dest = SET_DEST (sets[i].rtl);
6065 
6066 	/* Needed for registers to remove the register from its
6067 	   previous quantity's chain.
6068 	   Needed for memory if this is a nonvarying address, unless
6069 	   we have just done an invalidate_memory that covers even those.  */
6070 	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6071 	  invalidate (dest, VOIDmode);
6072 	else if (GET_CODE (dest) == MEM)
6073 	  {
6074 	    /* Outgoing arguments for a libcall don't
6075 	       affect any recorded expressions.  */
6076 	    if (! libcall_insn || insn == libcall_insn)
6077 	      invalidate (dest, VOIDmode);
6078 	  }
6079 	else if (GET_CODE (dest) == STRICT_LOW_PART
6080 		 || GET_CODE (dest) == ZERO_EXTRACT)
6081 	  invalidate (XEXP (dest, 0), GET_MODE (dest));
6082       }
6083 
6084   /* A volatile ASM invalidates everything.  */
6085   if (GET_CODE (insn) == INSN
6086       && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6087       && MEM_VOLATILE_P (PATTERN (insn)))
6088     flush_hash_table ();
6089 
6090   /* Make sure registers mentioned in destinations
6091      are safe for use in an expression to be inserted.
6092      This removes from the hash table
6093      any invalid entry that refers to one of these registers.
6094 
6095      We don't care about the return value from mention_regs because
6096      we are going to hash the SET_DEST values unconditionally.  */
6097 
6098   for (i = 0; i < n_sets; i++)
6099     {
6100       if (sets[i].rtl)
6101 	{
6102 	  rtx x = SET_DEST (sets[i].rtl);
6103 
6104 	  if (GET_CODE (x) != REG)
6105 	    mention_regs (x);
6106 	  else
6107 	    {
6108 	      /* We used to rely on all references to a register becoming
6109 		 inaccessible when a register changes to a new quantity,
6110 		 since that changes the hash code.  However, that is not
6111 		 safe, since after HASH_SIZE new quantities we get a
6112 		 hash 'collision' of a register with its own invalid
6113 		 entries.  And since SUBREGs have been changed not to
6114 		 change their hash code with the hash code of the register,
6115 		 it wouldn't work any longer at all.  So we have to check
6116 		 for any invalid references lying around now.
6117 		 This code is similar to the REG case in mention_regs,
6118 		 but it knows that reg_tick has been incremented, and
6119 		 it leaves reg_in_table as -1 .  */
6120 	      unsigned int regno = REGNO (x);
6121 	      unsigned int endregno
6122 		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6123 			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6124 	      unsigned int i;
6125 
6126 	      for (i = regno; i < endregno; i++)
6127 		{
6128 		  if (REG_IN_TABLE (i) >= 0)
6129 		    {
6130 		      remove_invalid_refs (i);
6131 		      REG_IN_TABLE (i) = -1;
6132 		    }
6133 		}
6134 	    }
6135 	}
6136     }
6137 
6138   /* We may have just removed some of the src_elt's from the hash table.
6139      So replace each one with the current head of the same class.  */
6140 
6141   for (i = 0; i < n_sets; i++)
6142     if (sets[i].rtl)
6143       {
6144 	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6145 	  /* If elt was removed, find current head of same class,
6146 	     or 0 if nothing remains of that class.  */
6147 	  {
6148 	    struct table_elt *elt = sets[i].src_elt;
6149 
6150 	    while (elt && elt->prev_same_value)
6151 	      elt = elt->prev_same_value;
6152 
6153 	    while (elt && elt->first_same_value == 0)
6154 	      elt = elt->next_same_value;
6155 	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6156 	  }
6157       }
6158 
6159   /* Now insert the destinations into their equivalence classes.  */
6160 
6161   for (i = 0; i < n_sets; i++)
6162     if (sets[i].rtl)
6163       {
6164 	rtx dest = SET_DEST (sets[i].rtl);
6165 	rtx inner_dest = sets[i].inner_dest;
6166 	struct table_elt *elt;
6167 
6168 	/* Don't record value if we are not supposed to risk allocating
6169 	   floating-point values in registers that might be wider than
6170 	   memory.  */
6171 	if ((flag_float_store
6172 	     && GET_CODE (dest) == MEM
6173 	     && FLOAT_MODE_P (GET_MODE (dest)))
6174 	    /* Don't record BLKmode values, because we don't know the
6175 	       size of it, and can't be sure that other BLKmode values
6176 	       have the same or smaller size.  */
6177 	    || GET_MODE (dest) == BLKmode
6178 	    /* Don't record values of destinations set inside a libcall block
6179 	       since we might delete the libcall.  Things should have been set
6180 	       up so we won't want to reuse such a value, but we play it safe
6181 	       here.  */
6182 	    || libcall_insn
6183 	    /* If we didn't put a REG_EQUAL value or a source into the hash
6184 	       table, there is no point is recording DEST.  */
6185 	    || sets[i].src_elt == 0
6186 	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6187 	       or SIGN_EXTEND, don't record DEST since it can cause
6188 	       some tracking to be wrong.
6189 
6190 	       ??? Think about this more later.  */
6191 	    || (GET_CODE (dest) == SUBREG
6192 		&& (GET_MODE_SIZE (GET_MODE (dest))
6193 		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6194 		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6195 		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6196 	  continue;
6197 
6198 	/* STRICT_LOW_PART isn't part of the value BEING set,
6199 	   and neither is the SUBREG inside it.
6200 	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6201 	if (GET_CODE (dest) == STRICT_LOW_PART)
6202 	  dest = SUBREG_REG (XEXP (dest, 0));
6203 
6204 	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6205 	  /* Registers must also be inserted into chains for quantities.  */
6206 	  if (insert_regs (dest, sets[i].src_elt, 1))
6207 	    {
6208 	      /* If `insert_regs' changes something, the hash code must be
6209 		 recalculated.  */
6210 	      rehash_using_reg (dest);
6211 	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6212 	    }
6213 
6214 	if (GET_CODE (inner_dest) == MEM
6215 	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6216 	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6217 	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
6218 	     Consider the case in which the address of the MEM is
6219 	     passed to a function, which alters the MEM.  Then, if we
6220 	     later use Y instead of the MEM we'll miss the update.  */
6221 	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6222 	else
6223 	  elt = insert (dest, sets[i].src_elt,
6224 			sets[i].dest_hash, GET_MODE (dest));
6225 
6226 	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6227 			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6228 			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6229 							  0))));
6230 
6231 	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6232 	   narrower than M2, and both M1 and M2 are the same number of words,
6233 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6234 	   make that equivalence as well.
6235 
6236 	   However, BAR may have equivalences for which gen_lowpart_if_possible
6237 	   will produce a simpler value than gen_lowpart_if_possible applied to
6238 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6239 	   BAR's equivalences.  If we don't get a simplified form, make
6240 	   the SUBREG.  It will not be used in an equivalence, but will
6241 	   cause two similar assignments to be detected.
6242 
6243 	   Note the loop below will find SUBREG_REG (DEST) since we have
6244 	   already entered SRC and DEST of the SET in the table.  */
6245 
6246 	if (GET_CODE (dest) == SUBREG
6247 	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6248 		 / UNITS_PER_WORD)
6249 		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6250 	    && (GET_MODE_SIZE (GET_MODE (dest))
6251 		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6252 	    && sets[i].src_elt != 0)
6253 	  {
6254 	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6255 	    struct table_elt *elt, *classp = 0;
6256 
6257 	    for (elt = sets[i].src_elt->first_same_value; elt;
6258 		 elt = elt->next_same_value)
6259 	      {
6260 		rtx new_src = 0;
6261 		unsigned src_hash;
6262 		struct table_elt *src_elt;
6263 		int byte = 0;
6264 
6265 		/* Ignore invalid entries.  */
6266 		if (GET_CODE (elt->exp) != REG
6267 		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6268 		  continue;
6269 
6270 		/* We may have already been playing subreg games.  If the
6271 		   mode is already correct for the destination, use it.  */
6272 		if (GET_MODE (elt->exp) == new_mode)
6273 		  new_src = elt->exp;
6274 		else
6275 		  {
6276 		    /* Calculate big endian correction for the SUBREG_BYTE.
6277 		       We have already checked that M1 (GET_MODE (dest))
6278 		       is not narrower than M2 (new_mode).  */
6279 		    if (BYTES_BIG_ENDIAN)
6280 		      byte = (GET_MODE_SIZE (GET_MODE (dest))
6281 			      - GET_MODE_SIZE (new_mode));
6282 
6283 		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6284 					           GET_MODE (dest), byte);
6285 		  }
6286 
6287 		/* The call to simplify_gen_subreg fails if the value
6288 		   is VOIDmode, yet we can't do any simplification, e.g.
6289 		   for EXPR_LISTs denoting function call results.
6290 		   It is invalid to construct a SUBREG with a VOIDmode
6291 		   SUBREG_REG, hence a zero new_src means we can't do
6292 		   this substitution.  */
6293 		if (! new_src)
6294 		  continue;
6295 
6296 		src_hash = HASH (new_src, new_mode);
6297 		src_elt = lookup (new_src, src_hash, new_mode);
6298 
6299 		/* Put the new source in the hash table is if isn't
6300 		   already.  */
6301 		if (src_elt == 0)
6302 		  {
6303 		    if (insert_regs (new_src, classp, 0))
6304 		      {
6305 			rehash_using_reg (new_src);
6306 			src_hash = HASH (new_src, new_mode);
6307 		      }
6308 		    src_elt = insert (new_src, classp, src_hash, new_mode);
6309 		    src_elt->in_memory = elt->in_memory;
6310 		  }
6311 		else if (classp && classp != src_elt->first_same_value)
6312 		  /* Show that two things that we've seen before are
6313 		     actually the same.  */
6314 		  merge_equiv_classes (src_elt, classp);
6315 
6316 		classp = src_elt->first_same_value;
6317 		/* Ignore invalid entries.  */
6318 		while (classp
6319 		       && GET_CODE (classp->exp) != REG
6320 		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6321 		  classp = classp->next_same_value;
6322 	      }
6323 	  }
6324       }
6325 
6326   /* Special handling for (set REG0 REG1) where REG0 is the
6327      "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6328      be used in the sequel, so (if easily done) change this insn to
6329      (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6330      that computed their value.  Then REG1 will become a dead store
6331      and won't cloud the situation for later optimizations.
6332 
6333      Do not make this change if REG1 is a hard register, because it will
6334      then be used in the sequel and we may be changing a two-operand insn
6335      into a three-operand insn.
6336 
6337      Also do not do this if we are operating on a copy of INSN.
6338 
6339      Also don't do this if INSN ends a libcall; this would cause an unrelated
6340      register to be set in the middle of a libcall, and we then get bad code
6341      if the libcall is deleted.  */
6342 
6343   if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6344       && NEXT_INSN (PREV_INSN (insn)) == insn
6345       && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6346       && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6347       && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6348     {
6349       int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6350       struct qty_table_elem *src_ent = &qty_table[src_q];
6351 
6352       if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6353 	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6354 	{
6355 	  rtx prev = insn;
6356 	  /* Scan for the previous nonnote insn, but stop at a basic
6357 	     block boundary.  */
6358 	  do
6359 	    {
6360 	      prev = PREV_INSN (prev);
6361 	    }
6362 	  while (prev && GET_CODE (prev) == NOTE
6363 		 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6364 
6365 	  /* Do not swap the registers around if the previous instruction
6366 	     attaches a REG_EQUIV note to REG1.
6367 
6368 	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6369 	     from the pseudo that originally shadowed an incoming argument
6370 	     to another register.  Some uses of REG_EQUIV might rely on it
6371 	     being attached to REG1 rather than REG2.
6372 
6373 	     This section previously turned the REG_EQUIV into a REG_EQUAL
6374 	     note.  We cannot do that because REG_EQUIV may provide an
6375 	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6376 
6377 	  if (prev != 0 && GET_CODE (prev) == INSN
6378 	      && GET_CODE (PATTERN (prev)) == SET
6379 	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6380 	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6381 	    {
6382 	      rtx dest = SET_DEST (sets[0].rtl);
6383 	      rtx src = SET_SRC (sets[0].rtl);
6384 	      rtx note;
6385 
6386 	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6387 	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6388 	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6389 	      apply_change_group ();
6390 
6391 	      /* If there was a REG_WAS_0 note on PREV, remove it.  Move
6392 		 any REG_WAS_0 note on INSN to PREV.  */
6393 	      note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6394 	      if (note)
6395 		remove_note (prev, note);
6396 
6397 	      note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6398 	      if (note)
6399 		{
6400 		  remove_note (insn, note);
6401 		  XEXP (note, 1) = REG_NOTES (prev);
6402 		  REG_NOTES (prev) = note;
6403 		}
6404 
6405 	      /* If INSN has a REG_EQUAL note, and this note mentions
6406 		 REG0, then we must delete it, because the value in
6407 		 REG0 has changed.  If the note's value is REG1, we must
6408 		 also delete it because that is now this insn's dest.  */
6409 	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6410 	      if (note != 0
6411 		  && (reg_mentioned_p (dest, XEXP (note, 0))
6412 		      || rtx_equal_p (src, XEXP (note, 0))))
6413 		remove_note (insn, note);
6414 	    }
6415 	}
6416     }
6417 
6418   /* If this is a conditional jump insn, record any known equivalences due to
6419      the condition being tested.  */
6420 
6421   last_jump_equiv_class = 0;
6422   if (GET_CODE (insn) == JUMP_INSN
6423       && n_sets == 1 && GET_CODE (x) == SET
6424       && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6425     record_jump_equiv (insn, 0);
6426 
6427 #ifdef HAVE_cc0
6428   /* If the previous insn set CC0 and this insn no longer references CC0,
6429      delete the previous insn.  Here we use the fact that nothing expects CC0
6430      to be valid over an insn, which is true until the final pass.  */
6431   if (prev_insn && GET_CODE (prev_insn) == INSN
6432       && (tem = single_set (prev_insn)) != 0
6433       && SET_DEST (tem) == cc0_rtx
6434       && ! reg_mentioned_p (cc0_rtx, x))
6435     delete_insn (prev_insn);
6436 
6437   prev_insn_cc0 = this_insn_cc0;
6438   prev_insn_cc0_mode = this_insn_cc0_mode;
6439 #endif
6440 
6441   prev_insn = insn;
6442 }
6443 
6444 /* Remove from the hash table all expressions that reference memory.  */
6445 
6446 static void
invalidate_memory()6447 invalidate_memory ()
6448 {
6449   int i;
6450   struct table_elt *p, *next;
6451 
6452   for (i = 0; i < HASH_SIZE; i++)
6453     for (p = table[i]; p; p = next)
6454       {
6455 	next = p->next_same_hash;
6456 	if (p->in_memory)
6457 	  remove_from_table (p, i);
6458       }
6459 }
6460 
6461 /* If ADDR is an address that implicitly affects the stack pointer, return
6462    1 and update the register tables to show the effect.  Else, return 0.  */
6463 
6464 static int
addr_affects_sp_p(addr)6465 addr_affects_sp_p (addr)
6466      rtx addr;
6467 {
6468   if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6469       && GET_CODE (XEXP (addr, 0)) == REG
6470       && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6471     {
6472       if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6473 	{
6474 	  REG_TICK (STACK_POINTER_REGNUM)++;
6475 	  /* Is it possible to use a subreg of SP?  */
6476 	  SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6477 	}
6478 
6479       /* This should be *very* rare.  */
6480       if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6481 	invalidate (stack_pointer_rtx, VOIDmode);
6482 
6483       return 1;
6484     }
6485 
6486   return 0;
6487 }
6488 
6489 /* Perform invalidation on the basis of everything about an insn
6490    except for invalidating the actual places that are SET in it.
6491    This includes the places CLOBBERed, and anything that might
6492    alias with something that is SET or CLOBBERed.
6493 
6494    X is the pattern of the insn.  */
6495 
6496 static void
invalidate_from_clobbers(x)6497 invalidate_from_clobbers (x)
6498      rtx x;
6499 {
6500   if (GET_CODE (x) == CLOBBER)
6501     {
6502       rtx ref = XEXP (x, 0);
6503       if (ref)
6504 	{
6505 	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6506 	      || GET_CODE (ref) == MEM)
6507 	    invalidate (ref, VOIDmode);
6508 	  else if (GET_CODE (ref) == STRICT_LOW_PART
6509 		   || GET_CODE (ref) == ZERO_EXTRACT)
6510 	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6511 	}
6512     }
6513   else if (GET_CODE (x) == PARALLEL)
6514     {
6515       int i;
6516       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6517 	{
6518 	  rtx y = XVECEXP (x, 0, i);
6519 	  if (GET_CODE (y) == CLOBBER)
6520 	    {
6521 	      rtx ref = XEXP (y, 0);
6522 	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6523 		  || GET_CODE (ref) == MEM)
6524 		invalidate (ref, VOIDmode);
6525 	      else if (GET_CODE (ref) == STRICT_LOW_PART
6526 		       || GET_CODE (ref) == ZERO_EXTRACT)
6527 		invalidate (XEXP (ref, 0), GET_MODE (ref));
6528 	    }
6529 	}
6530     }
6531 }
6532 
6533 /* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6534    and replace any registers in them with either an equivalent constant
6535    or the canonical form of the register.  If we are inside an address,
6536    only do this if the address remains valid.
6537 
6538    OBJECT is 0 except when within a MEM in which case it is the MEM.
6539 
6540    Return the replacement for X.  */
6541 
6542 static rtx
cse_process_notes(x,object)6543 cse_process_notes (x, object)
6544      rtx x;
6545      rtx object;
6546 {
6547   enum rtx_code code = GET_CODE (x);
6548   const char *fmt = GET_RTX_FORMAT (code);
6549   int i;
6550 
6551   switch (code)
6552     {
6553     case CONST_INT:
6554     case CONST:
6555     case SYMBOL_REF:
6556     case LABEL_REF:
6557     case CONST_DOUBLE:
6558     case CONST_VECTOR:
6559     case PC:
6560     case CC0:
6561     case LO_SUM:
6562       return x;
6563 
6564     case MEM:
6565       validate_change (x, &XEXP (x, 0),
6566 		       cse_process_notes (XEXP (x, 0), x), 0);
6567       return x;
6568 
6569     case EXPR_LIST:
6570     case INSN_LIST:
6571       if (REG_NOTE_KIND (x) == REG_EQUAL)
6572 	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6573       if (XEXP (x, 1))
6574 	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6575       return x;
6576 
6577     case SIGN_EXTEND:
6578     case ZERO_EXTEND:
6579     case SUBREG:
6580       {
6581 	rtx new = cse_process_notes (XEXP (x, 0), object);
6582 	/* We don't substitute VOIDmode constants into these rtx,
6583 	   since they would impede folding.  */
6584 	if (GET_MODE (new) != VOIDmode)
6585 	  validate_change (object, &XEXP (x, 0), new, 0);
6586 	return x;
6587       }
6588 
6589     case REG:
6590       i = REG_QTY (REGNO (x));
6591 
6592       /* Return a constant or a constant register.  */
6593       if (REGNO_QTY_VALID_P (REGNO (x)))
6594 	{
6595 	  struct qty_table_elem *ent = &qty_table[i];
6596 
6597 	  if (ent->const_rtx != NULL_RTX
6598 	      && (CONSTANT_P (ent->const_rtx)
6599 		  || GET_CODE (ent->const_rtx) == REG))
6600 	    {
6601 	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6602 	      if (new)
6603 		return new;
6604 	    }
6605 	}
6606 
6607       /* Otherwise, canonicalize this register.  */
6608       return canon_reg (x, NULL_RTX);
6609 
6610     default:
6611       break;
6612     }
6613 
6614   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6615     if (fmt[i] == 'e')
6616       validate_change (object, &XEXP (x, i),
6617 		       cse_process_notes (XEXP (x, i), object), 0);
6618 
6619   return x;
6620 }
6621 
6622 /* Find common subexpressions between the end test of a loop and the beginning
6623    of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
6624 
6625    Often we have a loop where an expression in the exit test is used
6626    in the body of the loop.  For example "while (*p) *q++ = *p++;".
6627    Because of the way we duplicate the loop exit test in front of the loop,
6628    however, we don't detect that common subexpression.  This will be caught
6629    when global cse is implemented, but this is a quite common case.
6630 
6631    This function handles the most common cases of these common expressions.
6632    It is called after we have processed the basic block ending with the
6633    NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6634    jumps to a label used only once.  */
6635 
6636 static void
cse_around_loop(loop_start)6637 cse_around_loop (loop_start)
6638      rtx loop_start;
6639 {
6640   rtx insn;
6641   int i;
6642   struct table_elt *p;
6643 
6644   /* If the jump at the end of the loop doesn't go to the start, we don't
6645      do anything.  */
6646   for (insn = PREV_INSN (loop_start);
6647        insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6648        insn = PREV_INSN (insn))
6649     ;
6650 
6651   if (insn == 0
6652       || GET_CODE (insn) != NOTE
6653       || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6654     return;
6655 
6656   /* If the last insn of the loop (the end test) was an NE comparison,
6657      we will interpret it as an EQ comparison, since we fell through
6658      the loop.  Any equivalences resulting from that comparison are
6659      therefore not valid and must be invalidated.  */
6660   if (last_jump_equiv_class)
6661     for (p = last_jump_equiv_class->first_same_value; p;
6662 	 p = p->next_same_value)
6663       {
6664 	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6665 	    || (GET_CODE (p->exp) == SUBREG
6666 		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
6667 	  invalidate (p->exp, VOIDmode);
6668 	else if (GET_CODE (p->exp) == STRICT_LOW_PART
6669 		 || GET_CODE (p->exp) == ZERO_EXTRACT)
6670 	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6671       }
6672 
6673   /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6674      a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6675 
6676      The only thing we do with SET_DEST is invalidate entries, so we
6677      can safely process each SET in order.  It is slightly less efficient
6678      to do so, but we only want to handle the most common cases.
6679 
6680      The gen_move_insn call in cse_set_around_loop may create new pseudos.
6681      These pseudos won't have valid entries in any of the tables indexed
6682      by register number, such as reg_qty.  We avoid out-of-range array
6683      accesses by not processing any instructions created after cse started.  */
6684 
6685   for (insn = NEXT_INSN (loop_start);
6686        GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6687        && INSN_UID (insn) < max_insn_uid
6688        && ! (GET_CODE (insn) == NOTE
6689 	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6690        insn = NEXT_INSN (insn))
6691     {
6692       if (INSN_P (insn)
6693 	  && (GET_CODE (PATTERN (insn)) == SET
6694 	      || GET_CODE (PATTERN (insn)) == CLOBBER))
6695 	cse_set_around_loop (PATTERN (insn), insn, loop_start);
6696       else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6697 	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6698 	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6699 	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6700 	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6701 				 loop_start);
6702     }
6703 }
6704 
6705 /* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6706    since they are done elsewhere.  This function is called via note_stores.  */
6707 
6708 static void
invalidate_skipped_set(dest,set,data)6709 invalidate_skipped_set (dest, set, data)
6710      rtx set;
6711      rtx dest;
6712      void *data ATTRIBUTE_UNUSED;
6713 {
6714   enum rtx_code code = GET_CODE (dest);
6715 
6716   if (code == MEM
6717       && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6718       /* There are times when an address can appear varying and be a PLUS
6719 	 during this scan when it would be a fixed address were we to know
6720 	 the proper equivalences.  So invalidate all memory if there is
6721 	 a BLKmode or nonscalar memory reference or a reference to a
6722 	 variable address.  */
6723       && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6724 	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6725     {
6726       invalidate_memory ();
6727       return;
6728     }
6729 
6730   if (GET_CODE (set) == CLOBBER
6731 #ifdef HAVE_cc0
6732       || dest == cc0_rtx
6733 #endif
6734       || dest == pc_rtx)
6735     return;
6736 
6737   if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6738     invalidate (XEXP (dest, 0), GET_MODE (dest));
6739   else if (code == REG || code == SUBREG || code == MEM)
6740     invalidate (dest, VOIDmode);
6741 }
6742 
6743 /* Invalidate all insns from START up to the end of the function or the
6744    next label.  This called when we wish to CSE around a block that is
6745    conditionally executed.  */
6746 
6747 static void
invalidate_skipped_block(start)6748 invalidate_skipped_block (start)
6749      rtx start;
6750 {
6751   rtx insn;
6752 
6753   for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6754        insn = NEXT_INSN (insn))
6755     {
6756       if (! INSN_P (insn))
6757 	continue;
6758 
6759       if (GET_CODE (insn) == CALL_INSN)
6760 	{
6761 	  if (! CONST_OR_PURE_CALL_P (insn))
6762 	    invalidate_memory ();
6763 	  invalidate_for_call ();
6764 	}
6765 
6766       invalidate_from_clobbers (PATTERN (insn));
6767       note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6768     }
6769 }
6770 
6771 /* If modifying X will modify the value in *DATA (which is really an
6772    `rtx *'), indicate that fact by setting the pointed to value to
6773    NULL_RTX.  */
6774 
6775 static void
cse_check_loop_start(x,set,data)6776 cse_check_loop_start (x, set, data)
6777      rtx x;
6778      rtx set ATTRIBUTE_UNUSED;
6779      void *data;
6780 {
6781   rtx *cse_check_loop_start_value = (rtx *) data;
6782 
6783   if (*cse_check_loop_start_value == NULL_RTX
6784       || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6785     return;
6786 
6787   if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6788       || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6789     *cse_check_loop_start_value = NULL_RTX;
6790 }
6791 
6792 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6793    a loop that starts with the label at LOOP_START.
6794 
6795    If X is a SET, we see if its SET_SRC is currently in our hash table.
6796    If so, we see if it has a value equal to some register used only in the
6797    loop exit code (as marked by jump.c).
6798 
6799    If those two conditions are true, we search backwards from the start of
6800    the loop to see if that same value was loaded into a register that still
6801    retains its value at the start of the loop.
6802 
6803    If so, we insert an insn after the load to copy the destination of that
6804    load into the equivalent register and (try to) replace our SET_SRC with that
6805    register.
6806 
6807    In any event, we invalidate whatever this SET or CLOBBER modifies.  */
6808 
6809 static void
cse_set_around_loop(x,insn,loop_start)6810 cse_set_around_loop (x, insn, loop_start)
6811      rtx x;
6812      rtx insn;
6813      rtx loop_start;
6814 {
6815   struct table_elt *src_elt;
6816 
6817   /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6818      are setting PC or CC0 or whose SET_SRC is already a register.  */
6819   if (GET_CODE (x) == SET
6820       && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6821       && GET_CODE (SET_SRC (x)) != REG)
6822     {
6823       src_elt = lookup (SET_SRC (x),
6824 			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6825 			GET_MODE (SET_DEST (x)));
6826 
6827       if (src_elt)
6828 	for (src_elt = src_elt->first_same_value; src_elt;
6829 	     src_elt = src_elt->next_same_value)
6830 	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6831 	      && COST (src_elt->exp) < COST (SET_SRC (x)))
6832 	    {
6833 	      rtx p, set;
6834 
6835 	      /* Look for an insn in front of LOOP_START that sets
6836 		 something in the desired mode to SET_SRC (x) before we hit
6837 		 a label or CALL_INSN.  */
6838 
6839 	      for (p = prev_nonnote_insn (loop_start);
6840 		   p && GET_CODE (p) != CALL_INSN
6841 		   && GET_CODE (p) != CODE_LABEL;
6842 		   p = prev_nonnote_insn  (p))
6843 		if ((set = single_set (p)) != 0
6844 		    && GET_CODE (SET_DEST (set)) == REG
6845 		    && GET_MODE (SET_DEST (set)) == src_elt->mode
6846 		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6847 		  {
6848 		    /* We now have to ensure that nothing between P
6849 		       and LOOP_START modified anything referenced in
6850 		       SET_SRC (x).  We know that nothing within the loop
6851 		       can modify it, or we would have invalidated it in
6852 		       the hash table.  */
6853 		    rtx q;
6854 		    rtx cse_check_loop_start_value = SET_SRC (x);
6855 		    for (q = p; q != loop_start; q = NEXT_INSN (q))
6856 		      if (INSN_P (q))
6857 			note_stores (PATTERN (q),
6858 				     cse_check_loop_start,
6859 				     &cse_check_loop_start_value);
6860 
6861 		    /* If nothing was changed and we can replace our
6862 		       SET_SRC, add an insn after P to copy its destination
6863 		       to what we will be replacing SET_SRC with.  */
6864 		    if (cse_check_loop_start_value
6865 			&& validate_change (insn, &SET_SRC (x),
6866 					    src_elt->exp, 0))
6867 		      {
6868 			/* If this creates new pseudos, this is unsafe,
6869 			   because the regno of new pseudo is unsuitable
6870 			   to index into reg_qty when cse_insn processes
6871 			   the new insn.  Therefore, if a new pseudo was
6872 			   created, discard this optimization.  */
6873 			int nregs = max_reg_num ();
6874 			rtx move
6875 			  = gen_move_insn (src_elt->exp, SET_DEST (set));
6876 			if (nregs != max_reg_num ())
6877 			  {
6878 			    if (! validate_change (insn, &SET_SRC (x),
6879 						   SET_SRC (set), 0))
6880 			      abort ();
6881 			  }
6882 			else
6883 			  {
6884 			    if (control_flow_insn_p (p))
6885 			      /* p can cause a control flow transfer so it
6886 				 is the last insn of a basic block.  We can't
6887 				 therefore use emit_insn_after.  */
6888 			      emit_insn_before (move, next_nonnote_insn (p));
6889 			    else
6890 			      emit_insn_after (move, p);
6891 			  }
6892 		      }
6893 		    break;
6894 		  }
6895 	    }
6896     }
6897 
6898   /* Deal with the destination of X affecting the stack pointer.  */
6899   addr_affects_sp_p (SET_DEST (x));
6900 
6901   /* See comment on similar code in cse_insn for explanation of these
6902      tests.  */
6903   if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6904       || GET_CODE (SET_DEST (x)) == MEM)
6905     invalidate (SET_DEST (x), VOIDmode);
6906   else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6907 	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6908     invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6909 }
6910 
6911 /* Find the end of INSN's basic block and return its range,
6912    the total number of SETs in all the insns of the block, the last insn of the
6913    block, and the branch path.
6914 
6915    The branch path indicates which branches should be followed.  If a nonzero
6916    path size is specified, the block should be rescanned and a different set
6917    of branches will be taken.  The branch path is only used if
6918    FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6919 
6920    DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6921    used to describe the block.  It is filled in with the information about
6922    the current block.  The incoming structure's branch path, if any, is used
6923    to construct the output branch path.  */
6924 
6925 void
cse_end_of_basic_block(insn,data,follow_jumps,after_loop,skip_blocks)6926 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6927      rtx insn;
6928      struct cse_basic_block_data *data;
6929      int follow_jumps;
6930      int after_loop;
6931      int skip_blocks;
6932 {
6933   rtx p = insn, q;
6934   int nsets = 0;
6935   int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6936   rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6937   int path_size = data->path_size;
6938   int path_entry = 0;
6939   int i;
6940 
6941   /* Update the previous branch path, if any.  If the last branch was
6942      previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
6943      shorten the path by one and look at the previous branch.  We know that
6944      at least one branch must have been taken if PATH_SIZE is nonzero.  */
6945   while (path_size > 0)
6946     {
6947       if (data->path[path_size - 1].status != NOT_TAKEN)
6948 	{
6949 	  data->path[path_size - 1].status = NOT_TAKEN;
6950 	  break;
6951 	}
6952       else
6953 	path_size--;
6954     }
6955 
6956   /* If the first instruction is marked with QImode, that means we've
6957      already processed this block.  Our caller will look at DATA->LAST
6958      to figure out where to go next.  We want to return the next block
6959      in the instruction stream, not some branched-to block somewhere
6960      else.  We accomplish this by pretending our called forbid us to
6961      follow jumps, or skip blocks.  */
6962   if (GET_MODE (insn) == QImode)
6963     follow_jumps = skip_blocks = 0;
6964 
6965   /* Scan to end of this basic block.  */
6966   while (p && GET_CODE (p) != CODE_LABEL)
6967     {
6968       /* Don't cse out the end of a loop.  This makes a difference
6969 	 only for the unusual loops that always execute at least once;
6970 	 all other loops have labels there so we will stop in any case.
6971 	 Cse'ing out the end of the loop is dangerous because it
6972 	 might cause an invariant expression inside the loop
6973 	 to be reused after the end of the loop.  This would make it
6974 	 hard to move the expression out of the loop in loop.c,
6975 	 especially if it is one of several equivalent expressions
6976 	 and loop.c would like to eliminate it.
6977 
6978 	 If we are running after loop.c has finished, we can ignore
6979 	 the NOTE_INSN_LOOP_END.  */
6980 
6981       if (! after_loop && GET_CODE (p) == NOTE
6982 	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6983 	break;
6984 
6985       /* Don't cse over a call to setjmp; on some machines (eg VAX)
6986 	 the regs restored by the longjmp come from
6987 	 a later time than the setjmp.  */
6988       if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6989 	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6990 	break;
6991 
6992       /* A PARALLEL can have lots of SETs in it,
6993 	 especially if it is really an ASM_OPERANDS.  */
6994       if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6995 	nsets += XVECLEN (PATTERN (p), 0);
6996       else if (GET_CODE (p) != NOTE)
6997 	nsets += 1;
6998 
6999       /* Ignore insns made by CSE; they cannot affect the boundaries of
7000 	 the basic block.  */
7001 
7002       if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7003 	high_cuid = INSN_CUID (p);
7004       if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7005 	low_cuid = INSN_CUID (p);
7006 
7007       /* See if this insn is in our branch path.  If it is and we are to
7008 	 take it, do so.  */
7009       if (path_entry < path_size && data->path[path_entry].branch == p)
7010 	{
7011 	  if (data->path[path_entry].status != NOT_TAKEN)
7012 	    p = JUMP_LABEL (p);
7013 
7014 	  /* Point to next entry in path, if any.  */
7015 	  path_entry++;
7016 	}
7017 
7018       /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7019 	 was specified, we haven't reached our maximum path length, there are
7020 	 insns following the target of the jump, this is the only use of the
7021 	 jump label, and the target label is preceded by a BARRIER.
7022 
7023 	 Alternatively, we can follow the jump if it branches around a
7024 	 block of code and there are no other branches into the block.
7025 	 In this case invalidate_skipped_block will be called to invalidate any
7026 	 registers set in the block when following the jump.  */
7027 
7028       else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7029 	       && GET_CODE (p) == JUMP_INSN
7030 	       && GET_CODE (PATTERN (p)) == SET
7031 	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7032 	       && JUMP_LABEL (p) != 0
7033 	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
7034 	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
7035 	{
7036 	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7037 	    if ((GET_CODE (q) != NOTE
7038 		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7039 		 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
7040 		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
7041 		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7042 	      break;
7043 
7044 	  /* If we ran into a BARRIER, this code is an extension of the
7045 	     basic block when the branch is taken.  */
7046 	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7047 	    {
7048 	      /* Don't allow ourself to keep walking around an
7049 		 always-executed loop.  */
7050 	      if (next_real_insn (q) == next)
7051 		{
7052 		  p = NEXT_INSN (p);
7053 		  continue;
7054 		}
7055 
7056 	      /* Similarly, don't put a branch in our path more than once.  */
7057 	      for (i = 0; i < path_entry; i++)
7058 		if (data->path[i].branch == p)
7059 		  break;
7060 
7061 	      if (i != path_entry)
7062 		break;
7063 
7064 	      data->path[path_entry].branch = p;
7065 	      data->path[path_entry++].status = TAKEN;
7066 
7067 	      /* This branch now ends our path.  It was possible that we
7068 		 didn't see this branch the last time around (when the
7069 		 insn in front of the target was a JUMP_INSN that was
7070 		 turned into a no-op).  */
7071 	      path_size = path_entry;
7072 
7073 	      p = JUMP_LABEL (p);
7074 	      /* Mark block so we won't scan it again later.  */
7075 	      PUT_MODE (NEXT_INSN (p), QImode);
7076 	    }
7077 	  /* Detect a branch around a block of code.  */
7078 	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7079 	    {
7080 	      rtx tmp;
7081 
7082 	      if (next_real_insn (q) == next)
7083 		{
7084 		  p = NEXT_INSN (p);
7085 		  continue;
7086 		}
7087 
7088 	      for (i = 0; i < path_entry; i++)
7089 		if (data->path[i].branch == p)
7090 		  break;
7091 
7092 	      if (i != path_entry)
7093 		break;
7094 
7095 	      /* This is no_labels_between_p (p, q) with an added check for
7096 		 reaching the end of a function (in case Q precedes P).  */
7097 	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7098 		if (GET_CODE (tmp) == CODE_LABEL)
7099 		  break;
7100 
7101 	      if (tmp == q)
7102 		{
7103 		  data->path[path_entry].branch = p;
7104 		  data->path[path_entry++].status = AROUND;
7105 
7106 		  path_size = path_entry;
7107 
7108 		  p = JUMP_LABEL (p);
7109 		  /* Mark block so we won't scan it again later.  */
7110 		  PUT_MODE (NEXT_INSN (p), QImode);
7111 		}
7112 	    }
7113 	}
7114       p = NEXT_INSN (p);
7115     }
7116 
7117   data->low_cuid = low_cuid;
7118   data->high_cuid = high_cuid;
7119   data->nsets = nsets;
7120   data->last = p;
7121 
7122   /* If all jumps in the path are not taken, set our path length to zero
7123      so a rescan won't be done.  */
7124   for (i = path_size - 1; i >= 0; i--)
7125     if (data->path[i].status != NOT_TAKEN)
7126       break;
7127 
7128   if (i == -1)
7129     data->path_size = 0;
7130   else
7131     data->path_size = path_size;
7132 
7133   /* End the current branch path.  */
7134   data->path[path_size].branch = 0;
7135 }
7136 
7137 /* Perform cse on the instructions of a function.
7138    F is the first instruction.
7139    NREGS is one plus the highest pseudo-reg number used in the instruction.
7140 
7141    AFTER_LOOP is 1 if this is the cse call done after loop optimization
7142    (only if -frerun-cse-after-loop).
7143 
7144    Returns 1 if jump_optimize should be redone due to simplifications
7145    in conditional jump instructions.  */
7146 
7147 int
cse_main(f,nregs,after_loop,file)7148 cse_main (f, nregs, after_loop, file)
7149      rtx f;
7150      int nregs;
7151      int after_loop;
7152      FILE *file;
7153 {
7154   struct cse_basic_block_data val;
7155   rtx insn = f;
7156   int i;
7157 
7158   cse_jumps_altered = 0;
7159   recorded_label_ref = 0;
7160   constant_pool_entries_cost = 0;
7161   val.path_size = 0;
7162 
7163   init_recog ();
7164   init_alias_analysis ();
7165 
7166   max_reg = nregs;
7167 
7168   max_insn_uid = get_max_uid ();
7169 
7170   reg_eqv_table = (struct reg_eqv_elem *)
7171     xmalloc (nregs * sizeof (struct reg_eqv_elem));
7172 
7173 #ifdef LOAD_EXTEND_OP
7174 
7175   /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
7176      and change the code and mode as appropriate.  */
7177   memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7178 #endif
7179 
7180   /* Reset the counter indicating how many elements have been made
7181      thus far.  */
7182   n_elements_made = 0;
7183 
7184   /* Find the largest uid.  */
7185 
7186   max_uid = get_max_uid ();
7187   uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7188 
7189   /* Compute the mapping from uids to cuids.
7190      CUIDs are numbers assigned to insns, like uids,
7191      except that cuids increase monotonically through the code.
7192      Don't assign cuids to line-number NOTEs, so that the distance in cuids
7193      between two insns is not affected by -g.  */
7194 
7195   for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7196     {
7197       if (GET_CODE (insn) != NOTE
7198 	  || NOTE_LINE_NUMBER (insn) < 0)
7199 	INSN_CUID (insn) = ++i;
7200       else
7201 	/* Give a line number note the same cuid as preceding insn.  */
7202 	INSN_CUID (insn) = i;
7203     }
7204 
7205   ggc_push_context ();
7206 
7207   /* Loop over basic blocks.
7208      Compute the maximum number of qty's needed for each basic block
7209      (which is 2 for each SET).  */
7210   insn = f;
7211   while (insn)
7212     {
7213       cse_altered = 0;
7214       cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7215 			      flag_cse_skip_blocks);
7216 
7217       /* If this basic block was already processed or has no sets, skip it.  */
7218       if (val.nsets == 0 || GET_MODE (insn) == QImode)
7219 	{
7220 	  PUT_MODE (insn, VOIDmode);
7221 	  insn = (val.last ? NEXT_INSN (val.last) : 0);
7222 	  val.path_size = 0;
7223 	  continue;
7224 	}
7225 
7226       cse_basic_block_start = val.low_cuid;
7227       cse_basic_block_end = val.high_cuid;
7228       max_qty = val.nsets * 2;
7229 
7230       if (file)
7231 	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7232 		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7233 		 val.nsets);
7234 
7235       /* Make MAX_QTY bigger to give us room to optimize
7236 	 past the end of this basic block, if that should prove useful.  */
7237       if (max_qty < 500)
7238 	max_qty = 500;
7239 
7240       max_qty += max_reg;
7241 
7242       /* If this basic block is being extended by following certain jumps,
7243          (see `cse_end_of_basic_block'), we reprocess the code from the start.
7244          Otherwise, we start after this basic block.  */
7245       if (val.path_size > 0)
7246 	cse_basic_block (insn, val.last, val.path, 0);
7247       else
7248 	{
7249 	  int old_cse_jumps_altered = cse_jumps_altered;
7250 	  rtx temp;
7251 
7252 	  /* When cse changes a conditional jump to an unconditional
7253 	     jump, we want to reprocess the block, since it will give
7254 	     us a new branch path to investigate.  */
7255 	  cse_jumps_altered = 0;
7256 	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7257 	  if (cse_jumps_altered == 0
7258 	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7259 	    insn = temp;
7260 
7261 	  cse_jumps_altered |= old_cse_jumps_altered;
7262 	}
7263 
7264       if (cse_altered)
7265 	ggc_collect ();
7266 
7267 #ifdef USE_C_ALLOCA
7268       alloca (0);
7269 #endif
7270     }
7271 
7272   ggc_pop_context ();
7273 
7274   if (max_elements_made < n_elements_made)
7275     max_elements_made = n_elements_made;
7276 
7277   /* Clean up.  */
7278   end_alias_analysis ();
7279   free (uid_cuid);
7280   free (reg_eqv_table);
7281 
7282   return cse_jumps_altered || recorded_label_ref;
7283 }
7284 
7285 /* Process a single basic block.  FROM and TO and the limits of the basic
7286    block.  NEXT_BRANCH points to the branch path when following jumps or
7287    a null path when not following jumps.
7288 
7289    AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7290    loop.  This is true when we are being called for the last time on a
7291    block and this CSE pass is before loop.c.  */
7292 
7293 static rtx
cse_basic_block(from,to,next_branch,around_loop)7294 cse_basic_block (from, to, next_branch, around_loop)
7295      rtx from, to;
7296      struct branch_path *next_branch;
7297      int around_loop;
7298 {
7299   rtx insn;
7300   int to_usage = 0;
7301   rtx libcall_insn = NULL_RTX;
7302   int num_insns = 0;
7303 
7304   /* This array is undefined before max_reg, so only allocate
7305      the space actually needed and adjust the start.  */
7306 
7307   qty_table
7308     = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7309 					 * sizeof (struct qty_table_elem));
7310   qty_table -= max_reg;
7311 
7312   new_basic_block ();
7313 
7314   /* TO might be a label.  If so, protect it from being deleted.  */
7315   if (to != 0 && GET_CODE (to) == CODE_LABEL)
7316     ++LABEL_NUSES (to);
7317 
7318   for (insn = from; insn != to; insn = NEXT_INSN (insn))
7319     {
7320       enum rtx_code code = GET_CODE (insn);
7321 
7322       /* If we have processed 1,000 insns, flush the hash table to
7323 	 avoid extreme quadratic behavior.  We must not include NOTEs
7324 	 in the count since there may be more of them when generating
7325 	 debugging information.  If we clear the table at different
7326 	 times, code generated with -g -O might be different than code
7327 	 generated with -O but not -g.
7328 
7329 	 ??? This is a real kludge and needs to be done some other way.
7330 	 Perhaps for 2.9.  */
7331       if (code != NOTE && num_insns++ > 1000)
7332 	{
7333 	  flush_hash_table ();
7334 	  num_insns = 0;
7335 	}
7336 
7337       /* See if this is a branch that is part of the path.  If so, and it is
7338 	 to be taken, do so.  */
7339       if (next_branch->branch == insn)
7340 	{
7341 	  enum taken status = next_branch++->status;
7342 	  if (status != NOT_TAKEN)
7343 	    {
7344 	      if (status == TAKEN)
7345 		record_jump_equiv (insn, 1);
7346 	      else
7347 		invalidate_skipped_block (NEXT_INSN (insn));
7348 
7349 	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7350 		 Then follow this branch.  */
7351 #ifdef HAVE_cc0
7352 	      prev_insn_cc0 = 0;
7353 #endif
7354 	      prev_insn = insn;
7355 	      insn = JUMP_LABEL (insn);
7356 	      continue;
7357 	    }
7358 	}
7359 
7360       if (GET_MODE (insn) == QImode)
7361 	PUT_MODE (insn, VOIDmode);
7362 
7363       if (GET_RTX_CLASS (code) == 'i')
7364 	{
7365 	  rtx p;
7366 
7367 	  /* Process notes first so we have all notes in canonical forms when
7368 	     looking for duplicate operations.  */
7369 
7370 	  if (REG_NOTES (insn))
7371 	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7372 
7373 	  /* Track when we are inside in LIBCALL block.  Inside such a block,
7374 	     we do not want to record destinations.  The last insn of a
7375 	     LIBCALL block is not considered to be part of the block, since
7376 	     its destination is the result of the block and hence should be
7377 	     recorded.  */
7378 
7379 	  if (REG_NOTES (insn) != 0)
7380 	    {
7381 	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7382 		libcall_insn = XEXP (p, 0);
7383 	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7384 		libcall_insn = 0;
7385 	    }
7386 
7387 	  cse_insn (insn, libcall_insn);
7388 
7389 	  /* If we haven't already found an insn where we added a LABEL_REF,
7390 	     check this one.  */
7391 	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
7392 	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7393 			       (void *) insn))
7394 	    recorded_label_ref = 1;
7395 	}
7396 
7397       /* If INSN is now an unconditional jump, skip to the end of our
7398 	 basic block by pretending that we just did the last insn in the
7399 	 basic block.  If we are jumping to the end of our block, show
7400 	 that we can have one usage of TO.  */
7401 
7402       if (any_uncondjump_p (insn))
7403 	{
7404 	  if (to == 0)
7405 	    {
7406 	      free (qty_table + max_reg);
7407 	      return 0;
7408 	    }
7409 
7410 	  if (JUMP_LABEL (insn) == to)
7411 	    to_usage = 1;
7412 
7413 	  /* Maybe TO was deleted because the jump is unconditional.
7414 	     If so, there is nothing left in this basic block.  */
7415 	  /* ??? Perhaps it would be smarter to set TO
7416 	     to whatever follows this insn,
7417 	     and pretend the basic block had always ended here.  */
7418 	  if (INSN_DELETED_P (to))
7419 	    break;
7420 
7421 	  insn = PREV_INSN (to);
7422 	}
7423 
7424       /* See if it is ok to keep on going past the label
7425 	 which used to end our basic block.  Remember that we incremented
7426 	 the count of that label, so we decrement it here.  If we made
7427 	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7428 	 want to count the use in that jump.  */
7429 
7430       if (to != 0 && NEXT_INSN (insn) == to
7431 	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7432 	{
7433 	  struct cse_basic_block_data val;
7434 	  rtx prev;
7435 
7436 	  insn = NEXT_INSN (to);
7437 
7438 	  /* If TO was the last insn in the function, we are done.  */
7439 	  if (insn == 0)
7440 	    {
7441 	      free (qty_table + max_reg);
7442 	      return 0;
7443 	    }
7444 
7445 	  /* If TO was preceded by a BARRIER we are done with this block
7446 	     because it has no continuation.  */
7447 	  prev = prev_nonnote_insn (to);
7448 	  if (prev && GET_CODE (prev) == BARRIER)
7449 	    {
7450 	      free (qty_table + max_reg);
7451 	      return insn;
7452 	    }
7453 
7454 	  /* Find the end of the following block.  Note that we won't be
7455 	     following branches in this case.  */
7456 	  to_usage = 0;
7457 	  val.path_size = 0;
7458 	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
7459 
7460 	  /* If the tables we allocated have enough space left
7461 	     to handle all the SETs in the next basic block,
7462 	     continue through it.  Otherwise, return,
7463 	     and that block will be scanned individually.  */
7464 	  if (val.nsets * 2 + next_qty > max_qty)
7465 	    break;
7466 
7467 	  cse_basic_block_start = val.low_cuid;
7468 	  cse_basic_block_end = val.high_cuid;
7469 	  to = val.last;
7470 
7471 	  /* Prevent TO from being deleted if it is a label.  */
7472 	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7473 	    ++LABEL_NUSES (to);
7474 
7475 	  /* Back up so we process the first insn in the extension.  */
7476 	  insn = PREV_INSN (insn);
7477 	}
7478     }
7479 
7480   if (next_qty > max_qty)
7481     abort ();
7482 
7483   /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7484      the previous insn is the only insn that branches to the head of a loop,
7485      we can cse into the loop.  Don't do this if we changed the jump
7486      structure of a loop unless we aren't going to be following jumps.  */
7487 
7488   insn = prev_nonnote_insn (to);
7489   if ((cse_jumps_altered == 0
7490        || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7491       && around_loop && to != 0
7492       && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7493       && GET_CODE (insn) == JUMP_INSN
7494       && JUMP_LABEL (insn) != 0
7495       && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7496     cse_around_loop (JUMP_LABEL (insn));
7497 
7498   free (qty_table + max_reg);
7499 
7500   return to ? NEXT_INSN (to) : 0;
7501 }
7502 
7503 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7504    there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7505 
7506 static int
check_for_label_ref(rtl,data)7507 check_for_label_ref (rtl, data)
7508      rtx *rtl;
7509      void *data;
7510 {
7511   rtx insn = (rtx) data;
7512 
7513   /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7514      we must rerun jump since it needs to place the note.  If this is a
7515      LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7516      since no REG_LABEL will be added.  */
7517   return (GET_CODE (*rtl) == LABEL_REF
7518 	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7519 	  && LABEL_P (XEXP (*rtl, 0))
7520 	  && INSN_UID (XEXP (*rtl, 0)) != 0
7521 	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7522 }
7523 
7524 /* Count the number of times registers are used (not set) in X.
7525    COUNTS is an array in which we accumulate the count, INCR is how much
7526    we count each register usage.
7527 
7528    Don't count a usage of DEST, which is the SET_DEST of a SET which
7529    contains X in its SET_SRC.  This is because such a SET does not
7530    modify the liveness of DEST.  */
7531 
7532 static void
count_reg_usage(x,counts,dest,incr)7533 count_reg_usage (x, counts, dest, incr)
7534      rtx x;
7535      int *counts;
7536      rtx dest;
7537      int incr;
7538 {
7539   enum rtx_code code;
7540   rtx note;
7541   const char *fmt;
7542   int i, j;
7543 
7544   if (x == 0)
7545     return;
7546 
7547   switch (code = GET_CODE (x))
7548     {
7549     case REG:
7550       if (x != dest)
7551 	counts[REGNO (x)] += incr;
7552       return;
7553 
7554     case PC:
7555     case CC0:
7556     case CONST:
7557     case CONST_INT:
7558     case CONST_DOUBLE:
7559     case CONST_VECTOR:
7560     case SYMBOL_REF:
7561     case LABEL_REF:
7562       return;
7563 
7564     case CLOBBER:
7565       /* If we are clobbering a MEM, mark any registers inside the address
7566          as being used.  */
7567       if (GET_CODE (XEXP (x, 0)) == MEM)
7568 	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7569       return;
7570 
7571     case SET:
7572       /* Unless we are setting a REG, count everything in SET_DEST.  */
7573       if (GET_CODE (SET_DEST (x)) != REG)
7574 	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7575 
7576       /* If SRC has side-effects, then we can't delete this insn, so the
7577 	 usage of SET_DEST inside SRC counts.
7578 
7579 	 ??? Strictly-speaking, we might be preserving this insn
7580 	 because some other SET has side-effects, but that's hard
7581 	 to do and can't happen now.  */
7582       count_reg_usage (SET_SRC (x), counts,
7583 		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7584 		       incr);
7585       return;
7586 
7587     case CALL_INSN:
7588       count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7589       /* Fall through.  */
7590 
7591     case INSN:
7592     case JUMP_INSN:
7593       count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7594 
7595       /* Things used in a REG_EQUAL note aren't dead since loop may try to
7596 	 use them.  */
7597 
7598       note = find_reg_equal_equiv_note (x);
7599       if (note)
7600         count_reg_usage (XEXP (note, 0), counts, NULL_RTX, incr);
7601       return;
7602 
7603     case INSN_LIST:
7604       abort ();
7605 
7606     default:
7607       break;
7608     }
7609 
7610   fmt = GET_RTX_FORMAT (code);
7611   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7612     {
7613       if (fmt[i] == 'e')
7614 	count_reg_usage (XEXP (x, i), counts, dest, incr);
7615       else if (fmt[i] == 'E')
7616 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7617 	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7618     }
7619 }
7620 
7621 /* Return true if set is live.  */
7622 static bool
set_live_p(set,insn,counts)7623 set_live_p (set, insn, counts)
7624      rtx set;
7625      rtx insn ATTRIBUTE_UNUSED;	/* Only used with HAVE_cc0.  */
7626      int *counts;
7627 {
7628 #ifdef HAVE_cc0
7629   rtx tem;
7630 #endif
7631 
7632   if (set_noop_p (set))
7633     ;
7634 
7635 #ifdef HAVE_cc0
7636   else if (GET_CODE (SET_DEST (set)) == CC0
7637 	   && !side_effects_p (SET_SRC (set))
7638 	   && ((tem = next_nonnote_insn (insn)) == 0
7639 	       || !INSN_P (tem)
7640 	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7641     return false;
7642 #endif
7643   else if (GET_CODE (SET_DEST (set)) != REG
7644 	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7645 	   || counts[REGNO (SET_DEST (set))] != 0
7646 	   || side_effects_p (SET_SRC (set))
7647 	   /* An ADDRESSOF expression can turn into a use of the
7648 	      internal arg pointer, so always consider the
7649 	      internal arg pointer live.  If it is truly dead,
7650 	      flow will delete the initializing insn.  */
7651 	   || (SET_DEST (set) == current_function_internal_arg_pointer))
7652     return true;
7653   return false;
7654 }
7655 
7656 /* Return true if insn is live.  */
7657 
7658 static bool
insn_live_p(insn,counts)7659 insn_live_p (insn, counts)
7660      rtx insn;
7661      int *counts;
7662 {
7663   int i;
7664   if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7665     return true;
7666   else if (GET_CODE (PATTERN (insn)) == SET)
7667     return set_live_p (PATTERN (insn), insn, counts);
7668   else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7669     {
7670       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7671 	{
7672 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7673 
7674 	  if (GET_CODE (elt) == SET)
7675 	    {
7676 	      if (set_live_p (elt, insn, counts))
7677 		return true;
7678 	    }
7679 	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7680 	    return true;
7681 	}
7682       return false;
7683     }
7684   else
7685     return true;
7686 }
7687 
7688 /* Return true if libcall is dead as a whole.  */
7689 
7690 static bool
dead_libcall_p(insn,counts)7691 dead_libcall_p (insn, counts)
7692      rtx insn;
7693      int *counts;
7694 {
7695   rtx note;
7696   /* See if there's a REG_EQUAL note on this insn and try to
7697      replace the source with the REG_EQUAL expression.
7698 
7699      We assume that insns with REG_RETVALs can only be reg->reg
7700      copies at this point.  */
7701   note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7702   if (note)
7703     {
7704       rtx set = single_set (insn);
7705       rtx new = simplify_rtx (XEXP (note, 0));
7706 
7707       if (!new)
7708 	new = XEXP (note, 0);
7709 
7710       /* While changing insn, we must update the counts accordingly.  */
7711       count_reg_usage (insn, counts, NULL_RTX, -1);
7712 
7713       if (set && validate_change (insn, &SET_SRC (set), new, 0))
7714 	{
7715           count_reg_usage (insn, counts, NULL_RTX, 1);
7716 	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7717 	  remove_note (insn, note);
7718 	  return true;
7719 	}
7720        count_reg_usage (insn, counts, NULL_RTX, 1);
7721     }
7722   return false;
7723 }
7724 
7725 /* Scan all the insns and delete any that are dead; i.e., they store a register
7726    that is never used or they copy a register to itself.
7727 
7728    This is used to remove insns made obviously dead by cse, loop or other
7729    optimizations.  It improves the heuristics in loop since it won't try to
7730    move dead invariants out of loops or make givs for dead quantities.  The
7731    remaining passes of the compilation are also sped up.  */
7732 
7733 int
delete_trivially_dead_insns(insns,nreg)7734 delete_trivially_dead_insns (insns, nreg)
7735      rtx insns;
7736      int nreg;
7737 {
7738   int *counts;
7739   rtx insn, prev;
7740   int in_libcall = 0, dead_libcall = 0;
7741   int ndead = 0, nlastdead, niterations = 0;
7742 
7743   timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7744   /* First count the number of times each register is used.  */
7745   counts = (int *) xcalloc (nreg, sizeof (int));
7746   for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7747     count_reg_usage (insn, counts, NULL_RTX, 1);
7748 
7749   do
7750     {
7751       nlastdead = ndead;
7752       niterations++;
7753       /* Go from the last insn to the first and delete insns that only set unused
7754 	 registers or copy a register to itself.  As we delete an insn, remove
7755 	 usage counts for registers it uses.
7756 
7757 	 The first jump optimization pass may leave a real insn as the last
7758 	 insn in the function.   We must not skip that insn or we may end
7759 	 up deleting code that is not really dead.  */
7760       insn = get_last_insn ();
7761       if (! INSN_P (insn))
7762 	insn = prev_real_insn (insn);
7763 
7764       for (; insn; insn = prev)
7765 	{
7766 	  int live_insn = 0;
7767 
7768 	  prev = prev_real_insn (insn);
7769 
7770 	  /* Don't delete any insns that are part of a libcall block unless
7771 	     we can delete the whole libcall block.
7772 
7773 	     Flow or loop might get confused if we did that.  Remember
7774 	     that we are scanning backwards.  */
7775 	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7776 	    {
7777 	      in_libcall = 1;
7778 	      live_insn = 1;
7779 	      dead_libcall = dead_libcall_p (insn, counts);
7780 	    }
7781 	  else if (in_libcall)
7782 	    live_insn = ! dead_libcall;
7783 	  else
7784 	    live_insn = insn_live_p (insn, counts);
7785 
7786 	  /* If this is a dead insn, delete it and show registers in it aren't
7787 	     being used.  */
7788 
7789 	  if (! live_insn)
7790 	    {
7791 	      count_reg_usage (insn, counts, NULL_RTX, -1);
7792 	      delete_insn_and_edges (insn);
7793 	      ndead++;
7794 	    }
7795 
7796 	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7797 	    {
7798 	      in_libcall = 0;
7799 	      dead_libcall = 0;
7800 	    }
7801 	}
7802     }
7803   while (ndead != nlastdead);
7804 
7805   if (rtl_dump_file && ndead)
7806     fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7807 	     ndead, niterations);
7808   /* Clean up.  */
7809   free (counts);
7810   timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7811   return ndead;
7812 }
7813 
7814 /* This function is called via for_each_rtx.  The argument, NEWREG, is
7815    a condition code register with the desired mode.  If we are looking
7816    at the same register in a different mode, replace it with
7817    NEWREG.  */
7818 
7819 static int
cse_change_cc_mode(loc,data)7820 cse_change_cc_mode (loc, data)
7821      rtx *loc;
7822      void *data;
7823 {
7824   rtx newreg = (rtx) data;
7825 
7826   if (*loc
7827       && GET_CODE (*loc) == REG
7828       && REGNO (*loc) == REGNO (newreg)
7829       && GET_MODE (*loc) != GET_MODE (newreg))
7830     {
7831       *loc = newreg;
7832       return -1;
7833     }
7834   return 0;
7835 }
7836 
7837 /* Change the mode of any reference to the register REGNO (NEWREG) to
7838    GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7839    any instruction which modifies NEWREG.  */
7840 
7841 static void
cse_change_cc_mode_insns(start,end,newreg)7842 cse_change_cc_mode_insns (start, end, newreg)
7843      rtx start;
7844      rtx end;
7845      rtx newreg;
7846 {
7847   rtx insn;
7848 
7849   for (insn = start; insn != end; insn = NEXT_INSN (insn))
7850     {
7851       if (! INSN_P (insn))
7852 	continue;
7853 
7854       if (reg_set_p (newreg, insn))
7855 	return;
7856 
7857       for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7858       for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7859     }
7860 }
7861 
7862 /* BB is a basic block which finishes with CC_REG as a condition code
7863    register which is set to CC_SRC.  Look through the successors of BB
7864    to find blocks which have a single predecessor (i.e., this one),
7865    and look through those blocks for an assignment to CC_REG which is
7866    equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7867    permitted to change the mode of CC_SRC to a compatible mode.  This
7868    returns VOIDmode if no equivalent assignments were found.
7869    Otherwise it returns the mode which CC_SRC should wind up with.
7870 
7871    The main complexity in this function is handling the mode issues.
7872    We may have more than one duplicate which we can eliminate, and we
7873    try to find a mode which will work for multiple duplicates.  */
7874 
7875 static enum machine_mode
cse_cc_succs(bb,cc_reg,cc_src,can_change_mode)7876 cse_cc_succs (bb, cc_reg, cc_src, can_change_mode)
7877      basic_block bb;
7878      rtx cc_reg;
7879      rtx cc_src;
7880      int can_change_mode;
7881 {
7882   bool found_equiv;
7883   enum machine_mode mode;
7884   unsigned int insn_count;
7885   edge e;
7886   rtx insns[2];
7887   enum machine_mode modes[2];
7888   rtx last_insns[2];
7889   unsigned int i;
7890   rtx newreg;
7891 
7892   /* We expect to have two successors.  Look at both before picking
7893      the final mode for the comparison.  If we have more successors
7894      (i.e., some sort of table jump, although that seems unlikely),
7895      then we require all beyond the first two to use the same
7896      mode.  */
7897 
7898   found_equiv = false;
7899   mode = GET_MODE (cc_src);
7900   insn_count = 0;
7901   for (e = bb->succ; e; e = e->succ_next)
7902     {
7903       rtx insn;
7904       rtx end;
7905 
7906       if (e->flags & EDGE_COMPLEX)
7907 	continue;
7908 
7909       if (! e->dest->pred
7910 	  || e->dest->pred->pred_next
7911 	  || e->dest == EXIT_BLOCK_PTR)
7912 	continue;
7913 
7914       end = NEXT_INSN (e->dest->end);
7915       for (insn = e->dest->head; insn != end; insn = NEXT_INSN (insn))
7916 	{
7917 	  rtx set;
7918 
7919 	  if (! INSN_P (insn))
7920 	    continue;
7921 
7922 	  /* If CC_SRC is modified, we have to stop looking for
7923 	     something which uses it.  */
7924 	  if (modified_in_p (cc_src, insn))
7925 	    break;
7926 
7927 	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7928 	  set = single_set (insn);
7929 	  if (set
7930 	      && GET_CODE (SET_DEST (set)) == REG
7931 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7932 	    {
7933 	      bool found;
7934 	      enum machine_mode set_mode;
7935 	      enum machine_mode comp_mode;
7936 
7937 	      found = false;
7938 	      set_mode = GET_MODE (SET_SRC (set));
7939 	      comp_mode = set_mode;
7940 	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7941 		found = true;
7942 	      else if (GET_CODE (cc_src) == COMPARE
7943 		       && GET_CODE (SET_SRC (set)) == COMPARE
7944 		       && mode != set_mode
7945 		       && rtx_equal_p (XEXP (cc_src, 0),
7946 				       XEXP (SET_SRC (set), 0))
7947 		       && rtx_equal_p (XEXP (cc_src, 1),
7948 				       XEXP (SET_SRC (set), 1)))
7949 
7950 		{
7951 		  comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
7952 		  if (comp_mode != VOIDmode
7953 		      && (can_change_mode || comp_mode == mode))
7954 		    found = true;
7955 		}
7956 
7957 	      if (found)
7958 		{
7959 		  found_equiv = true;
7960 		  if (insn_count < ARRAY_SIZE (insns))
7961 		    {
7962 		      insns[insn_count] = insn;
7963 		      modes[insn_count] = set_mode;
7964 		      last_insns[insn_count] = end;
7965 		      ++insn_count;
7966 
7967 		      if (mode != comp_mode)
7968 			{
7969 			  if (! can_change_mode)
7970 			    abort ();
7971 			  mode = comp_mode;
7972 			  PUT_MODE (cc_src, mode);
7973 			}
7974 		    }
7975 		  else
7976 		    {
7977 		      if (set_mode != mode)
7978 			{
7979 			  /* We found a matching expression in the
7980 			     wrong mode, but we don't have room to
7981 			     store it in the array.  Punt.  This case
7982 			     should be rare.  */
7983 			  break;
7984 			}
7985 		      /* INSN sets CC_REG to a value equal to CC_SRC
7986 			 with the right mode.  We can simply delete
7987 			 it.  */
7988 		      delete_insn (insn);
7989 		    }
7990 
7991 		  /* We found an instruction to delete.  Keep looking,
7992 		     in the hopes of finding a three-way jump.  */
7993 		  continue;
7994 		}
7995 
7996 	      /* We found an instruction which sets the condition
7997 		 code, so don't look any farther.  */
7998 	      break;
7999 	    }
8000 
8001 	  /* If INSN sets CC_REG in some other way, don't look any
8002 	     farther.  */
8003 	  if (reg_set_p (cc_reg, insn))
8004 	    break;
8005 	}
8006 
8007       /* If we fell off the bottom of the block, we can keep looking
8008 	 through successors.  We pass CAN_CHANGE_MODE as false because
8009 	 we aren't prepared to handle compatibility between the
8010 	 further blocks and this block.  */
8011       if (insn == end)
8012 	{
8013 	  enum machine_mode submode;
8014 
8015 	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
8016 	  if (submode != VOIDmode)
8017 	    {
8018 	      if (submode != mode)
8019 		abort ();
8020 	      found_equiv = true;
8021 	      can_change_mode = false;
8022 	    }
8023 	}
8024     }
8025 
8026   if (! found_equiv)
8027     return VOIDmode;
8028 
8029   /* Now INSN_COUNT is the number of instructions we found which set
8030      CC_REG to a value equivalent to CC_SRC.  The instructions are in
8031      INSNS.  The modes used by those instructions are in MODES.  */
8032 
8033   newreg = NULL_RTX;
8034   for (i = 0; i < insn_count; ++i)
8035     {
8036       if (modes[i] != mode)
8037 	{
8038 	  /* We need to change the mode of CC_REG in INSNS[i] and
8039 	     subsequent instructions.  */
8040 	  if (! newreg)
8041 	    {
8042 	      if (GET_MODE (cc_reg) == mode)
8043 		newreg = cc_reg;
8044 	      else
8045 		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
8046 	    }
8047 	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
8048 				    newreg);
8049 	}
8050 
8051       delete_insn (insns[i]);
8052     }
8053 
8054   return mode;
8055 }
8056 
8057 /* If we have a fixed condition code register (or two), walk through
8058    the instructions and try to eliminate duplicate assignments.  */
8059 
8060 void
cse_condition_code_reg()8061 cse_condition_code_reg ()
8062 {
8063   unsigned int cc_regno_1;
8064   unsigned int cc_regno_2;
8065   rtx cc_reg_1;
8066   rtx cc_reg_2;
8067   basic_block bb;
8068 
8069   if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
8070     return;
8071 
8072   cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
8073   if (cc_regno_2 != INVALID_REGNUM)
8074     cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
8075   else
8076     cc_reg_2 = NULL_RTX;
8077 
8078   FOR_EACH_BB (bb)
8079     {
8080       rtx last_insn;
8081       rtx cc_reg;
8082       rtx insn;
8083       rtx cc_src_insn;
8084       rtx cc_src;
8085       enum machine_mode mode;
8086       enum machine_mode orig_mode;
8087 
8088       /* Look for blocks which end with a conditional jump based on a
8089 	 condition code register.  Then look for the instruction which
8090 	 sets the condition code register.  Then look through the
8091 	 successor blocks for instructions which set the condition
8092 	 code register to the same value.  There are other possible
8093 	 uses of the condition code register, but these are by far the
8094 	 most common and the ones which we are most likely to be able
8095 	 to optimize.  */
8096 
8097       last_insn = bb->end;
8098       if (GET_CODE (last_insn) != JUMP_INSN)
8099 	continue;
8100 
8101       if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
8102 	cc_reg = cc_reg_1;
8103       else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
8104 	cc_reg = cc_reg_2;
8105       else
8106 	continue;
8107 
8108       cc_src_insn = NULL_RTX;
8109       cc_src = NULL_RTX;
8110       for (insn = PREV_INSN (last_insn);
8111 	   insn && insn != PREV_INSN (bb->head);
8112 	   insn = PREV_INSN (insn))
8113 	{
8114 	  rtx set;
8115 
8116 	  if (! INSN_P (insn))
8117 	    continue;
8118 	  set = single_set (insn);
8119 	  if (set
8120 	      && GET_CODE (SET_DEST (set)) == REG
8121 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
8122 	    {
8123 	      cc_src_insn = insn;
8124 	      cc_src = SET_SRC (set);
8125 	      break;
8126 	    }
8127 	  else if (reg_set_p (cc_reg, insn))
8128 	    break;
8129 	}
8130 
8131       if (! cc_src_insn)
8132 	continue;
8133 
8134       if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
8135 	continue;
8136 
8137       /* Now CC_REG is a condition code register used for a
8138 	 conditional jump at the end of the block, and CC_SRC, in
8139 	 CC_SRC_INSN, is the value to which that condition code
8140 	 register is set, and CC_SRC is still meaningful at the end of
8141 	 the basic block.  */
8142 
8143       orig_mode = GET_MODE (cc_src);
8144       mode = cse_cc_succs (bb, cc_reg, cc_src, true);
8145       if (mode != VOIDmode)
8146 	{
8147 	  if (mode != GET_MODE (cc_src))
8148 	    abort ();
8149 	  if (mode != orig_mode)
8150 	    {
8151 	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
8152 
8153 	      /* Change the mode of CC_REG in CC_SRC_INSN to
8154 		 GET_MODE (NEWREG).  */
8155 	      for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
8156 			    newreg);
8157 	      for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
8158 			    newreg);
8159 
8160 	      /* Do the same in the following insns that use the
8161 		 current value of CC_REG within BB.  */
8162 	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
8163 					NEXT_INSN (last_insn),
8164 					newreg);
8165 	    }
8166 	}
8167     }
8168 }
8169 
8170 enum machine_mode
default_cc_modes_compatible(m1,m2)8171 default_cc_modes_compatible (m1, m2)
8172      enum machine_mode m1;
8173      enum machine_mode m2;
8174 {
8175   if (m1 == m2)
8176     return m1;
8177   return VOIDmode;
8178 }
8179