1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 
28 #include "machmode.h"
29 #include "hard-reg-set.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "obstack.h"
33 #include "insn-config.h"
34 #include "flags.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "regs.h"
39 #include "addresses.h"
40 #include "basic-block.h"
41 #include "reload.h"
42 #include "recog.h"
43 #include "output.h"
44 #include "real.h"
45 #include "toplev.h"
46 #include "except.h"
47 #include "tree.h"
48 #include "target.h"
49 
50 /* This file contains the reload pass of the compiler, which is
51    run after register allocation has been done.  It checks that
52    each insn is valid (operands required to be in registers really
53    are in registers of the proper class) and fixes up invalid ones
54    by copying values temporarily into registers for the insns
55    that need them.
56 
57    The results of register allocation are described by the vector
58    reg_renumber; the insns still contain pseudo regs, but reg_renumber
59    can be used to find which hard reg, if any, a pseudo reg is in.
60 
61    The technique we always use is to free up a few hard regs that are
62    called ``reload regs'', and for each place where a pseudo reg
63    must be in a hard reg, copy it temporarily into one of the reload regs.
64 
65    Reload regs are allocated locally for every instruction that needs
66    reloads.  When there are pseudos which are allocated to a register that
67    has been chosen as a reload reg, such pseudos must be ``spilled''.
68    This means that they go to other hard regs, or to stack slots if no other
69    available hard regs can be found.  Spilling can invalidate more
70    insns, creating additional need for reloads, so we must keep checking
71    until the process stabilizes.
72 
73    For machines with different classes of registers, we must keep track
74    of the register class needed for each reload, and make sure that
75    we allocate enough reload registers of each class.
76 
77    The file reload.c contains the code that checks one insn for
78    validity and reports the reloads that it needs.  This file
79    is in charge of scanning the entire rtl code, accumulating the
80    reload needs, spilling, assigning reload registers to use for
81    fixing up each insn, and generating the new insns to copy values
82    into the reload registers.  */
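
/* As a rough illustration of the scheme described above (the actual reloads
   chosen depend on the insn's constraints and on the target), suppose pseudo
   117 received no hard register and lives in a stack slot, while an insn
   needs it in a general register:

       (set (reg:SI 117) (plus:SI (reg:SI 117) (const_int 4)))

   Reload then emits something along these lines, using a reload register
   (hard reg 3 here; both the pseudo and the reload register numbers are
   arbitrary, chosen only for this sketch):

       (set (reg:SI 3) (mem:SI <stack slot of pseudo 117>))
       (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
       (set (mem:SI <stack slot of pseudo 117>) (reg:SI 3))

   i.e. an input reload before the insn, the insn rewritten to use the
   reload register, and an output reload after it.  */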
83 
84 /* During reload_as_needed, element N contains a REG rtx for the hard reg
85    into which reg N has been reloaded (perhaps for a previous insn).  */
86 static rtx *reg_last_reload_reg;
87 
88 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
89    for an output reload that stores into reg N.  */
90 static regset_head reg_has_output_reload;
91 
92 /* Indicates which hard regs are reload-registers for an output reload
93    in the current insn.  */
94 static HARD_REG_SET reg_is_output_reload;
95 
96 /* Element N is the constant value to which pseudo reg N is equivalent,
97    or zero if pseudo reg N is not equivalent to a constant.
98    find_reloads looks at this in order to replace pseudo reg N
99    with the constant it stands for.  */
100 rtx *reg_equiv_constant;
101 
102 /* Element N is an invariant value to which pseudo reg N is equivalent.
103    eliminate_regs_in_insn uses this to replace pseudos in particular
104    contexts.  */
105 rtx *reg_equiv_invariant;
106 
107 /* Element N is a memory location to which pseudo reg N is equivalent,
108    prior to any register elimination (such as frame pointer to stack
109    pointer).  Depending on whether or not it is a valid address, this value
110    is transferred to either reg_equiv_address or reg_equiv_mem.  */
111 rtx *reg_equiv_memory_loc;
112 
113 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
114    collector can keep track of what is inside.  */
115 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
116 
117 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
118    This is used when the address is not valid as a memory address
119    (because its displacement is too big for the machine.)  */
120 rtx *reg_equiv_address;
121 
122 /* Element N is the memory slot to which pseudo reg N is equivalent,
123    or zero if pseudo reg N is not equivalent to a memory slot.  */
124 rtx *reg_equiv_mem;
125 
126 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
127    alternate representations of the location of pseudo reg N.  */
128 rtx *reg_equiv_alt_mem_list;
129 
130 /* Widest width in which each pseudo reg is referred to (via subreg).  */
131 static unsigned int *reg_max_ref_width;
132 
133 /* Element N is the list of insns that initialized reg N from its equivalent
134    constant or memory slot.  */
135 rtx *reg_equiv_init;
136 int reg_equiv_init_size;
137 
138 /* Vector to remember old contents of reg_renumber before spilling.  */
139 static short *reg_old_renumber;
140 
141 /* During reload_as_needed, element N contains the last pseudo regno reloaded
142    into hard register N.  If that pseudo reg occupied more than one register,
143    reg_reloaded_contents points to that pseudo for each spill register in
144    use; all of these must remain set for an inheritance to occur.  */
145 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
146 
147 /* During reload_as_needed, element N contains the insn for which
148    hard register N was last used.   Its contents are significant only
149    when reg_reloaded_valid is set for this register.  */
150 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
151 
152 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
153 static HARD_REG_SET reg_reloaded_valid;
154 /* Indicate if the register was dead at the end of the reload.
155    This is only valid if reg_reloaded_contents is set and valid.  */
156 static HARD_REG_SET reg_reloaded_dead;
157 
158 /* Indicate whether the register's current value is one that is not
159    safe to retain across a call, even for registers that are normally
160    call-saved.  */
161 static HARD_REG_SET reg_reloaded_call_part_clobbered;
162 
163 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
164 static int n_spills;
165 
166 /* In parallel with spill_regs, contains REG rtx's for those regs.
167    Holds the last rtx used for any given reg, or 0 if it has never
168    been used for spilling yet.  This rtx is reused, provided it has
169    the proper mode.  */
170 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
171 
172 /* In parallel with spill_regs, contains nonzero for a spill reg
173    that was stored after the last time it was used.
174    The precise value is the insn generated to do the store.  */
175 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
176 
177 /* This is the register that was stored with spill_reg_store.  This is a
178    copy of reload_out / reload_out_reg when the value was stored; if
179    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
180 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
181 
182 /* This table is the inverse mapping of spill_regs:
183    indexed by hard reg number,
184    it contains the position of that reg in spill_regs,
185    or -1 for something that is not in spill_regs.
186 
187    ?!?  This is no longer accurate.  */
188 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
189 
190 /* This reg set indicates registers that can't be used as spill registers for
191    the currently processed insn.  These are the hard registers which are live
192    during the insn, but not allocated to pseudos, as well as fixed
193    registers.  */
194 static HARD_REG_SET bad_spill_regs;
195 
196 /* These are the hard registers that can't be used as spill register for any
197    insn.  This includes registers used for user variables and registers that
198    we can't eliminate.  A register that appears in this set also can't be used
199    to retry register allocation.  */
200 static HARD_REG_SET bad_spill_regs_global;
201 
202 /* Describes order of use of registers for reloading
203    of spilled pseudo-registers.  `n_spills' is the number of
204    elements that are actually valid; new ones are added at the end.
205 
206    Both spill_regs and spill_reg_order are used on two occasions:
207    once during find_reload_regs, where they keep track of the spill registers
208    for a single insn, but also during reload_as_needed where they show all
209    the registers ever used by reload.  For the latter case, the information
210    is calculated during finish_spills.  */
211 static short spill_regs[FIRST_PSEUDO_REGISTER];
212 
213 /* This vector of reg sets indicates, for each pseudo, which hard registers
214    may not be used for retrying global allocation because the register was
215    formerly spilled from one of them.  If we allowed reallocating a pseudo to
216    a register that it was already allocated to, reload might not
217    terminate.  */
218 static HARD_REG_SET *pseudo_previous_regs;
219 
220 /* This vector of reg sets indicates, for each pseudo, which hard
221    registers may not be used for retrying global allocation because they
222    are used as spill registers during one of the insns in which the
223    pseudo is live.  */
224 static HARD_REG_SET *pseudo_forbidden_regs;
225 
226 /* All hard regs that have been used as spill registers for any insn are
227    marked in this set.  */
228 static HARD_REG_SET used_spill_regs;
229 
230 /* Index of last register assigned as a spill register.  We allocate in
231    a round-robin fashion.  */
232 static int last_spill_reg;
233 
234 /* Nonzero if indirect addressing is supported on the machine; this means
235    that spilling (REG n) does not require reloading it into a register in
236    order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
237    value indicates the level of indirect addressing supported, e.g., two
238    means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
239    a hard register.  */
240 static char spill_indirect_levels;
241 
242 /* Nonzero if indirect addressing is supported when the innermost MEM is
243    of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
244    which these are valid is the same as spill_indirect_levels, above.  */
245 char indirect_symref_ok;
246 
247 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
248 char double_reg_address_ok;
249 
250 /* Record the stack slot for each spilled hard register.  */
251 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
252 
253 /* Width allocated so far for that stack slot.  */
254 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
255 
256 /* Record which pseudos needed to be spilled.  */
257 static regset_head spilled_pseudos;
258 
259 /* Used for communication between order_regs_for_reload and count_pseudo.
260    Used to avoid counting one pseudo twice.  */
261 static regset_head pseudos_counted;
262 
263 /* First uid used by insns created by reload in this function.
264    Used in find_equiv_reg.  */
265 int reload_first_uid;
266 
267 /* Flag set by local-alloc or global-alloc if anything is live in
268    a call-clobbered reg across calls.  */
269 int caller_save_needed;
270 
271 /* Set to 1 while reload_as_needed is operating.
272    Required by some machines to handle any generated moves differently.  */
273 int reload_in_progress = 0;
274 
275 /* These arrays record the insn_code of insns that may be needed to
276    perform input and output reloads of special objects.  They provide a
277    place to pass a scratch register.  */
278 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
279 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
280 
281 /* This obstack is used for allocation of rtl during register elimination.
282    The allocated storage can be freed once find_reloads has processed the
283    insn.  */
284 static struct obstack reload_obstack;
285 
286 /* Points to the beginning of the reload_obstack.  All insn_chain structures
287    are allocated first.  */
288 static char *reload_startobj;
289 
290 /* The point after all insn_chain structures.  Used to quickly deallocate
291    memory allocated in copy_reloads during calculate_needs_all_insns.  */
292 static char *reload_firstobj;
293 
294 /* This points before all local rtl generated by register elimination.
295    Used to quickly free all memory after processing one insn.  */
296 static char *reload_insn_firstobj;
297 
298 /* List of insn_chain instructions, one for every insn that reload needs to
299    examine.  */
300 struct insn_chain *reload_insn_chain;
301 
302 /* List of all insns needing reloads.  */
303 static struct insn_chain *insns_need_reload;
304 
305 /* This structure is used to record information about register eliminations.
306    Each array entry describes one possible way of eliminating a register
307    in favor of another.   If there is more than one way of eliminating a
308    particular register, the most preferred should be specified first.  */
309 
310 struct elim_table
311 {
312   int from;			/* Register number to be eliminated.  */
313   int to;			/* Register number used as replacement.  */
314   HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
315   int can_eliminate;		/* Nonzero if this elimination can be done.  */
316   int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
317 				   insns made by reload.  */
318   HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
319   HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
320   int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
321   rtx from_rtx;			/* REG rtx for the register to be eliminated.
322 				   We cannot simply compare the number since
323 				   we might then spuriously replace a hard
324 				   register corresponding to a pseudo
325 				   assigned to the reg to be eliminated.  */
326   rtx to_rtx;			/* REG rtx for the replacement.  */
327 };
328 
329 static struct elim_table *reg_eliminate = 0;
330 
331 /* This is an intermediate structure to initialize the table.  It has
332    exactly the members provided by ELIMINABLE_REGS.  */
333 static const struct elim_table_1
334 {
335   const int from;
336   const int to;
337 } reg_eliminate_1[] =
338 
339 /* If a set of eliminable registers was specified, define the table from it.
340    Otherwise, default to the normal case of the frame pointer being
341    replaced by the stack pointer.  */
342 
343 #ifdef ELIMINABLE_REGS
344   ELIMINABLE_REGS;
345 #else
346   {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
347 #endif
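
/* For instance, with the default table above, a reference such as
   (plus (reg frame_pointer) (const_int C)) is rewritten during elimination
   as (plus (reg stack_pointer) (const_int C + OFFSET)), where OFFSET is the
   current difference between the two registers tracked in the `offset'
   field of the corresponding elim_table entry; C and OFFSET are symbolic
   here, not actual values.  */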
348 
349 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
350 
351 /* Record the number of pending eliminations that have an offset not equal
352    to their initial offset.  If nonzero, we use a new copy of each
353    replacement result in any insns encountered.  */
354 int num_not_at_initial_offset;
355 
356 /* Count the number of registers that we may be able to eliminate.  */
357 static int num_eliminable;
358 /* And the number of registers that are equivalent to a constant that
359    can be eliminated to frame_pointer / arg_pointer + constant.  */
360 static int num_eliminable_invariants;
361 
362 /* For each label, we record the offset of each elimination.  If we reach
363    a label by more than one path and an offset differs, we cannot do the
364    elimination.  This information is indexed by the difference of the
365    number of the label and the first label number.  We can't offset the
366    pointer itself as this can cause problems on machines with segmented
367    memory.  The first table is an array of flags that records whether we
368    have yet encountered a label and the second table is an array of arrays,
369    one entry in the latter array for each elimination.  */
370 
371 static int first_label_num;
372 static char *offsets_known_at;
373 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
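
/* Concretely, the flag for label L is
   offsets_known_at[CODE_LABEL_NUMBER (L) - first_label_num], and the offset
   recorded there for the I-th entry of reg_eliminate is
   offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][I].  */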
374 
375 /* Number of labels in the current function.  */
376 
377 static int num_labels;
378 
379 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
380 static void maybe_fix_stack_asms (void);
381 static void copy_reloads (struct insn_chain *);
382 static void calculate_needs_all_insns (int);
383 static int find_reg (struct insn_chain *, int);
384 static void find_reload_regs (struct insn_chain *);
385 static void select_reload_regs (void);
386 static void delete_caller_save_insns (void);
387 
388 static void spill_failure (rtx, enum reg_class);
389 static void count_spilled_pseudo (int, int, int);
390 static void delete_dead_insn (rtx);
391 static void alter_reg (int, int);
392 static void set_label_offsets (rtx, rtx, int);
393 static void check_eliminable_occurrences (rtx);
394 static void elimination_effects (rtx, enum machine_mode);
395 static int eliminate_regs_in_insn (rtx, int);
396 static void update_eliminable_offsets (void);
397 static void mark_not_eliminable (rtx, rtx, void *);
398 static void set_initial_elim_offsets (void);
399 static bool verify_initial_elim_offsets (void);
400 static void set_initial_label_offsets (void);
401 static void set_offsets_for_label (rtx);
402 static void init_elim_table (void);
403 static void update_eliminables (HARD_REG_SET *);
404 static void spill_hard_reg (unsigned int, int);
405 static int finish_spills (int);
406 static void scan_paradoxical_subregs (rtx);
407 static void count_pseudo (int);
408 static void order_regs_for_reload (struct insn_chain *);
409 static void reload_as_needed (int);
410 static void forget_old_reloads_1 (rtx, rtx, void *);
411 static void forget_marked_reloads (regset);
412 static int reload_reg_class_lower (const void *, const void *);
413 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
414 				    enum machine_mode);
415 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
416 				     enum machine_mode);
417 static int reload_reg_free_p (unsigned int, int, enum reload_type);
418 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
419 					rtx, rtx, int, int);
420 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
421 			     rtx, rtx, int, int);
422 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
423 static int allocate_reload_reg (struct insn_chain *, int, int);
424 static int conflicts_with_override (rtx);
425 static void failed_reload (rtx, int);
426 static int set_reload_reg (int, int);
427 static void choose_reload_regs_init (struct insn_chain *, rtx *);
428 static void choose_reload_regs (struct insn_chain *);
429 static void merge_assigned_reloads (rtx);
430 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
431 				     rtx, int);
432 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
433 				      int);
434 static void do_input_reload (struct insn_chain *, struct reload *, int);
435 static void do_output_reload (struct insn_chain *, struct reload *, int);
436 static bool inherit_piecemeal_p (int, int);
437 static void emit_reload_insns (struct insn_chain *);
438 static void delete_output_reload (rtx, int, int);
439 static void delete_address_reloads (rtx, rtx);
440 static void delete_address_reloads_1 (rtx, rtx, rtx);
441 static rtx inc_for_reload (rtx, rtx, rtx, int);
442 #ifdef AUTO_INC_DEC
443 static void add_auto_inc_notes (rtx, rtx);
444 #endif
445 static void copy_eh_notes (rtx, rtx);
446 static int reloads_conflict (int, int);
447 static rtx gen_reload (rtx, rtx, int, enum reload_type);
448 static rtx emit_insn_if_valid_for_reload (rtx);
449 
450 /* Initialize the reload pass once per compilation.  */
451 
452 void
453 init_reload (void)
454 {
455   int i;
456 
457   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
458      Set spill_indirect_levels to the number of levels such addressing is
459      permitted, zero if it is not permitted at all.  */
460 
461   rtx tem
462     = gen_rtx_MEM (Pmode,
463 		   gen_rtx_PLUS (Pmode,
464 				 gen_rtx_REG (Pmode,
465 					      LAST_VIRTUAL_REGISTER + 1),
466 				 GEN_INT (4)));
467   spill_indirect_levels = 0;
468 
469   while (memory_address_p (QImode, tem))
470     {
471       spill_indirect_levels++;
472       tem = gen_rtx_MEM (Pmode, tem);
473     }
474 
475   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
476 
477   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
478   indirect_symref_ok = memory_address_p (QImode, tem);
479 
480   /* See if reg+reg is a valid (and offsettable) address.  */
481 
482   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
483     {
484       tem = gen_rtx_PLUS (Pmode,
485 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
486 			  gen_rtx_REG (Pmode, i));
487 
488       /* This way, we make sure that reg+reg is an offsettable address.  */
489       tem = plus_constant (tem, 4);
490 
491       if (memory_address_p (QImode, tem))
492 	{
493 	  double_reg_address_ok = 1;
494 	  break;
495 	}
496     }
497 
498   /* Initialize obstack for our rtl allocation.  */
499   gcc_obstack_init (&reload_obstack);
500   reload_startobj = obstack_alloc (&reload_obstack, 0);
501 
502   INIT_REG_SET (&spilled_pseudos);
503   INIT_REG_SET (&pseudos_counted);
504 }
505 
506 /* List of insn chains that are currently unused.  */
507 static struct insn_chain *unused_insn_chains = 0;
508 
509 /* Allocate an empty insn_chain structure.  */
510 struct insn_chain *
511 new_insn_chain (void)
512 {
513   struct insn_chain *c;
514 
515   if (unused_insn_chains == 0)
516     {
517       c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
518       INIT_REG_SET (&c->live_throughout);
519       INIT_REG_SET (&c->dead_or_set);
520     }
521   else
522     {
523       c = unused_insn_chains;
524       unused_insn_chains = c->next;
525     }
526   c->is_caller_save_insn = 0;
527   c->need_operand_change = 0;
528   c->need_reload = 0;
529   c->need_elim = 0;
530   return c;
531 }
532 
533 /* Small utility function to set, in hard reg set TO, all hard regs that
534    are allocated to pseudos in regset FROM.  */
535 
536 void
537 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
538 {
539   unsigned int regno;
540   reg_set_iterator rsi;
541 
542   EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
543     {
544       int r = reg_renumber[regno];
545       int nregs;
546 
547       if (r < 0)
548 	{
549 	  /* reload_combine uses the information from
550 	     BASIC_BLOCK->global_live_at_start, which might still
551 	     contain registers that have not actually been allocated
552 	     since they have an equivalence.  */
553 	  gcc_assert (reload_completed);
554 	}
555       else
556 	{
557 	  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
558 	  while (nregs-- > 0)
559 	    SET_HARD_REG_BIT (*to, r + nregs);
560 	}
561     }
562 }
563 
564 /* Replace all pseudos found in LOC with their corresponding
565    equivalences.  */
566 
567 static void
568 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
569 {
570   rtx x = *loc;
571   enum rtx_code code;
572   const char *fmt;
573   int i, j;
574 
575   if (! x)
576     return;
577 
578   code = GET_CODE (x);
579   if (code == REG)
580     {
581       unsigned int regno = REGNO (x);
582 
583       if (regno < FIRST_PSEUDO_REGISTER)
584 	return;
585 
586       x = eliminate_regs (x, mem_mode, usage);
587       if (x != *loc)
588 	{
589 	  *loc = x;
590 	  replace_pseudos_in (loc, mem_mode, usage);
591 	  return;
592 	}
593 
594       if (reg_equiv_constant[regno])
595 	*loc = reg_equiv_constant[regno];
596       else if (reg_equiv_mem[regno])
597 	*loc = reg_equiv_mem[regno];
598       else if (reg_equiv_address[regno])
599 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
600       else
601 	{
602 	  gcc_assert (!REG_P (regno_reg_rtx[regno])
603 		      || REGNO (regno_reg_rtx[regno]) != regno);
604 	  *loc = regno_reg_rtx[regno];
605 	}
606 
607       return;
608     }
609   else if (code == MEM)
610     {
611       replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
612       return;
613     }
614 
615   /* Process each of our operands recursively.  */
616   fmt = GET_RTX_FORMAT (code);
617   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
618     if (*fmt == 'e')
619       replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
620     else if (*fmt == 'E')
621       for (j = 0; j < XVECLEN (x, i); j++)
622 	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
623 }
624 
625 
626 /* Global variables used by reload and its subroutines.  */
627 
628 /* Set during calculate_needs if an insn needs register elimination.  */
629 static int something_needs_elimination;
630 /* Set during calculate_needs if an insn needs an operand changed.  */
631 static int something_needs_operands_changed;
632 
633 /* Nonzero means we couldn't get enough spill regs.  */
634 static int failure;
635 
636 /* Main entry point for the reload pass.
637 
638    FIRST is the first insn of the function being compiled.
639 
640    GLOBAL nonzero means we were called from global_alloc
641    and should attempt to reallocate any pseudoregs that we
642    displace from hard regs we will use for reloads.
643    If GLOBAL is zero, we do not have enough information to do that,
644    so any pseudo reg that is spilled must go to the stack.
645 
646    Return value is nonzero if reload failed
647    and we must not do any more for this function.  */
648 
649 int
650 reload (rtx first, int global)
651 {
652   int i;
653   rtx insn;
654   struct elim_table *ep;
655   basic_block bb;
656 
657   /* Make sure even insns with volatile mem refs are recognizable.  */
658   init_recog ();
659 
660   failure = 0;
661 
662   reload_firstobj = obstack_alloc (&reload_obstack, 0);
663 
664   /* Make sure that the last insn in the chain
665      is not something that needs reloading.  */
666   emit_note (NOTE_INSN_DELETED);
667 
668   /* Enable find_equiv_reg to distinguish insns made by reload.  */
669   reload_first_uid = get_max_uid ();
670 
671 #ifdef SECONDARY_MEMORY_NEEDED
672   /* Initialize the secondary memory table.  */
673   clear_secondary_mem ();
674 #endif
675 
676   /* We don't have a stack slot for any spill reg yet.  */
677   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
678   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
679 
680   /* Initialize the save area information for caller-save, in case some
681      are needed.  */
682   init_save_areas ();
683 
684   /* Compute which hard registers are now in use
685      as homes for pseudo registers.
686      This is done here rather than (eg) in global_alloc
687      because this point is reached even if not optimizing.  */
688   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
689     mark_home_live (i);
690 
691   /* A function that receives a nonlocal goto must save all call-saved
692      registers.  */
693   if (current_function_has_nonlocal_label)
694     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
695       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
696 	regs_ever_live[i] = 1;
697 
698   /* Find all the pseudo registers that didn't get hard regs
699      but do have known equivalent constants or memory slots.
700      These include parameters (known equivalent to parameter slots)
701      and cse'd or loop-moved constant memory addresses.
702 
703      Record constant equivalents in reg_equiv_constant
704      so they will be substituted by find_reloads.
705      Record memory equivalents in reg_mem_equiv so they can
706      be substituted eventually by altering the REG-rtx's.  */
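
  /* For example, an incoming argument that was copied out of its stack slot
     typically carries a REG_EQUIV note whose content is that slot, roughly

	 (insn ... (set (reg:SI 120) (mem:SI <arg slot>)) ...
	  (expr_list:REG_EQUIV (mem:SI <arg slot>) (nil)))

     so the pseudo can be replaced by the slot if it gets no hard register;
     pseudo 120 is an illustrative number only.  */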
707 
708   reg_equiv_constant = XCNEWVEC (rtx, max_regno);
709   reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
710   reg_equiv_mem = XCNEWVEC (rtx, max_regno);
711   reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
712   reg_equiv_address = XCNEWVEC (rtx, max_regno);
713   reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
714   reg_old_renumber = XCNEWVEC (short, max_regno);
715   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
716   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
717   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
718 
719   CLEAR_HARD_REG_SET (bad_spill_regs_global);
720 
721   /* Look for REG_EQUIV notes; record what each pseudo is equivalent
722      to.  Also find all paradoxical subregs and find largest such for
723      each pseudo.  */
724 
725   num_eliminable_invariants = 0;
726   for (insn = first; insn; insn = NEXT_INSN (insn))
727     {
728       rtx set = single_set (insn);
729 
730       /* We may introduce USEs that we want to remove at the end, so
731 	 we'll mark them with QImode.  Make sure there are no
732 	 previously-marked insns left by say regmove.  */
733       if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
734 	  && GET_MODE (insn) != VOIDmode)
735 	PUT_MODE (insn, VOIDmode);
736 
737       if (INSN_P (insn))
738 	scan_paradoxical_subregs (PATTERN (insn));
739 
740       if (set != 0 && REG_P (SET_DEST (set)))
741 	{
742 	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
743 	  rtx x;
744 
745 	  if (! note)
746 	    continue;
747 
748 	  i = REGNO (SET_DEST (set));
749 	  x = XEXP (note, 0);
750 
751 	  if (i <= LAST_VIRTUAL_REGISTER)
752 	    continue;
753 
754 	  if (! function_invariant_p (x)
755 	      || ! flag_pic
756 	      /* A function invariant is often CONSTANT_P but may
757 		 include a register.  We promise to only pass
758 		 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
759 	      || (CONSTANT_P (x)
760 		  && LEGITIMATE_PIC_OPERAND_P (x)))
761 	    {
762 	      /* It can happen that a REG_EQUIV note contains a MEM
763 		 that is not a legitimate memory operand.  As later
764 		 stages of reload assume that all addresses found
765 		 in the reg_equiv_* arrays were originally legitimate,
766 		 we ignore such REG_EQUIV notes.  */
767 	      if (memory_operand (x, VOIDmode))
768 		{
769 		  /* Always unshare the equivalence, so we can
770 		     substitute into this insn without touching the
771 		       equivalence.  */
772 		  reg_equiv_memory_loc[i] = copy_rtx (x);
773 		}
774 	      else if (function_invariant_p (x))
775 		{
776 		  if (GET_CODE (x) == PLUS)
777 		    {
778 		      /* This is PLUS of frame pointer and a constant,
779 			 and might be shared.  Unshare it.  */
780 		      reg_equiv_invariant[i] = copy_rtx (x);
781 		      num_eliminable_invariants++;
782 		    }
783 		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
784 		    {
785 		      reg_equiv_invariant[i] = x;
786 		      num_eliminable_invariants++;
787 		    }
788 		  else if (LEGITIMATE_CONSTANT_P (x))
789 		    reg_equiv_constant[i] = x;
790 		  else
791 		    {
792 		      reg_equiv_memory_loc[i]
793 			= force_const_mem (GET_MODE (SET_DEST (set)), x);
794 		      if (! reg_equiv_memory_loc[i])
795 			reg_equiv_init[i] = NULL_RTX;
796 		    }
797 		}
798 	      else
799 		{
800 		  reg_equiv_init[i] = NULL_RTX;
801 		  continue;
802 		}
803 	    }
804 	  else
805 	    reg_equiv_init[i] = NULL_RTX;
806 	}
807     }
808 
809   if (dump_file)
810     for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
811       if (reg_equiv_init[i])
812 	{
813 	  fprintf (dump_file, "init_insns for %u: ", i);
814 	  print_inline_rtx (dump_file, reg_equiv_init[i], 20);
815 	  fprintf (dump_file, "\n");
816 	}
817 
818   init_elim_table ();
819 
820   first_label_num = get_first_label_num ();
821   num_labels = max_label_num () - first_label_num;
822 
823   /* Allocate the tables used to store offset information at labels.  */
824   /* We used to use alloca here, but the size of what it would try to
825      allocate would occasionally cause it to exceed the stack limit and
826      cause a core dump.  */
827   offsets_known_at = XNEWVEC (char, num_labels);
828   offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
829 
830   /* Alter each pseudo-reg rtx to contain its hard reg number.
831      Assign stack slots to the pseudos that lack hard regs or equivalents.
832      Do not touch virtual registers.  */
833 
834   for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
835     alter_reg (i, -1);
836 
837   /* If we have some registers we think can be eliminated, scan all insns to
838      see if there is an insn that sets one of these registers to something
839      other than itself plus a constant.  If so, the register cannot be
840      eliminated.  Doing this scan here eliminates an extra pass through the
841      main reload loop in the most common case where register elimination
842      cannot be done.  */
843   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
844     if (INSN_P (insn))
845       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
846 
847   maybe_fix_stack_asms ();
848 
849   insns_need_reload = 0;
850   something_needs_elimination = 0;
851 
852   /* Initialize to -1, which means take the first spill register.  */
853   last_spill_reg = -1;
854 
855   /* Spill any hard regs that we know we can't eliminate.  */
856   CLEAR_HARD_REG_SET (used_spill_regs);
857   /* There can be multiple ways to eliminate a register;
858      they should be listed adjacently.
859      Elimination for any register fails only if all possible ways fail.  */
860   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
861     {
862       int from = ep->from;
863       int can_eliminate = 0;
864       do
865 	{
866           can_eliminate |= ep->can_eliminate;
867           ep++;
868 	}
869       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
870       if (! can_eliminate)
871 	spill_hard_reg (from, 1);
872     }
873 
874 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
875   if (frame_pointer_needed)
876     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
877 #endif
878   finish_spills (global);
879 
880   /* From now on, we may need to generate moves differently.  We may also
881      allow modifications of insns which cause them to not be recognized.
882      Any such modifications will be cleaned up during reload itself.  */
883   reload_in_progress = 1;
884 
885   /* This loop scans the entire function each go-round
886      and repeats until one repetition spills no additional hard regs.  */
887   for (;;)
888     {
889       int something_changed;
890       int did_spill;
891 
892       HOST_WIDE_INT starting_frame_size;
893 
894       /* Round size of stack frame to stack_alignment_needed.  This must be done
895 	 here because the stack size may be a part of the offset computation
896 	 for register elimination, and there might have been new stack slots
897 	 created in the last iteration of this loop.  */
898       if (cfun->stack_alignment_needed)
899         assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
900 
901       starting_frame_size = get_frame_size ();
902 
903       set_initial_elim_offsets ();
904       set_initial_label_offsets ();
905 
906       /* For each pseudo register that has an equivalent location defined,
907 	 try to eliminate any eliminable registers (such as the frame pointer)
908 	 assuming initial offsets for the replacement register, which
909 	 is the normal case.
910 
911 	 If the resulting location is directly addressable, substitute
912 	 the MEM we just got directly for the old REG.
913 
914 	 If it is not addressable but is a constant or the sum of a hard reg
915 	 and constant, it is probably not addressable because the constant is
916 	 out of range, in that case record the address; we will generate
917 	 hairy code to compute the address in a register each time it is
918 	 needed.  Similarly if it is a hard register, but one that is not
919 	 valid as an address register.
920 
921 	 If the location is not addressable, but does not have one of the
922 	 above forms, assign a stack slot.  We have to do this to avoid the
923 	 potential of producing lots of reloads if, e.g., a location involves
924 	 a pseudo that didn't get a hard register and has an equivalent memory
925 	 location that also involves a pseudo that didn't get a hard register.
926 
927 	 Perhaps at some point we will improve reload_when_needed handling
928 	 so this problem goes away.  But that's very hairy.  */
929 
930       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
931 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
932 	  {
933 	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
934 
935 	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
936 					 XEXP (x, 0)))
937 	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
938 	    else if (CONSTANT_P (XEXP (x, 0))
939 		     || (REG_P (XEXP (x, 0))
940 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
941 		     || (GET_CODE (XEXP (x, 0)) == PLUS
942 			 && REG_P (XEXP (XEXP (x, 0), 0))
943 			 && (REGNO (XEXP (XEXP (x, 0), 0))
944 			     < FIRST_PSEUDO_REGISTER)
945 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
946 	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
947 	    else
948 	      {
949 		/* Make a new stack slot.  Then indicate that something
950 		   changed so we go back and recompute offsets for
951 		   eliminable registers because the allocation of memory
952 		   below might change some offset.  reg_equiv_{mem,address}
953 		   will be set up for this pseudo on the next pass around
954 		   the loop.  */
955 		reg_equiv_memory_loc[i] = 0;
956 		reg_equiv_init[i] = 0;
957 		alter_reg (i, -1);
958 	      }
959 	  }
960 
961       if (caller_save_needed)
962 	setup_save_areas ();
963 
964       /* If we allocated another stack slot, redo elimination bookkeeping.  */
965       if (starting_frame_size != get_frame_size ())
966 	continue;
967 
968       if (caller_save_needed)
969 	{
970 	  save_call_clobbered_regs ();
971 	  /* That might have allocated new insn_chain structures.  */
972 	  reload_firstobj = obstack_alloc (&reload_obstack, 0);
973 	}
974 
975       calculate_needs_all_insns (global);
976 
977       CLEAR_REG_SET (&spilled_pseudos);
978       did_spill = 0;
979 
980       something_changed = 0;
981 
982       /* If we allocated any new memory locations, make another pass
983 	 since it might have changed elimination offsets.  */
984       if (starting_frame_size != get_frame_size ())
985 	something_changed = 1;
986 
987       /* Even if the frame size remained the same, we might still have
988 	 changed elimination offsets, e.g. if find_reloads called
989 	 force_const_mem requiring the back end to allocate a constant
990 	 pool base register that needs to be saved on the stack.  */
991       else if (!verify_initial_elim_offsets ())
992 	something_changed = 1;
993 
994       {
995 	HARD_REG_SET to_spill;
996 	CLEAR_HARD_REG_SET (to_spill);
997 	update_eliminables (&to_spill);
998 	AND_COMPL_HARD_REG_SET(used_spill_regs, to_spill);
999 
1000 	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1001 	  if (TEST_HARD_REG_BIT (to_spill, i))
1002 	    {
1003 	      spill_hard_reg (i, 1);
1004 	      did_spill = 1;
1005 
1006 	      /* Regardless of the state of spills, if we previously had
1007 		 a register that we thought we could eliminate, but now can
1008 		 not eliminate, we must run another pass.
1009 
1010 		 Consider pseudos which have an entry in reg_equiv_* which
1011 		 reference an eliminable register.  We must make another pass
1012 		 to update reg_equiv_* so that we do not substitute in the
1013 		 old value from when we thought the elimination could be
1014 		 performed.  */
1015 	      something_changed = 1;
1016 	    }
1017       }
1018 
1019       select_reload_regs ();
1020       if (failure)
1021 	goto failed;
1022 
1023       if (insns_need_reload != 0 || did_spill)
1024 	something_changed |= finish_spills (global);
1025 
1026       if (! something_changed)
1027 	break;
1028 
1029       if (caller_save_needed)
1030 	delete_caller_save_insns ();
1031 
1032       obstack_free (&reload_obstack, reload_firstobj);
1033     }
1034 
1035   /* If global-alloc was run, notify it of any register eliminations we have
1036      done.  */
1037   if (global)
1038     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1039       if (ep->can_eliminate)
1040 	mark_elimination (ep->from, ep->to);
1041 
1042   /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1043      If that insn didn't set the register (i.e., it copied the register to
1044      memory), just delete that insn instead of the equivalencing insn plus
1045      anything now dead.  If we call delete_dead_insn on that insn, we may
1046      delete the insn that actually sets the register if the register dies
1047      there and that is incorrect.  */
1048 
1049   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1050     {
1051       if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1052 	{
1053 	  rtx list;
1054 	  for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1055 	    {
1056 	      rtx equiv_insn = XEXP (list, 0);
1057 
1058 	      /* If we already deleted the insn or if it may trap, we can't
1059 		 delete it.  The latter case shouldn't happen, but can
1060 		 if an insn has a variable address, gets a REG_EH_REGION
1061 		 note added to it, and then gets converted into a load
1062 		 from a constant address.  */
1063 	      if (NOTE_P (equiv_insn)
1064 		  || can_throw_internal (equiv_insn))
1065 		;
1066 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1067 		delete_dead_insn (equiv_insn);
1068 	      else
1069 		SET_INSN_DELETED (equiv_insn);
1070 	    }
1071 	}
1072     }
1073 
1074   /* Use the reload registers where necessary
1075      by generating move instructions to move the must-be-register
1076      values into or out of the reload registers.  */
1077 
1078   if (insns_need_reload != 0 || something_needs_elimination
1079       || something_needs_operands_changed)
1080     {
1081       HOST_WIDE_INT old_frame_size = get_frame_size ();
1082 
1083       reload_as_needed (global);
1084 
1085       gcc_assert (old_frame_size == get_frame_size ());
1086 
1087       gcc_assert (verify_initial_elim_offsets ());
1088     }
1089 
1090   /* If we were able to eliminate the frame pointer, show that it is no
1091      longer live at the start of any basic block.  If it is live by
1092      virtue of being in a pseudo, that pseudo will be marked live
1093      and hence the frame pointer will be known to be live via that
1094      pseudo.  */
1095 
1096   if (! frame_pointer_needed)
1097     FOR_EACH_BB (bb)
1098       CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start,
1099 			   HARD_FRAME_POINTER_REGNUM);
1100 
1101   /* Come here (with failure set nonzero) if we can't get enough spill
1102      regs.  */
1103  failed:
1104 
1105   CLEAR_REG_SET (&spilled_pseudos);
1106   reload_in_progress = 0;
1107 
1108   /* Now eliminate all pseudo regs by modifying them into
1109      their equivalent memory references.
1110      The REG-rtx's for the pseudos are modified in place,
1111      so all insns that used to refer to them now refer to memory.
1112 
1113      For a reg that has a reg_equiv_address, all those insns
1114      were changed by reloading so that no insns refer to it any longer;
1115      but the DECL_RTL of a variable decl may refer to it,
1116      and if so this causes the debugging info to mention the variable.  */
1117 
1118   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1119     {
1120       rtx addr = 0;
1121 
1122       if (reg_equiv_mem[i])
1123 	addr = XEXP (reg_equiv_mem[i], 0);
1124 
1125       if (reg_equiv_address[i])
1126 	addr = reg_equiv_address[i];
1127 
1128       if (addr)
1129 	{
1130 	  if (reg_renumber[i] < 0)
1131 	    {
1132 	      rtx reg = regno_reg_rtx[i];
1133 
1134 	      REG_USERVAR_P (reg) = 0;
1135 	      PUT_CODE (reg, MEM);
1136 	      XEXP (reg, 0) = addr;
1137 	      if (reg_equiv_memory_loc[i])
1138 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1139 	      else
1140 		{
1141 		  MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1142 		  MEM_ATTRS (reg) = 0;
1143 		}
1144 	      MEM_NOTRAP_P (reg) = 1;
1145 	    }
1146 	  else if (reg_equiv_mem[i])
1147 	    XEXP (reg_equiv_mem[i], 0) = addr;
1148 	}
1149     }
1150 
1151   /* We must set reload_completed now since the cleanup_subreg_operands call
1152      below will re-recognize each insn and reload may have generated insns
1153      which are only valid during and after reload.  */
1154   reload_completed = 1;
1155 
1156   /* Make a pass over all the insns and delete all USEs which we inserted
1157      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1158      notes.  Delete all CLOBBER insns, except those that refer to the return
1159      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1160      from misarranging variable-array code, and simplify (subreg (reg))
1161      operands.  Also remove all REG_RETVAL and REG_LIBCALL notes since they
1162      are no longer useful or accurate.  Strip and regenerate REG_INC notes
1163      that may have been moved around.  */
1164 
1165   for (insn = first; insn; insn = NEXT_INSN (insn))
1166     if (INSN_P (insn))
1167       {
1168 	rtx *pnote;
1169 
1170 	if (CALL_P (insn))
1171 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1172 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1173 
1174 	if ((GET_CODE (PATTERN (insn)) == USE
1175 	     /* We mark with QImode USEs introduced by reload itself.  */
1176 	     && (GET_MODE (insn) == QImode
1177 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1178 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1179 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1180 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1181 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1182 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1183 				!= stack_pointer_rtx))
1184 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1185 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1186 	  {
1187 	    delete_insn (insn);
1188 	    continue;
1189 	  }
1190 
1191 	/* Some CLOBBERs may survive until here and still reference unassigned
1192 	   pseudos with const equivalent, which may in turn cause ICE in later
1193 	   passes if the reference remains in place.  */
1194 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1195 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1196 			      VOIDmode, PATTERN (insn));
1197 
1198 	/* Discard obvious no-ops, even without -O.  This optimization
1199 	   is fast and doesn't interfere with debugging.  */
1200 	if (NONJUMP_INSN_P (insn)
1201 	    && GET_CODE (PATTERN (insn)) == SET
1202 	    && REG_P (SET_SRC (PATTERN (insn)))
1203 	    && REG_P (SET_DEST (PATTERN (insn)))
1204 	    && (REGNO (SET_SRC (PATTERN (insn)))
1205 		== REGNO (SET_DEST (PATTERN (insn)))))
1206 	  {
1207 	    delete_insn (insn);
1208 	    continue;
1209 	  }
1210 
1211 	pnote = &REG_NOTES (insn);
1212 	while (*pnote != 0)
1213 	  {
1214 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1215 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1216 		|| REG_NOTE_KIND (*pnote) == REG_INC
1217 		|| REG_NOTE_KIND (*pnote) == REG_RETVAL
1218 		|| REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1219 	      *pnote = XEXP (*pnote, 1);
1220 	    else
1221 	      pnote = &XEXP (*pnote, 1);
1222 	  }
1223 
1224 #ifdef AUTO_INC_DEC
1225 	add_auto_inc_notes (insn, PATTERN (insn));
1226 #endif
1227 
1228 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1229 	cleanup_subreg_operands (insn);
1230 
1231 	/* Clean up invalid ASMs so that they don't confuse later passes.
1232 	   See PR 21299.  */
1233 	if (asm_noperands (PATTERN (insn)) >= 0)
1234 	  {
1235 	    extract_insn (insn);
1236 	    if (!constrain_operands (1))
1237 	      {
1238 		error_for_asm (insn,
1239 			       "%<asm%> operand has impossible constraints");
1240 		delete_insn (insn);
1241 		continue;
1242 	      }
1243 	  }
1244       }
1245 
1246   /* If we are doing stack checking, give a warning if this function's
1247      frame size is larger than we expect.  */
1248   if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1249     {
1250       HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1251       static int verbose_warned = 0;
1252 
1253       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1254 	if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1255 	  size += UNITS_PER_WORD;
1256 
1257       if (size > STACK_CHECK_MAX_FRAME_SIZE)
1258 	{
1259 	  warning (0, "frame size too large for reliable stack checking");
1260 	  if (! verbose_warned)
1261 	    {
1262 	      warning (0, "try reducing the number of local variables");
1263 	      verbose_warned = 1;
1264 	    }
1265 	}
1266     }
1267 
1268   /* Indicate that we no longer have known memory locations or constants.  */
1269   if (reg_equiv_constant)
1270     free (reg_equiv_constant);
1271   if (reg_equiv_invariant)
1272     free (reg_equiv_invariant);
1273   reg_equiv_constant = 0;
1274   reg_equiv_invariant = 0;
1275   VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1276   reg_equiv_memory_loc = 0;
1277 
1278   if (offsets_known_at)
1279     free (offsets_known_at);
1280   if (offsets_at)
1281     free (offsets_at);
1282 
1283   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1284     if (reg_equiv_alt_mem_list[i])
1285       free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1286   free (reg_equiv_alt_mem_list);
1287 
1288   free (reg_equiv_mem);
1289   reg_equiv_init = 0;
1290   free (reg_equiv_address);
1291   free (reg_max_ref_width);
1292   free (reg_old_renumber);
1293   free (pseudo_previous_regs);
1294   free (pseudo_forbidden_regs);
1295 
1296   CLEAR_HARD_REG_SET (used_spill_regs);
1297   for (i = 0; i < n_spills; i++)
1298     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1299 
1300   /* Free all the insn_chain structures at once.  */
1301   obstack_free (&reload_obstack, reload_startobj);
1302   unused_insn_chains = 0;
1303   fixup_abnormal_edges ();
1304 
1305   /* Replacing pseudos with their memory equivalents might have
1306      created shared rtx.  Subsequent passes would get confused
1307      by this, so unshare everything here.  */
1308   unshare_all_rtl_again (first);
1309 
1310 #ifdef STACK_BOUNDARY
1311   /* init_emit has set the alignment of the hard frame pointer
1312      to STACK_BOUNDARY.  It is very likely no longer valid if
1313      the hard frame pointer was used for register allocation.  */
1314   if (!frame_pointer_needed)
1315     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1316 #endif
1317 
1318   return failure;
1319 }
1320 
1321 /* Yet another special case.  Unfortunately, reg-stack forces people to
1322    write incorrect clobbers in asm statements.  These clobbers must not
1323    cause the register to appear in bad_spill_regs, otherwise we'll call
1324    fatal_insn later.  We clear the corresponding regnos in the live
1325    register sets to avoid this.
1326    The whole thing is rather sick, I'm afraid.  */
1327 
1328 static void
1329 maybe_fix_stack_asms (void)
1330 {
1331 #ifdef STACK_REGS
1332   const char *constraints[MAX_RECOG_OPERANDS];
1333   enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1334   struct insn_chain *chain;
1335 
1336   for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1337     {
1338       int i, noperands;
1339       HARD_REG_SET clobbered, allowed;
1340       rtx pat;
1341 
1342       if (! INSN_P (chain->insn)
1343 	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1344 	continue;
1345       pat = PATTERN (chain->insn);
1346       if (GET_CODE (pat) != PARALLEL)
1347 	continue;
1348 
1349       CLEAR_HARD_REG_SET (clobbered);
1350       CLEAR_HARD_REG_SET (allowed);
1351 
1352       /* First, make a mask of all stack regs that are clobbered.  */
1353       for (i = 0; i < XVECLEN (pat, 0); i++)
1354 	{
1355 	  rtx t = XVECEXP (pat, 0, i);
1356 	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1357 	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1358 	}
1359 
1360       /* Get the operand values and constraints out of the insn.  */
1361       decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1362 			   constraints, operand_mode);
1363 
1364       /* For every operand, see what registers are allowed.  */
1365       for (i = 0; i < noperands; i++)
1366 	{
1367 	  const char *p = constraints[i];
1368 	  /* For every alternative, we compute the class of registers allowed
1369 	     for reloading in CLS, and merge its contents into the reg set
1370 	     ALLOWED.  */
1371 	  int cls = (int) NO_REGS;
1372 
1373 	  for (;;)
1374 	    {
1375 	      char c = *p;
1376 
1377 	      if (c == '\0' || c == ',' || c == '#')
1378 		{
1379 		  /* End of one alternative - mark the regs in the current
1380 		     class, and reset the class.  */
1381 		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1382 		  cls = NO_REGS;
1383 		  p++;
1384 		  if (c == '#')
1385 		    do {
1386 		      c = *p++;
1387 		    } while (c != '\0' && c != ',');
1388 		  if (c == '\0')
1389 		    break;
1390 		  continue;
1391 		}
1392 
1393 	      switch (c)
1394 		{
1395 		case '=': case '+': case '*': case '%': case '?': case '!':
1396 		case '0': case '1': case '2': case '3': case '4': case 'm':
1397 		case '<': case '>': case 'V': case 'o': case '&': case 'E':
1398 		case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1399 		case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1400 		case 'P':
1401 		  break;
1402 
1403 		case 'p':
1404 		  cls = (int) reg_class_subunion[cls]
1405 		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1406 		  break;
1407 
1408 		case 'g':
1409 		case 'r':
1410 		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1411 		  break;
1412 
1413 		default:
1414 		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1415 		    cls = (int) reg_class_subunion[cls]
1416 		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1417 		  else
1418 		    cls = (int) reg_class_subunion[cls]
1419 		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1420 		}
1421 	      p += CONSTRAINT_LEN (c, p);
1422 	    }
1423 	}
1424       /* Those of the registers which are clobbered, but allowed by the
1425 	 constraints, must be usable as reload registers.  So clear them
1426 	 out of the life information.  */
1427       AND_HARD_REG_SET (allowed, clobbered);
1428       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1429 	if (TEST_HARD_REG_BIT (allowed, i))
1430 	  {
1431 	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1432 	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1433 	  }
1434     }
1435 
1436 #endif
1437 }
1438 
1439 /* Copy the global variables n_reloads and rld into the corresponding elts
1440    of CHAIN.  */
1441 static void
1442 copy_reloads (struct insn_chain *chain)
1443 {
1444   chain->n_reloads = n_reloads;
1445   chain->rld = obstack_alloc (&reload_obstack,
1446 			      n_reloads * sizeof (struct reload));
1447   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1448   reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1449 }
1450 
1451 /* Walk the chain of insns, and determine for each whether it needs reloads
1452    and/or eliminations.  Build the corresponding insns_need_reload list, and
1453    set something_needs_elimination as appropriate.  */
1454 static void
1455 calculate_needs_all_insns (int global)
1456 {
1457   struct insn_chain **pprev_reload = &insns_need_reload;
1458   struct insn_chain *chain, *next = 0;
1459 
1460   something_needs_elimination = 0;
1461 
1462   reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1463   for (chain = reload_insn_chain; chain != 0; chain = next)
1464     {
1465       rtx insn = chain->insn;
1466 
1467       next = chain->next;
1468 
1469       /* Clear out the shortcuts.  */
1470       chain->n_reloads = 0;
1471       chain->need_elim = 0;
1472       chain->need_reload = 0;
1473       chain->need_operand_change = 0;
1474 
1475       /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1476 	 include REG_LABEL), we need to see what effects this has on the
1477 	 known offsets at labels.  */
1478 
1479       if (LABEL_P (insn) || JUMP_P (insn)
1480 	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
1481 	set_label_offsets (insn, insn, 0);
1482 
1483       if (INSN_P (insn))
1484 	{
1485 	  rtx old_body = PATTERN (insn);
1486 	  int old_code = INSN_CODE (insn);
1487 	  rtx old_notes = REG_NOTES (insn);
1488 	  int did_elimination = 0;
1489 	  int operands_changed = 0;
1490 	  rtx set = single_set (insn);
1491 
1492 	  /* Skip insns that only set an equivalence.  */
1493 	  if (set && REG_P (SET_DEST (set))
1494 	      && reg_renumber[REGNO (SET_DEST (set))] < 0
1495 	      && (reg_equiv_constant[REGNO (SET_DEST (set))]
1496 		  || reg_equiv_invariant[REGNO (SET_DEST (set))])
1497 	      && reg_equiv_init[REGNO (SET_DEST (set))])
1498 	    continue;
1499 
1500 	  /* If needed, eliminate any eliminable registers.  */
1501 	  if (num_eliminable || num_eliminable_invariants)
1502 	    did_elimination = eliminate_regs_in_insn (insn, 0);
1503 
1504 	  /* Analyze the instruction.  */
1505 	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1506 					   global, spill_reg_order);
1507 
1508 	  /* If a no-op set needs more than one reload, this is likely
1509 	     to be something that needs input address reloads.  We
1510 	     can't get rid of this cleanly later, and it is of no use
1511 	     anyway, so discard it now.
1512 	     We only do this when expensive_optimizations is enabled,
1513 	     since this complements reload inheritance / output
1514 	     reload deletion, and it can make debugging harder.  */
1515 	  if (flag_expensive_optimizations && n_reloads > 1)
1516 	    {
1517 	      rtx set = single_set (insn);
1518 	      if (set
1519 		  && SET_SRC (set) == SET_DEST (set)
1520 		  && REG_P (SET_SRC (set))
1521 		  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1522 		{
1523 		  delete_insn (insn);
1524 		  /* Delete it from the reload chain.  */
1525 		  if (chain->prev)
1526 		    chain->prev->next = next;
1527 		  else
1528 		    reload_insn_chain = next;
1529 		  if (next)
1530 		    next->prev = chain->prev;
1531 		  chain->next = unused_insn_chains;
1532 		  unused_insn_chains = chain;
1533 		  continue;
1534 		}
1535 	    }
1536 	  if (num_eliminable)
1537 	    update_eliminable_offsets ();
1538 
1539 	  /* Remember for later shortcuts which insns had any reloads or
1540 	     register eliminations.  */
1541 	  chain->need_elim = did_elimination;
1542 	  chain->need_reload = n_reloads > 0;
1543 	  chain->need_operand_change = operands_changed;
1544 
1545 	  /* Discard any register replacements done.  */
1546 	  if (did_elimination)
1547 	    {
1548 	      obstack_free (&reload_obstack, reload_insn_firstobj);
1549 	      PATTERN (insn) = old_body;
1550 	      INSN_CODE (insn) = old_code;
1551 	      REG_NOTES (insn) = old_notes;
1552 	      something_needs_elimination = 1;
1553 	    }
1554 
1555 	  something_needs_operands_changed |= operands_changed;
1556 
1557 	  if (n_reloads != 0)
1558 	    {
1559 	      copy_reloads (chain);
1560 	      *pprev_reload = chain;
1561 	      pprev_reload = &chain->next_need_reload;
1562 	    }
1563 	}
1564     }
1565   *pprev_reload = 0;
1566 }
1567 
1568 /* Comparison function for qsort to decide which of two reloads
1569    should be handled first.  *P1 and *P2 are the reload numbers.  */
1570 
1571 static int
1572 reload_reg_class_lower (const void *r1p, const void *r2p)
1573 {
1574   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1575   int t;
1576 
1577   /* Consider required reloads before optional ones.  */
1578   t = rld[r1].optional - rld[r2].optional;
1579   if (t != 0)
1580     return t;
1581 
1582   /* Count all solitary classes before non-solitary ones.  */
1583   t = ((reg_class_size[(int) rld[r2].class] == 1)
1584        - (reg_class_size[(int) rld[r1].class] == 1));
1585   if (t != 0)
1586     return t;
1587 
1588   /* Aside from solitaires, consider all multi-reg groups first.  */
1589   t = rld[r2].nregs - rld[r1].nregs;
1590   if (t != 0)
1591     return t;
1592 
1593   /* Consider reloads in order of increasing reg-class number.  */
1594   t = (int) rld[r1].class - (int) rld[r2].class;
1595   if (t != 0)
1596     return t;
1597 
1598   /* If reloads are equally urgent, sort by reload number,
1599      so that the results of qsort leave nothing to chance.  */
1600   return r1 - r2;
1601 }
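/* An illustrative ordering (reload classes and sizes assumed, not taken from
   any particular target): given

       rld[0]: optional, class GENERAL_REGS,       nregs 1
       rld[1]: required, class GENERAL_REGS,       nregs 2
       rld[2]: required, a single-register class,  nregs 1

   sorting with reload_reg_class_lower yields the order 2, 1, 0: required
   reloads before optional ones, solitary classes before others, wider
   groups before narrower ones, and the reload number as the final
   tie-breaker.  */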
1602 
1603 /* The cost of spilling each hard reg.  */
1604 static int spill_cost[FIRST_PSEUDO_REGISTER];
1605 
1606 /* When spilling multiple hard registers, we use SPILL_COST for the first
1607    spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1608    is accumulated only for the first hard reg of a multi-reg pseudo.  */
1609 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
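/* A small worked example (frequencies and register numbers assumed): if
   pseudo P has REG_FREQ 10 and occupies hard regs 4 and 5, count_pseudo
   below adds 10 to both spill_cost[4] and spill_cost[5], but adds 10 to
   spill_add_cost[4] only.  Spilling reg 4 already evicts P, so spilling
   reg 5 as well adds no further cost; find_reg sums the spill_cost of the
   first reg and the spill_add_cost of the remaining regs to model exactly
   that.  */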
1610 
1611 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1612 
1613 static void
1614 count_pseudo (int reg)
1615 {
1616   int freq = REG_FREQ (reg);
1617   int r = reg_renumber[reg];
1618   int nregs;
1619 
1620   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1621       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1622     return;
1623 
1624   SET_REGNO_REG_SET (&pseudos_counted, reg);
1625 
1626   gcc_assert (r >= 0);
1627 
1628   spill_add_cost[r] += freq;
1629 
1630   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1631   while (nregs-- > 0)
1632     spill_cost[r + nregs] += freq;
1633 }
1634 
1635 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1636    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1637 
1638 static void
1639 order_regs_for_reload (struct insn_chain *chain)
1640 {
1641   unsigned i;
1642   HARD_REG_SET used_by_pseudos;
1643   HARD_REG_SET used_by_pseudos2;
1644   reg_set_iterator rsi;
1645 
1646   COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1647 
1648   memset (spill_cost, 0, sizeof spill_cost);
1649   memset (spill_add_cost, 0, sizeof spill_add_cost);
1650 
1651   /* Count number of uses of each hard reg by pseudo regs allocated to it
1652      and then order them by decreasing use.  First exclude hard registers
1653      that are live in or across this insn.  */
1654 
1655   REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1656   REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1657   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1658   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1659 
1660   /* Now find out which pseudos are live in or across this insn, and
1661      update the spill cost arrays for the hard regs they occupy.  */
1662   CLEAR_REG_SET (&pseudos_counted);
1663 
1664   EXECUTE_IF_SET_IN_REG_SET
1665     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1666     {
1667       count_pseudo (i);
1668     }
1669   EXECUTE_IF_SET_IN_REG_SET
1670     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1671     {
1672       count_pseudo (i);
1673     }
1674   CLEAR_REG_SET (&pseudos_counted);
1675 }
1676 
1677 /* Vector of reload-numbers showing the order in which the reloads should
1678    be processed.  */
1679 static short reload_order[MAX_RELOADS];
1680 
1681 /* This is used to keep track of the spill regs used in one insn.  */
1682 static HARD_REG_SET used_spill_regs_local;
1683 
1684 /* We decided to spill hard register SPILLED, which has a size of
1685    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1686    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1687    update SPILL_COST/SPILL_ADD_COST.  */
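/* The early return in this function is an interval-overlap test: REG
   occupies hard regs [r, r + nregs) and the spilled block is
   [spilled, spilled + spilled_nregs).  For example (numbers assumed), a
   pseudo in regs 4-5 is unaffected by spilling regs 6-7, because
   4 + 2 <= 6 and the two ranges do not overlap.  */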
1688 
1689 static void
1690 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1691 {
1692   int r = reg_renumber[reg];
1693   int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1694 
1695   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1696       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1697     return;
1698 
1699   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1700 
1701   spill_add_cost[r] -= REG_FREQ (reg);
1702   while (nregs-- > 0)
1703     spill_cost[r + nregs] -= REG_FREQ (reg);
1704 }
1705 
1706 /* Find reload register to use for reload number ORDER.  */
1707 
1708 static int
1709 find_reg (struct insn_chain *chain, int order)
1710 {
1711   int rnum = reload_order[order];
1712   struct reload *rl = rld + rnum;
1713   int best_cost = INT_MAX;
1714   int best_reg = -1;
1715   unsigned int i, j;
1716   int k;
1717   HARD_REG_SET not_usable;
1718   HARD_REG_SET used_by_other_reload;
1719   reg_set_iterator rsi;
1720 
1721   COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1722   IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1723   IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1724 
1725   CLEAR_HARD_REG_SET (used_by_other_reload);
1726   for (k = 0; k < order; k++)
1727     {
1728       int other = reload_order[k];
1729 
1730       if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1731 	for (j = 0; j < rld[other].nregs; j++)
1732 	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1733     }
1734 
1735   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1736     {
1737       unsigned int regno = i;
1738 
1739       if (! TEST_HARD_REG_BIT (not_usable, regno)
1740 	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1741 	  && HARD_REGNO_MODE_OK (regno, rl->mode))
1742 	{
1743 	  int this_cost = spill_cost[regno];
1744 	  int ok = 1;
1745 	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1746 
1747 	  for (j = 1; j < this_nregs; j++)
1748 	    {
1749 	      this_cost += spill_add_cost[regno + j];
1750 	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1751 		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1752 		ok = 0;
1753 	    }
1754 	  if (! ok)
1755 	    continue;
1756 	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1757 	    this_cost--;
1758 	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1759 	    this_cost--;
1760 	  if (this_cost < best_cost
1761 	      /* Among registers with equal cost, prefer caller-saved ones, or
1762 		 use REG_ALLOC_ORDER if it is defined.  */
1763 	      || (this_cost == best_cost
1764 #ifdef REG_ALLOC_ORDER
1765 		  && (inv_reg_alloc_order[regno]
1766 		      < inv_reg_alloc_order[best_reg])
1767 #else
1768 		  && call_used_regs[regno]
1769 		  && ! call_used_regs[best_reg]
1770 #endif
1771 		  ))
1772 	    {
1773 	      best_reg = regno;
1774 	      best_cost = this_cost;
1775 	    }
1776 	}
1777     }
1778   if (best_reg == -1)
1779     return 0;
1780 
1781   if (dump_file)
1782     fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1783 
1784   rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1785   rl->regno = best_reg;
1786 
1787   EXECUTE_IF_SET_IN_REG_SET
1788     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1789     {
1790       count_spilled_pseudo (best_reg, rl->nregs, j);
1791     }
1792 
1793   EXECUTE_IF_SET_IN_REG_SET
1794     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1795     {
1796       count_spilled_pseudo (best_reg, rl->nregs, j);
1797     }
1798 
1799   for (i = 0; i < rl->nregs; i++)
1800     {
1801       gcc_assert (spill_cost[best_reg + i] == 0);
1802       gcc_assert (spill_add_cost[best_reg + i] == 0);
1803       SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1804     }
1805   return 1;
1806 }
1807 
1808 /* Find more reload regs to satisfy the remaining need of an insn, which
1809    is given by CHAIN.
1810    Do it by ascending class number, since otherwise a reg
1811    might be spilled for a big class and might fail to count
1812    for a smaller class even though it belongs to that class.  */
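/* For example (classes assumed): if one reload accepts any GENERAL_REGS
   register while another accepts only a one-register class, handling the
   wider reload first could hand it the sole member of the small class even
   though many other registers would have satisfied it; the ordering below
   avoids that.  */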
1813 
1814 static void
1815 find_reload_regs (struct insn_chain *chain)
1816 {
1817   int i;
1818 
1819   /* In order to be certain of getting the registers we need,
1820      we must sort the reloads into order of increasing register class.
1821      Then our grabbing of reload registers will parallel the process
1822      that provided the reload registers.  */
1823   for (i = 0; i < chain->n_reloads; i++)
1824     {
1825       /* Show whether this reload already has a hard reg.  */
1826       if (chain->rld[i].reg_rtx)
1827 	{
1828 	  int regno = REGNO (chain->rld[i].reg_rtx);
1829 	  chain->rld[i].regno = regno;
1830 	  chain->rld[i].nregs
1831 	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1832 	}
1833       else
1834 	chain->rld[i].regno = -1;
1835       reload_order[i] = i;
1836     }
1837 
1838   n_reloads = chain->n_reloads;
1839   memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1840 
1841   CLEAR_HARD_REG_SET (used_spill_regs_local);
1842 
1843   if (dump_file)
1844     fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1845 
1846   qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1847 
1848   /* Compute the order of preference for hard registers to spill.  */
1849 
1850   order_regs_for_reload (chain);
1851 
1852   for (i = 0; i < n_reloads; i++)
1853     {
1854       int r = reload_order[i];
1855 
1856       /* Ignore reloads that got marked inoperative.  */
1857       if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1858 	  && ! rld[r].optional
1859 	  && rld[r].regno == -1)
1860 	if (! find_reg (chain, i))
1861 	  {
1862 	    if (dump_file)
1863 	      fprintf(dump_file, "reload failure for reload %d\n", r);
1864 	    spill_failure (chain->insn, rld[r].class);
1865 	    failure = 1;
1866 	    return;
1867 	  }
1868     }
1869 
1870   COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1871   IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1872 
1873   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1874 }
1875 
1876 static void
1877 select_reload_regs (void)
1878 {
1879   struct insn_chain *chain;
1880 
1881   /* Try to satisfy the needs for each insn.  */
1882   for (chain = insns_need_reload; chain != 0;
1883        chain = chain->next_need_reload)
1884     find_reload_regs (chain);
1885 }
1886 
1887 /* Delete all insns that were inserted by emit_caller_save_insns during
1888    this iteration.  */
1889 static void
1890 delete_caller_save_insns (void)
1891 {
1892   struct insn_chain *c = reload_insn_chain;
1893 
1894   while (c != 0)
1895     {
1896       while (c != 0 && c->is_caller_save_insn)
1897 	{
1898 	  struct insn_chain *next = c->next;
1899 	  rtx insn = c->insn;
1900 
1901 	  if (c == reload_insn_chain)
1902 	    reload_insn_chain = next;
1903 	  delete_insn (insn);
1904 
1905 	  if (next)
1906 	    next->prev = c->prev;
1907 	  if (c->prev)
1908 	    c->prev->next = next;
1909 	  c->next = unused_insn_chains;
1910 	  unused_insn_chains = c;
1911 	  c = next;
1912 	}
1913       if (c != 0)
1914 	c = c->next;
1915     }
1916 }
1917 
1918 /* Handle the failure to find a register to spill.
1919    INSN should be one of the insns which needed this particular spill reg.  */
1920 
1921 static void
1922 spill_failure (rtx insn, enum reg_class class)
1923 {
1924   if (asm_noperands (PATTERN (insn)) >= 0)
1925     error_for_asm (insn, "can't find a register in class %qs while "
1926 		   "reloading %<asm%>",
1927 		   reg_class_names[class]);
1928   else
1929     {
1930       error ("unable to find a register to spill in class %qs",
1931 	     reg_class_names[class]);
1932 
1933       if (dump_file)
1934 	{
1935 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
1936 	  debug_reload_to_stream (dump_file);
1937 	}
1938       fatal_insn ("this is the insn:", insn);
1939     }
1940 }
1941 
1942 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1943    data that is dead in INSN.  */
1944 
1945 static void
1946 delete_dead_insn (rtx insn)
1947 {
1948   rtx prev = prev_real_insn (insn);
1949   rtx prev_dest;
1950 
1951   /* If the previous insn sets a register that dies in our insn, delete it
1952      too.  */
1953   if (prev && GET_CODE (PATTERN (prev)) == SET
1954       && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
1955       && reg_mentioned_p (prev_dest, PATTERN (insn))
1956       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1957       && ! side_effects_p (SET_SRC (PATTERN (prev))))
1958     delete_dead_insn (prev);
1959 
1960   SET_INSN_DELETED (insn);
1961 }
1962 
1963 /* Modify the home of pseudo-reg I.
1964    The new home is present in reg_renumber[I].
1965 
1966    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1967    or it may be -1, meaning there is none or it is not relevant.
1968    This is used so that all pseudos spilled from a given hard reg
1969    can share one stack slot.  */
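/* For example (register numbers assumed): if pseudos 70 and 71 are both
   spilled from hard reg 3, the second call may reuse the slot recorded in
   spill_stack_slot[3] for the first, provided its recorded width, mode and
   alignment still suffice, as checked below.  */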
1970 
1971 static void
1972 alter_reg (int i, int from_reg)
1973 {
1974   /* When outputting an inline function, this can happen
1975      for a reg that isn't actually used.  */
1976   if (regno_reg_rtx[i] == 0)
1977     return;
1978 
1979   /* If the reg got changed to a MEM at rtl-generation time,
1980      ignore it.  */
1981   if (!REG_P (regno_reg_rtx[i]))
1982     return;
1983 
1984   /* Modify the reg-rtx to contain the new hard reg
1985      number or else to contain its pseudo reg number.  */
1986   REGNO (regno_reg_rtx[i])
1987     = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1988 
1989   /* If we have a pseudo that is needed but has no hard reg or equivalent,
1990      allocate a stack slot for it.  */
1991 
1992   if (reg_renumber[i] < 0
1993       && REG_N_REFS (i) > 0
1994       && reg_equiv_constant[i] == 0
1995       && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
1996       && reg_equiv_memory_loc[i] == 0)
1997     {
1998       rtx x;
1999       enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2000       unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2001       unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2002       unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2003       unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2004       int adjust = 0;
2005 
2006       /* Each pseudo reg has an inherent size which comes from its own mode,
2007 	 and a total size which provides room for paradoxical subregs
2008 	 which refer to the pseudo reg in wider modes.
2009 
2010 	 We can use a slot already allocated if it provides both
2011 	 enough inherent space and enough total space.
2012 	 Otherwise, we allocate a new slot, making sure that it has no less
2013	 inherent space, and no less total space, than the previous slot.  */
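      /* As a worked example (sizes assumed): a pseudo whose own mode is
	 4 bytes wide but which is also referenced through an 8-byte
	 paradoxical subreg has inherent_size == 4 and total_size == 8, so a
	 previously allocated 4-byte slot cannot be reused for it; on a
	 big-endian target, ADJUST is then used so that the 4-byte reference
	 addresses the low-order part of the 8-byte slot, as described
	 further below.  */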
2014       if (from_reg == -1)
2015 	{
2016 	  /* No known place to spill from => no slot to reuse.  */
2017 	  x = assign_stack_local (mode, total_size,
2018 				  min_align > inherent_align
2019 				  || total_size > inherent_size ? -1 : 0);
2020 	  if (BYTES_BIG_ENDIAN)
2021 	    /* Cancel the  big-endian correction done in assign_stack_local.
2022 	       Get the address of the beginning of the slot.
2023 	       This is so we can do a big-endian correction unconditionally
2024 	       below.  */
2025 	    adjust = inherent_size - total_size;
2026 
2027 	  /* Nothing can alias this slot except this pseudo.  */
2028 	  set_mem_alias_set (x, new_alias_set ());
2029 	}
2030 
2031       /* Reuse a stack slot if possible.  */
2032       else if (spill_stack_slot[from_reg] != 0
2033 	       && spill_stack_slot_width[from_reg] >= total_size
2034 	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2035 		   >= inherent_size)
2036 	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2037 	x = spill_stack_slot[from_reg];
2038 
2039       /* Allocate a bigger slot.  */
2040       else
2041 	{
2042 	  /* Compute maximum size needed, both for inherent size
2043 	     and for total size.  */
2044 	  rtx stack_slot;
2045 
2046 	  if (spill_stack_slot[from_reg])
2047 	    {
2048 	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2049 		  > inherent_size)
2050 		mode = GET_MODE (spill_stack_slot[from_reg]);
2051 	      if (spill_stack_slot_width[from_reg] > total_size)
2052 		total_size = spill_stack_slot_width[from_reg];
2053 	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2054 		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2055 	    }
2056 
2057 	  /* Make a slot with that size.  */
2058 	  x = assign_stack_local (mode, total_size,
2059 				  min_align > inherent_align
2060 				  || total_size > inherent_size ? -1 : 0);
2061 	  stack_slot = x;
2062 
2063 	  /* All pseudos mapped to this slot can alias each other.  */
2064 	  if (spill_stack_slot[from_reg])
2065 	    set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2066 	  else
2067 	    set_mem_alias_set (x, new_alias_set ());
2068 
2069 	  if (BYTES_BIG_ENDIAN)
2070 	    {
2071 	      /* Cancel the  big-endian correction done in assign_stack_local.
2072 		 Get the address of the beginning of the slot.
2073 		 This is so we can do a big-endian correction unconditionally
2074 		 below.  */
2075 	      adjust = GET_MODE_SIZE (mode) - total_size;
2076 	      if (adjust)
2077 		stack_slot
2078 		  = adjust_address_nv (x, mode_for_size (total_size
2079 							 * BITS_PER_UNIT,
2080 							 MODE_INT, 1),
2081 				       adjust);
2082 	    }
2083 
2084 	  spill_stack_slot[from_reg] = stack_slot;
2085 	  spill_stack_slot_width[from_reg] = total_size;
2086 	}
2087 
2088       /* On a big endian machine, the "address" of the slot
2089 	 is the address of the low part that fits its inherent mode.  */
2090       if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2091 	adjust += (total_size - inherent_size);
2092 
2093       /* If we have any adjustment to make, or if the stack slot is the
2094 	 wrong mode, make a new stack slot.  */
2095       x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2096 
2097       /* If we have a decl for the original register, set it for the
2098 	 memory.  If this is a shared MEM, make a copy.  */
2099       if (REG_EXPR (regno_reg_rtx[i])
2100 	  && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2101 	{
2102 	  rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2103 
2104 	  /* We can do this only for the DECL's home pseudo, not for
2105 	     any copies of it, since otherwise when the stack slot
2106 	     is reused, nonoverlapping_memrefs_p might think they
2107 	     cannot overlap.  */
2108 	  if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2109 	    {
2110 	      if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2111 		x = copy_rtx (x);
2112 
2113 	      set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2114 	    }
2115 	}
2116 
2117       /* Save the stack slot for later.  */
2118       reg_equiv_memory_loc[i] = x;
2119     }
2120 }
2121 
2122 /* Mark the slots in regs_ever_live for the hard regs
2123    used by pseudo-reg number REGNO.  */
2124 
2125 void
2126 mark_home_live (int regno)
2127 {
2128   int i, lim;
2129 
2130   i = reg_renumber[regno];
2131   if (i < 0)
2132     return;
2133   lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
2134   while (i < lim)
2135     regs_ever_live[i++] = 1;
2136 }
2137 
2138 /* This function handles the tracking of elimination offsets around branches.
2139 
2140    X is a piece of RTL being scanned.
2141 
2142    INSN is the insn that it came from, if any.
2143 
2144    INITIAL_P is nonzero if we are to set the offset to be the initial
2145    offset and zero if we are setting the offset of the label to be the
2146    current offset.  */
2147 
2148 static void
2149 set_label_offsets (rtx x, rtx insn, int initial_p)
2150 {
2151   enum rtx_code code = GET_CODE (x);
2152   rtx tem;
2153   unsigned int i;
2154   struct elim_table *p;
2155 
2156   switch (code)
2157     {
2158     case LABEL_REF:
2159       if (LABEL_REF_NONLOCAL_P (x))
2160 	return;
2161 
2162       x = XEXP (x, 0);
2163 
2164       /* ... fall through ...  */
2165 
2166     case CODE_LABEL:
2167       /* If we know nothing about this label, set the desired offsets.  Note
2168 	 that this sets the offset at a label to be the offset before a label
2169 	 if we don't know anything about the label.  This is not correct for
2170 	 the label after a BARRIER, but is the best guess we can make.  If
2171 	 we guessed wrong, we will suppress an elimination that might have
2172 	 been possible had we been able to guess correctly.  */
2173 
2174       if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2175 	{
2176 	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2177 	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2178 	      = (initial_p ? reg_eliminate[i].initial_offset
2179 		 : reg_eliminate[i].offset);
2180 	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2181 	}
2182 
2183       /* Otherwise, if this is the definition of a label and it is
2184 	 preceded by a BARRIER, set our offsets to the known offset of
2185 	 that label.  */
2186 
2187       else if (x == insn
2188 	       && (tem = prev_nonnote_insn (insn)) != 0
2189 	       && BARRIER_P (tem))
2190 	set_offsets_for_label (insn);
2191       else
2192 	/* If neither of the above cases is true, compare each offset
2193 	   with those previously recorded and suppress any eliminations
2194 	   where the offsets disagree.  */
2195 
2196 	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2197 	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2198 	      != (initial_p ? reg_eliminate[i].initial_offset
2199 		  : reg_eliminate[i].offset))
2200 	    reg_eliminate[i].can_eliminate = 0;
2201 
2202       return;
2203 
2204     case JUMP_INSN:
2205       set_label_offsets (PATTERN (insn), insn, initial_p);
2206 
2207       /* ... fall through ...  */
2208 
2209     case INSN:
2210     case CALL_INSN:
2211       /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2212 	 and hence must have all eliminations at their initial offsets.  */
2213       for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2214 	if (REG_NOTE_KIND (tem) == REG_LABEL)
2215 	  set_label_offsets (XEXP (tem, 0), insn, 1);
2216       return;
2217 
2218     case PARALLEL:
2219     case ADDR_VEC:
2220     case ADDR_DIFF_VEC:
2221       /* Each of the labels in the parallel or address vector must be
2222 	 at their initial offsets.  We want the first field for PARALLEL
2223 	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2224 
2225       for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2226 	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2227 			   insn, initial_p);
2228       return;
2229 
2230     case SET:
2231       /* We only care about setting PC.  If the source is not RETURN,
2232 	 IF_THEN_ELSE, or a label, disable any eliminations not at
2233 	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2234 	 isn't one of those possibilities.  For branches to a label,
2235 	 call ourselves recursively.
2236 
2237 	 Note that this can disable elimination unnecessarily when we have
2238 	 a non-local goto since it will look like a non-constant jump to
2239 	 someplace in the current function.  This isn't a significant
2240 	 problem since such jumps will normally be when all elimination
2241 	 pairs are back to their initial offsets.  */
2242 
2243       if (SET_DEST (x) != pc_rtx)
2244 	return;
2245 
2246       switch (GET_CODE (SET_SRC (x)))
2247 	{
2248 	case PC:
2249 	case RETURN:
2250 	  return;
2251 
2252 	case LABEL_REF:
2253 	  set_label_offsets (SET_SRC (x), insn, initial_p);
2254 	  return;
2255 
2256 	case IF_THEN_ELSE:
2257 	  tem = XEXP (SET_SRC (x), 1);
2258 	  if (GET_CODE (tem) == LABEL_REF)
2259 	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2260 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2261 	    break;
2262 
2263 	  tem = XEXP (SET_SRC (x), 2);
2264 	  if (GET_CODE (tem) == LABEL_REF)
2265 	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2266 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2267 	    break;
2268 	  return;
2269 
2270 	default:
2271 	  break;
2272 	}
2273 
2274       /* If we reach here, all eliminations must be at their initial
2275 	 offset because we are doing a jump to a variable address.  */
2276       for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2277 	if (p->offset != p->initial_offset)
2278 	  p->can_eliminate = 0;
2279       break;
2280 
2281     default:
2282       break;
2283     }
2284 }
2285 
2286 /* Scan X and replace any eliminable registers (such as fp) with a
2287    replacement (such as sp), plus an offset.
2288 
2289    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2290    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2291    MEM, we are allowed to replace a sum of a register and the constant zero
2292    with the register, which we cannot do outside a MEM.  In addition, we need
2293    to record the fact that a register is referenced outside a MEM.
2294 
2295    If INSN is an insn, it is the insn containing X.  If we replace a REG
2296    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2297    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2298    the REG is being modified.
2299 
2300    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2301    That's used when we eliminate in expressions stored in notes.
2302    This means, do not set ref_outside_mem even if the reference
2303    is outside of MEMs.
2304 
2305    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2306    replacements done assuming all offsets are at their initial values.  If
2307    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2308    encounter, return the actual location so that find_reloads will do
2309    the proper thing.  */
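/* A minimal sketch of the effect (register names and offsets assumed): with
   an active frame-pointer-to-stack-pointer elimination whose recorded offset
   is 16, this pass rewrites

       (mem:SI (plus:SI (reg fp) (const_int 8)))

   into

       (mem:SI (plus:SI (reg sp) (const_int 24)))

   and, only inside a MEM, a sum such as (plus (reg fp) (const_int -16))
   collapses to plain (reg sp), as the PLUS case below explains.  */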
2310 
2311 static rtx
2312 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2313 		  bool may_use_invariant)
2314 {
2315   enum rtx_code code = GET_CODE (x);
2316   struct elim_table *ep;
2317   int regno;
2318   rtx new;
2319   int i, j;
2320   const char *fmt;
2321   int copied = 0;
2322 
2323   if (! current_function_decl)
2324     return x;
2325 
2326   switch (code)
2327     {
2328     case CONST_INT:
2329     case CONST_DOUBLE:
2330     case CONST_VECTOR:
2331     case CONST:
2332     case SYMBOL_REF:
2333     case CODE_LABEL:
2334     case PC:
2335     case CC0:
2336     case ASM_INPUT:
2337     case ADDR_VEC:
2338     case ADDR_DIFF_VEC:
2339     case RETURN:
2340       return x;
2341 
2342     case REG:
2343       regno = REGNO (x);
2344 
2345       /* First handle the case where we encounter a bare register that
2346 	 is eliminable.  Replace it with a PLUS.  */
2347       if (regno < FIRST_PSEUDO_REGISTER)
2348 	{
2349 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2350 	       ep++)
2351 	    if (ep->from_rtx == x && ep->can_eliminate)
2352 	      return plus_constant (ep->to_rtx, ep->previous_offset);
2353 
2354 	}
2355       else if (reg_renumber && reg_renumber[regno] < 0
2356 	       && reg_equiv_invariant && reg_equiv_invariant[regno])
2357 	{
2358 	  if (may_use_invariant)
2359 	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2360 			             mem_mode, insn, true);
2361 	  /* There exists at least one use of REGNO that cannot be
2362 	     eliminated.  Prevent the defining insn from being deleted.  */
2363 	  reg_equiv_init[regno] = NULL_RTX;
2364 	  alter_reg (regno, -1);
2365 	}
2366       return x;
2367 
2368     /* You might think handling MINUS in a manner similar to PLUS is a
2369        good idea.  It is not.  It has been tried multiple times and every
2370        time the change has had to be reverted.
2371 
2372        Other parts of reload know a PLUS is special (gen_reload for example)
2373        and require special code to handle a reloaded PLUS operand.
2374 
2375        Also consider backends where the flags register is clobbered by a
2376        MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2377        lea instruction comes to mind).  If we try to reload a MINUS, we
2378        may kill the flags register that was holding a useful value.
2379 
2380        So, please before trying to handle MINUS, consider reload as a
2381        whole instead of this little section as well as the backend issues.  */
2382     case PLUS:
2383       /* If this is the sum of an eliminable register and a constant, rework
2384 	 the sum.  */
2385       if (REG_P (XEXP (x, 0))
2386 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2387 	  && CONSTANT_P (XEXP (x, 1)))
2388 	{
2389 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2390 	       ep++)
2391 	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2392 	      {
2393 		/* The only time we want to replace a PLUS with a REG (this
2394 		   occurs when the constant operand of the PLUS is the negative
2395 		   of the offset) is when we are inside a MEM.  We won't want
2396 		   to do so at other times because that would change the
2397 		   structure of the insn in a way that reload can't handle.
2398 		   We special-case the commonest situation in
2399 		   eliminate_regs_in_insn, so just replace a PLUS with a
2400 		   PLUS here, unless inside a MEM.  */
2401 		if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2402 		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2403 		  return ep->to_rtx;
2404 		else
2405 		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2406 				       plus_constant (XEXP (x, 1),
2407 						      ep->previous_offset));
2408 	      }
2409 
2410 	  /* If the register is not eliminable, we are done since the other
2411 	     operand is a constant.  */
2412 	  return x;
2413 	}
2414 
2415       /* If this is part of an address, we want to bring any constant to the
2416 	 outermost PLUS.  We will do this by doing register replacement in
2417 	 our operands and seeing if a constant shows up in one of them.
2418 
2419 	 Note that there is no risk of modifying the structure of the insn,
2420 	 since we only get called for its operands, thus we are either
2421 	 modifying the address inside a MEM, or something like an address
2422 	 operand of a load-address insn.  */
2423 
2424       {
2425 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2426 	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2427 
2428 	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2429 	  {
2430 	    /* If one side is a PLUS and the other side is a pseudo that
2431 	       didn't get a hard register but has a reg_equiv_constant,
2432 	       we must replace the constant here since it may no longer
2433 	       be in the position of any operand.  */
2434 	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2435 		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2436 		&& reg_renumber[REGNO (new1)] < 0
2437 		&& reg_equiv_constant != 0
2438 		&& reg_equiv_constant[REGNO (new1)] != 0)
2439 	      new1 = reg_equiv_constant[REGNO (new1)];
2440 	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2441 		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2442 		     && reg_renumber[REGNO (new0)] < 0
2443 		     && reg_equiv_constant[REGNO (new0)] != 0)
2444 	      new0 = reg_equiv_constant[REGNO (new0)];
2445 
2446 	    new = form_sum (new0, new1);
2447 
2448 	    /* As above, if we are not inside a MEM we do not want to
2449 	       turn a PLUS into something else.  We might try to do so here
2450 	       for an addition of 0 if we aren't optimizing.  */
2451 	    if (! mem_mode && GET_CODE (new) != PLUS)
2452 	      return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2453 	    else
2454 	      return new;
2455 	  }
2456       }
2457       return x;
2458 
2459     case MULT:
2460       /* If this is the product of an eliminable register and a
2461 	 constant, apply the distribute law and move the constant out
2462 	 so that we have (plus (mult ..) ..).  This is needed in order
2463 	 to keep load-address insns valid.   This case is pathological.
2464 	 We ignore the possibility of overflow here.  */
2465       if (REG_P (XEXP (x, 0))
2466 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2467 	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
2468 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2469 	     ep++)
2470 	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2471 	    {
2472 	      if (! mem_mode
2473 		  /* Refs inside notes don't count for this purpose.  */
2474 		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2475 				      || GET_CODE (insn) == INSN_LIST)))
2476 		ep->ref_outside_mem = 1;
2477 
2478 	      return
2479 		plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2480 			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2481 	    }
2482 
2483       /* ... fall through ...  */
2484 
2485     case CALL:
2486     case COMPARE:
2487     /* See comments before PLUS about handling MINUS.  */
2488     case MINUS:
2489     case DIV:      case UDIV:
2490     case MOD:      case UMOD:
2491     case AND:      case IOR:      case XOR:
2492     case ROTATERT: case ROTATE:
2493     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2494     case NE:       case EQ:
2495     case GE:       case GT:       case GEU:    case GTU:
2496     case LE:       case LT:       case LEU:    case LTU:
2497       {
2498 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2499 	rtx new1 = XEXP (x, 1)
2500 		   ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2501 
2502 	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2503 	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2504       }
2505       return x;
2506 
2507     case EXPR_LIST:
2508       /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2509       if (XEXP (x, 0))
2510 	{
2511 	  new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2512 	  if (new != XEXP (x, 0))
2513 	    {
2514 	      /* If this is a REG_DEAD note, it is not valid anymore.
2515 		 Using the eliminated version could result in creating a
2516 		 REG_DEAD note for the stack or frame pointer.  */
2517 	      if (GET_MODE (x) == REG_DEAD)
2518 		return (XEXP (x, 1)
2519 			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2520 			: NULL_RTX);
2521 
2522 	      x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2523 	    }
2524 	}
2525 
2526       /* ... fall through ...  */
2527 
2528     case INSN_LIST:
2529       /* Now do eliminations in the rest of the chain.  If this was
2530 	 an EXPR_LIST, this might result in allocating more memory than is
2531 	 strictly needed, but it simplifies the code.  */
2532       if (XEXP (x, 1))
2533 	{
2534 	  new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2535 	  if (new != XEXP (x, 1))
2536 	    return
2537 	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2538 	}
2539       return x;
2540 
2541     case PRE_INC:
2542     case POST_INC:
2543     case PRE_DEC:
2544     case POST_DEC:
2545     case STRICT_LOW_PART:
2546     case NEG:          case NOT:
2547     case SIGN_EXTEND:  case ZERO_EXTEND:
2548     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2549     case FLOAT:        case FIX:
2550     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2551     case ABS:
2552     case SQRT:
2553     case FFS:
2554     case CLZ:
2555     case CTZ:
2556     case POPCOUNT:
2557     case PARITY:
2558       new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2559       if (new != XEXP (x, 0))
2560 	return gen_rtx_fmt_e (code, GET_MODE (x), new);
2561       return x;
2562 
2563     case SUBREG:
2564       /* Similar to above processing, but preserve SUBREG_BYTE.
2565 	 Convert (subreg (mem)) to (mem) if not paradoxical.
2566 	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2567 	 pseudo didn't get a hard reg, we must replace this with the
2568 	 eliminated version of the memory location because push_reload
2569 	 may do the replacement in certain circumstances.  */
2570       if (REG_P (SUBREG_REG (x))
2571 	  && (GET_MODE_SIZE (GET_MODE (x))
2572 	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2573 	  && reg_equiv_memory_loc != 0
2574 	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2575 	{
2576 	  new = SUBREG_REG (x);
2577 	}
2578       else
2579 	new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2580 
2581       if (new != SUBREG_REG (x))
2582 	{
2583 	  int x_size = GET_MODE_SIZE (GET_MODE (x));
2584 	  int new_size = GET_MODE_SIZE (GET_MODE (new));
2585 
2586 	  if (MEM_P (new)
2587 	      && ((x_size < new_size
2588 #ifdef WORD_REGISTER_OPERATIONS
2589 		   /* On these machines, combine can create rtl of the form
2590 		      (set (subreg:m1 (reg:m2 R) 0) ...)
2591 		      where m1 < m2, and expects something interesting to
2592 		      happen to the entire word.  Moreover, it will use the
2593 		      (reg:m2 R) later, expecting all bits to be preserved.
2594 		      So if the number of words is the same, preserve the
2595 		      subreg so that push_reload can see it.  */
2596 		   && ! ((x_size - 1) / UNITS_PER_WORD
2597 			 == (new_size - 1) / UNITS_PER_WORD)
2598 #endif
2599 		   )
2600 		  || x_size == new_size)
2601 	      )
2602 	    return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2603 	  else
2604 	    return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2605 	}
2606 
2607       return x;
2608 
2609     case MEM:
2610       /* Our only special processing is to pass the mode of the MEM to our
2611 	 recursive call and copy the flags.  While we are here, handle this
2612 	 case more efficiently.  */
2613       return
2614 	replace_equiv_address_nv (x,
2615 				  eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2616 						    insn, true));
2617 
2618     case USE:
2619       /* Handle insn_list USE that a call to a pure function may generate.  */
2620       new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2621       if (new != XEXP (x, 0))
2622 	return gen_rtx_USE (GET_MODE (x), new);
2623       return x;
2624 
2625     case CLOBBER:
2626     case ASM_OPERANDS:
2627     case SET:
2628       gcc_unreachable ();
2629 
2630     default:
2631       break;
2632     }
2633 
2634   /* Process each of our operands recursively.  If any have changed, make a
2635      copy of the rtx.  */
2636   fmt = GET_RTX_FORMAT (code);
2637   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2638     {
2639       if (*fmt == 'e')
2640 	{
2641 	  new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2642 	  if (new != XEXP (x, i) && ! copied)
2643 	    {
2644 	      x = shallow_copy_rtx (x);
2645 	      copied = 1;
2646 	    }
2647 	  XEXP (x, i) = new;
2648 	}
2649       else if (*fmt == 'E')
2650 	{
2651 	  int copied_vec = 0;
2652 	  for (j = 0; j < XVECLEN (x, i); j++)
2653 	    {
2654 	      new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2655 	      if (new != XVECEXP (x, i, j) && ! copied_vec)
2656 		{
2657 		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2658 					     XVEC (x, i)->elem);
2659 		  if (! copied)
2660 		    {
2661 		      x = shallow_copy_rtx (x);
2662 		      copied = 1;
2663 		    }
2664 		  XVEC (x, i) = new_v;
2665 		  copied_vec = 1;
2666 		}
2667 	      XVECEXP (x, i, j) = new;
2668 	    }
2669 	}
2670     }
2671 
2672   return x;
2673 }
2674 
2675 rtx
2676 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2677 {
2678   return eliminate_regs_1 (x, mem_mode, insn, false);
2679 }
2680 
2681 /* Scan rtx X for modifications of elimination target registers.  Update
2682    the table of eliminables to reflect the changed state.  MEM_MODE is
2683    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
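/* For instance (mode and sizes assumed): scanning a push such as
   (mem:SI (pre_dec (reg sp))) adds the 4-byte size of the enclosing MEM to
   EP->OFFSET for any elimination whose replacement register is sp, so that
   later references see the adjusted offset; where PUSH_ROUNDING is defined,
   that size is rounded accordingly.  */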
2684 
2685 static void
2686 elimination_effects (rtx x, enum machine_mode mem_mode)
2687 {
2688   enum rtx_code code = GET_CODE (x);
2689   struct elim_table *ep;
2690   int regno;
2691   int i, j;
2692   const char *fmt;
2693 
2694   switch (code)
2695     {
2696     case CONST_INT:
2697     case CONST_DOUBLE:
2698     case CONST_VECTOR:
2699     case CONST:
2700     case SYMBOL_REF:
2701     case CODE_LABEL:
2702     case PC:
2703     case CC0:
2704     case ASM_INPUT:
2705     case ADDR_VEC:
2706     case ADDR_DIFF_VEC:
2707     case RETURN:
2708       return;
2709 
2710     case REG:
2711       regno = REGNO (x);
2712 
2713       /* First handle the case where we encounter a bare register that
2714 	 is eliminable.  Replace it with a PLUS.  */
2715       if (regno < FIRST_PSEUDO_REGISTER)
2716 	{
2717 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2718 	       ep++)
2719 	    if (ep->from_rtx == x && ep->can_eliminate)
2720 	      {
2721 		if (! mem_mode)
2722 		  ep->ref_outside_mem = 1;
2723 		return;
2724 	      }
2725 
2726 	}
2727       else if (reg_renumber[regno] < 0 && reg_equiv_constant
2728 	       && reg_equiv_constant[regno]
2729 	       && ! function_invariant_p (reg_equiv_constant[regno]))
2730 	elimination_effects (reg_equiv_constant[regno], mem_mode);
2731       return;
2732 
2733     case PRE_INC:
2734     case POST_INC:
2735     case PRE_DEC:
2736     case POST_DEC:
2737     case POST_MODIFY:
2738     case PRE_MODIFY:
2739       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2740 	if (ep->to_rtx == XEXP (x, 0))
2741 	  {
2742 	    int size = GET_MODE_SIZE (mem_mode);
2743 
2744 	    /* If more bytes than MEM_MODE are pushed, account for them.  */
2745 #ifdef PUSH_ROUNDING
2746 	    if (ep->to_rtx == stack_pointer_rtx)
2747 	      size = PUSH_ROUNDING (size);
2748 #endif
2749 	    if (code == PRE_DEC || code == POST_DEC)
2750 	      ep->offset += size;
2751 	    else if (code == PRE_INC || code == POST_INC)
2752 	      ep->offset -= size;
2753 	    else if ((code == PRE_MODIFY || code == POST_MODIFY)
2754 		     && GET_CODE (XEXP (x, 1)) == PLUS
2755 		     && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2756 		     && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2757 	      ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2758 	  }
2759 
2760       /* These two aren't unary operators.  */
2761       if (code == POST_MODIFY || code == PRE_MODIFY)
2762 	break;
2763 
2764       /* Fall through to generic unary operation case.  */
2765     case STRICT_LOW_PART:
2766     case NEG:          case NOT:
2767     case SIGN_EXTEND:  case ZERO_EXTEND:
2768     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2769     case FLOAT:        case FIX:
2770     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2771     case ABS:
2772     case SQRT:
2773     case FFS:
2774     case CLZ:
2775     case CTZ:
2776     case POPCOUNT:
2777     case PARITY:
2778       elimination_effects (XEXP (x, 0), mem_mode);
2779       return;
2780 
2781     case SUBREG:
2782       if (REG_P (SUBREG_REG (x))
2783 	  && (GET_MODE_SIZE (GET_MODE (x))
2784 	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2785 	  && reg_equiv_memory_loc != 0
2786 	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2787 	return;
2788 
2789       elimination_effects (SUBREG_REG (x), mem_mode);
2790       return;
2791 
2792     case USE:
2793       /* If using a register that is the source of an elimination we still
2794 	 think can be performed, note it cannot be performed since we don't
2795 	 know how this register is used.  */
2796       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2797 	if (ep->from_rtx == XEXP (x, 0))
2798 	  ep->can_eliminate = 0;
2799 
2800       elimination_effects (XEXP (x, 0), mem_mode);
2801       return;
2802 
2803     case CLOBBER:
2804       /* If clobbering a register that is the replacement register for an
2805 	 elimination we still think can be performed, note that it cannot
2806 	 be performed.  Otherwise, we need not be concerned about it.  */
2807       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2808 	if (ep->to_rtx == XEXP (x, 0))
2809 	  ep->can_eliminate = 0;
2810 
2811       elimination_effects (XEXP (x, 0), mem_mode);
2812       return;
2813 
2814     case SET:
2815       /* Check for setting a register that we know about.  */
2816       if (REG_P (SET_DEST (x)))
2817 	{
2818 	  /* See if this is setting the replacement register for an
2819 	     elimination.
2820 
2821 	     If DEST is the hard frame pointer, we do nothing because we
2822 	     assume that all assignments to the frame pointer are for
2823 	     non-local gotos and are being done at a time when they are valid
2824 	     and do not disturb anything else.  Some machines want to
2825 	     eliminate a fake argument pointer (or even a fake frame pointer)
2826 	     with either the real frame or the stack pointer.  Assignments to
2827 	     the hard frame pointer must not prevent this elimination.  */
2828 
2829 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2830 	       ep++)
2831 	    if (ep->to_rtx == SET_DEST (x)
2832 		&& SET_DEST (x) != hard_frame_pointer_rtx)
2833 	      {
2834 		/* If it is being incremented, adjust the offset.  Otherwise,
2835 		   this elimination can't be done.  */
2836 		rtx src = SET_SRC (x);
2837 
2838 		if (GET_CODE (src) == PLUS
2839 		    && XEXP (src, 0) == SET_DEST (x)
2840 		    && GET_CODE (XEXP (src, 1)) == CONST_INT)
2841 		  ep->offset -= INTVAL (XEXP (src, 1));
2842 		else
2843 		  ep->can_eliminate = 0;
2844 	      }
2845 	}
2846 
2847       elimination_effects (SET_DEST (x), 0);
2848       elimination_effects (SET_SRC (x), 0);
2849       return;
2850 
2851     case MEM:
2852       /* Our only special processing is to pass the mode of the MEM to our
2853 	 recursive call.  */
2854       elimination_effects (XEXP (x, 0), GET_MODE (x));
2855       return;
2856 
2857     default:
2858       break;
2859     }
2860 
2861   fmt = GET_RTX_FORMAT (code);
2862   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2863     {
2864       if (*fmt == 'e')
2865 	elimination_effects (XEXP (x, i), mem_mode);
2866       else if (*fmt == 'E')
2867 	for (j = 0; j < XVECLEN (x, i); j++)
2868 	  elimination_effects (XVECEXP (x, i, j), mem_mode);
2869     }
2870 }
2871 
2872 /* Descend through rtx X and verify that no references to eliminable registers
2873    remain.  If any do remain, mark the involved register as not
2874    eliminable.  */
2875 
2876 static void
2877 check_eliminable_occurrences (rtx x)
2878 {
2879   const char *fmt;
2880   int i;
2881   enum rtx_code code;
2882 
2883   if (x == 0)
2884     return;
2885 
2886   code = GET_CODE (x);
2887 
2888   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2889     {
2890       struct elim_table *ep;
2891 
2892       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2893 	if (ep->from_rtx == x)
2894 	  ep->can_eliminate = 0;
2895       return;
2896     }
2897 
2898   fmt = GET_RTX_FORMAT (code);
2899   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2900     {
2901       if (*fmt == 'e')
2902 	check_eliminable_occurrences (XEXP (x, i));
2903       else if (*fmt == 'E')
2904 	{
2905 	  int j;
2906 	  for (j = 0; j < XVECLEN (x, i); j++)
2907 	    check_eliminable_occurrences (XVECEXP (x, i, j));
2908 	}
2909     }
2910 }
2911 
2912 /* Scan INSN and eliminate all eliminable registers in it.
2913 
2914    If REPLACE is nonzero, do the replacement destructively.  Also
2915    delete the insn as dead if it is setting an eliminable register.
2916 
2917    If REPLACE is zero, do all our allocations in reload_obstack.
2918 
2919    If no eliminations were done and this insn doesn't require any elimination
2920    processing (these are not identical conditions: it might be updating sp,
2921    but not referencing fp; this needs to be seen during reload_as_needed so
2922    that the offset between fp and sp can be taken into consideration), zero
2923    is returned.  Otherwise, 1 is returned.  */
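/* As a concrete case of the distinction above (constants assumed): a stack
   adjustment such as (set (reg sp) (plus (reg sp) (const_int -16)))
   references no eliminable register, yet it changes the offset between fp
   and sp, so it still counts as needing elimination processing.  */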
2924 
2925 static int
2926 eliminate_regs_in_insn (rtx insn, int replace)
2927 {
2928   int icode = recog_memoized (insn);
2929   rtx old_body = PATTERN (insn);
2930   int insn_is_asm = asm_noperands (old_body) >= 0;
2931   rtx old_set = single_set (insn);
2932   rtx new_body;
2933   int val = 0;
2934   int i;
2935   rtx substed_operand[MAX_RECOG_OPERANDS];
2936   rtx orig_operand[MAX_RECOG_OPERANDS];
2937   struct elim_table *ep;
2938   rtx plus_src, plus_cst_src;
2939 
2940   if (! insn_is_asm && icode < 0)
2941     {
2942       gcc_assert (GET_CODE (PATTERN (insn)) == USE
2943 		  || GET_CODE (PATTERN (insn)) == CLOBBER
2944 		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
2945 		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2946 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2947       return 0;
2948     }
2949 
2950   if (old_set != 0 && REG_P (SET_DEST (old_set))
2951       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2952     {
2953       /* Check for setting an eliminable register.  */
2954       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2955 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2956 	  {
2957 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2958 	    /* If this is setting the frame pointer register to the
2959 	       hardware frame pointer register and this is an elimination
2960 	       that will be done (tested above), this insn is really
2961 	       adjusting the frame pointer downward to compensate for
2962 	       the adjustment done before a nonlocal goto.  */
2963 	    if (ep->from == FRAME_POINTER_REGNUM
2964 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
2965 	      {
2966 		rtx base = SET_SRC (old_set);
2967 		rtx base_insn = insn;
2968 		HOST_WIDE_INT offset = 0;
2969 
2970 		while (base != ep->to_rtx)
2971 		  {
2972 		    rtx prev_insn, prev_set;
2973 
2974 		    if (GET_CODE (base) == PLUS
2975 		        && GET_CODE (XEXP (base, 1)) == CONST_INT)
2976 		      {
2977 		        offset += INTVAL (XEXP (base, 1));
2978 		        base = XEXP (base, 0);
2979 		      }
2980 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2981 			     && (prev_set = single_set (prev_insn)) != 0
2982 			     && rtx_equal_p (SET_DEST (prev_set), base))
2983 		      {
2984 		        base = SET_SRC (prev_set);
2985 		        base_insn = prev_insn;
2986 		      }
2987 		    else
2988 		      break;
2989 		  }
2990 
2991 		if (base == ep->to_rtx)
2992 		  {
2993 		    rtx src
2994 		      = plus_constant (ep->to_rtx, offset - ep->offset);
2995 
2996 		    new_body = old_body;
2997 		    if (! replace)
2998 		      {
2999 			new_body = copy_insn (old_body);
3000 			if (REG_NOTES (insn))
3001 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3002 		      }
3003 		    PATTERN (insn) = new_body;
3004 		    old_set = single_set (insn);
3005 
3006 		    /* First see if this insn remains valid when we
3007 		       make the change.  If not, keep the INSN_CODE
3008 		       the same and let reload fix it up.  */
3009 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3010 		    validate_change (insn, &SET_DEST (old_set),
3011 				     ep->to_rtx, 1);
3012 		    if (! apply_change_group ())
3013 		      {
3014 			SET_SRC (old_set) = src;
3015 			SET_DEST (old_set) = ep->to_rtx;
3016 		      }
3017 
3018 		    val = 1;
3019 		    goto done;
3020 		  }
3021 	      }
3022 #endif
3023 
3024 	    /* In this case this insn isn't serving a useful purpose.  We
3025 	       will delete it in reload_as_needed once we know that this
3026 	       elimination is, in fact, being done.
3027 
3028 	       If REPLACE isn't set, we can't delete this insn, but needn't
3029 	       process it since it won't be used unless something changes.  */
3030 	    if (replace)
3031 	      {
3032 		delete_dead_insn (insn);
3033 		return 1;
3034 	      }
3035 	    val = 1;
3036 	    goto done;
3037 	  }
3038     }
3039 
3040   /* We allow one special case which happens to work on all machines we
3041      currently support: a single set with the source or a REG_EQUAL
3042      note being a PLUS of an eliminable register and a constant.  */
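  /* Illustrative sketch (hypothetical registers and offsets, not taken
     from the original sources): with an elimination of fp into sp whose
     current offset is 8, a set such as

	(set (reg:SI 3) (plus:SI (reg:SI fp) (const_int 16)))

     has its source rewritten by the code below into

	(plus:SI (reg:SI sp) (const_int 24))

     and the insn is then revalidated or left for reload to fix up.  */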
3043   plus_src = plus_cst_src = 0;
3044   if (old_set && REG_P (SET_DEST (old_set)))
3045     {
3046       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3047 	plus_src = SET_SRC (old_set);
3048       /* First see if the source is of the form (plus (...) CST).  */
3049       if (plus_src
3050 	  && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3051 	plus_cst_src = plus_src;
3052       else if (REG_P (SET_SRC (old_set))
3053 	       || plus_src)
3054 	{
3055 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3056 	     (plus (...) CST).  */
3057 	  rtx links;
3058 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3059 	    {
3060 	      if (REG_NOTE_KIND (links) == REG_EQUAL
3061 		  && GET_CODE (XEXP (links, 0)) == PLUS
3062 		  && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3063 		{
3064 		  plus_cst_src = XEXP (links, 0);
3065 		  break;
3066 		}
3067 	    }
3068 	}
3069 
3070       /* Check that the first operand of the PLUS is a hard reg or
3071 	 the lowpart subreg of one.  */
3072       if (plus_cst_src)
3073 	{
3074 	  rtx reg = XEXP (plus_cst_src, 0);
3075 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3076 	    reg = SUBREG_REG (reg);
3077 
3078 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3079 	    plus_cst_src = 0;
3080 	}
3081     }
3082   if (plus_cst_src)
3083     {
3084       rtx reg = XEXP (plus_cst_src, 0);
3085       HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3086 
3087       if (GET_CODE (reg) == SUBREG)
3088 	reg = SUBREG_REG (reg);
3089 
3090       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3091 	if (ep->from_rtx == reg && ep->can_eliminate)
3092 	  {
3093 	    rtx to_rtx = ep->to_rtx;
3094 	    offset += ep->offset;
3095 	    offset = trunc_int_for_mode (offset, GET_MODE (reg));
3096 
3097 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3098 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3099 				    to_rtx);
3100 	    /* If we have a nonzero offset, and the source is already
3101 	       a simple REG, the following transformation would
3102 	       increase the cost of the insn by replacing a simple REG
3103 	       with (plus (reg sp) CST).  So try only when we already
3104 	       had a PLUS before.  */
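	    /* Hypothetical example of the case being avoided: if the
	       original source were just (reg:SI fp) and the elimination
	       offset were 4, substituting (plus:SI (reg:SI sp) (const_int 4))
	       would turn a plain register copy into an addition, which is
	       usually more costly; so the rewrite is only attempted when
	       the insn already contained a PLUS.  */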
3105 	    if (offset == 0 || plus_src)
3106 	      {
3107 		rtx new_src = plus_constant (to_rtx, offset);
3108 
3109 		new_body = old_body;
3110 		if (! replace)
3111 		  {
3112 		    new_body = copy_insn (old_body);
3113 		    if (REG_NOTES (insn))
3114 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3115 		  }
3116 		PATTERN (insn) = new_body;
3117 		old_set = single_set (insn);
3118 
3119 		/* First see if this insn remains valid when we make the
3120 		   change.  If not, try to replace the whole pattern with
3121 		   a simple set (this may help if the original insn was a
3122 		   PARALLEL that was only recognized as single_set due to
3123 		   REG_UNUSED notes).  If this isn't valid either, keep
3124 		   the INSN_CODE the same and let reload fix it up.  */
3125 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3126 		  {
3127 		    rtx new_pat = gen_rtx_SET (VOIDmode,
3128 					       SET_DEST (old_set), new_src);
3129 
3130 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3131 		      SET_SRC (old_set) = new_src;
3132 		  }
3133 	      }
3134 	    else
3135 	      break;
3136 
3137 	    val = 1;
3138 	    /* This can't have an effect on elimination offsets, so skip right
3139 	       to the end.  */
3140 	    goto done;
3141 	  }
3142     }
3143 
3144   /* Determine the effects of this insn on elimination offsets.  */
3145   elimination_effects (old_body, 0);
3146 
3147   /* Eliminate all eliminable registers occurring in operands that
3148      can be handled by reload.  */
3149   extract_insn (insn);
3150   for (i = 0; i < recog_data.n_operands; i++)
3151     {
3152       orig_operand[i] = recog_data.operand[i];
3153       substed_operand[i] = recog_data.operand[i];
3154 
3155       /* For an asm statement, every operand is eliminable.  */
3156       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3157 	{
3158 	  bool is_set_src, in_plus;
3159 
3160 	  /* Check for setting a register that we know about.  */
3161 	  if (recog_data.operand_type[i] != OP_IN
3162 	      && REG_P (orig_operand[i]))
3163 	    {
3164 	      /* If we are assigning to a register that can be eliminated, it
3165 		 must be as part of a PARALLEL, since the code above handles
3166 		 single SETs.  We must indicate that we can no longer
3167 		 eliminate this reg.  */
3168 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3169 		   ep++)
3170 		if (ep->from_rtx == orig_operand[i])
3171 		  ep->can_eliminate = 0;
3172 	    }
3173 
3174 	  /* Companion to the above plus substitution, we can allow
3175 	     invariants as the source of a plain move.  */
3176 	  is_set_src = false;
3177 	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3178 	    is_set_src = true;
3179 	  in_plus = false;
3180 	  if (plus_src
3181 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3182 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3183 	    in_plus = true;
3184 
3185 	  substed_operand[i]
3186 	    = eliminate_regs_1 (recog_data.operand[i], 0,
3187 			        replace ? insn : NULL_RTX,
3188 				is_set_src || in_plus);
3189 	  if (substed_operand[i] != orig_operand[i])
3190 	    val = 1;
3191 	  /* Terminate the search in check_eliminable_occurrences at
3192 	     this point.  */
3193 	  *recog_data.operand_loc[i] = 0;
3194 
3195 	/* If an output operand changed from a REG to a MEM and INSN is an
3196 	   insn, write a CLOBBER insn.  */
3197 	  if (recog_data.operand_type[i] != OP_IN
3198 	      && REG_P (orig_operand[i])
3199 	      && MEM_P (substed_operand[i])
3200 	      && replace)
3201 	    emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3202 			     insn);
3203 	}
3204     }
3205 
3206   for (i = 0; i < recog_data.n_dups; i++)
3207     *recog_data.dup_loc[i]
3208       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3209 
3210   /* If any eliminable registers remain, they aren't eliminable anymore.  */
3211   check_eliminable_occurrences (old_body);
3212 
3213   /* Substitute the operands; the new values are in the substed_operand
3214      array.  */
3215   for (i = 0; i < recog_data.n_operands; i++)
3216     *recog_data.operand_loc[i] = substed_operand[i];
3217   for (i = 0; i < recog_data.n_dups; i++)
3218     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3219 
3220   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3221      re-recognize the insn.  We do this in case we had a simple addition
3222      but now can do this as a load-address.  This saves an insn in this
3223      common case.
3224      If re-recognition fails, the old insn code number will still be used,
3225      and some register operands may have changed into PLUS expressions.
3226      These will be handled by find_reloads by loading them into a register
3227      again.  */
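  /* As a hedged example of the saving described above: on a target with a
     load-address pattern (x86's lea, for instance), a rewritten source such
     as (plus:SI (reg:SI sp) (const_int 12)) may re-recognize as a single
     address-load insn rather than a separate add; whether this happens
     depends entirely on the target's patterns.  */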
3228 
3229   if (val)
3230     {
3231       /* If we aren't replacing things permanently and we changed something,
3232 	 make another copy to ensure that all the RTL is new.  Otherwise
3233 	 things can go wrong if find_reloads swaps commutative operands
3234 	 and one is inside RTL that has been copied while the other is not.  */
3235       new_body = old_body;
3236       if (! replace)
3237 	{
3238 	  new_body = copy_insn (old_body);
3239 	  if (REG_NOTES (insn))
3240 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3241 	}
3242       PATTERN (insn) = new_body;
3243 
3244       /* If we had a move insn but now we don't, rerecognize it.  This will
3245 	 cause spurious re-recognition if the old move had a PARALLEL since
3246 	 the new one still will, but we can't call single_set without
3247 	 having put NEW_BODY into the insn and the re-recognition won't
3248 	 hurt in this rare case.  */
3249       /* ??? Why this huge if statement - why don't we just rerecognize the
3250 	 thing always?  */
3251       if (! insn_is_asm
3252 	  && old_set != 0
3253 	  && ((REG_P (SET_SRC (old_set))
3254 	       && (GET_CODE (new_body) != SET
3255 		   || !REG_P (SET_SRC (new_body))))
3256 	      /* If this was a load from or store to memory, compare
3257 		 the MEM in recog_data.operand to the one in the insn.
3258 		 If they are not equal, then rerecognize the insn.  */
3259 	      || (old_set != 0
3260 		  && ((MEM_P (SET_SRC (old_set))
3261 		       && SET_SRC (old_set) != recog_data.operand[1])
3262 		      || (MEM_P (SET_DEST (old_set))
3263 			  && SET_DEST (old_set) != recog_data.operand[0])))
3264 	      /* If this was an add insn before, rerecognize.  */
3265 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3266 	{
3267 	  int new_icode = recog (PATTERN (insn), insn, 0);
3268 	  if (new_icode >= 0)
3269 	    INSN_CODE (insn) = new_icode;
3270 	}
3271     }
3272 
3273   /* Restore the old body.  If there were any changes to it, we made a copy
3274      of it while the changes were still in place, so we'll correctly return
3275      a modified insn below.  */
3276   if (! replace)
3277     {
3278       /* Restore the old body.  */
3279       for (i = 0; i < recog_data.n_operands; i++)
3280 	*recog_data.operand_loc[i] = orig_operand[i];
3281       for (i = 0; i < recog_data.n_dups; i++)
3282 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3283     }
3284 
3285   /* Update all elimination pairs to reflect the status after the current
3286      insn.  The changes we make were determined by the earlier call to
3287      elimination_effects.
3288 
3289      We also detect cases where register elimination cannot be done,
3290      namely, if a register would be both changed and referenced outside a MEM
3291      in the resulting insn since such an insn is often undefined and, even if
3292      not, we cannot know what meaning will be given to it.  Note that it is
3293      valid to have a register used in an address in an insn that changes it
3294      (presumably with a pre- or post-increment or decrement).
3295 
3296      If anything changes, return nonzero.  */
3297 
3298   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3299     {
3300       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3301 	ep->can_eliminate = 0;
3302 
3303       ep->ref_outside_mem = 0;
3304 
3305       if (ep->previous_offset != ep->offset)
3306 	val = 1;
3307     }
3308 
3309  done:
3310   /* If we changed something, perform elimination in REG_NOTES.  This is
3311      needed even when REPLACE is zero because a REG_DEAD note might refer
3312      to a register that we eliminate and could cause a different number
3313      of spill registers to be needed in the final reload pass than in
3314      the pre-passes.  */
3315   if (val && REG_NOTES (insn) != 0)
3316     REG_NOTES (insn)
3317       = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3318 
3319   return val;
3320 }
3321 
3322 /* Loop through all elimination pairs.
3323    Recalculate the number not at initial offset.
3324 
3325    Compute the maximum offset (minimum offset if the stack does not
3326    grow downward) for each elimination pair.  */
3327 
3328 static void
3329 update_eliminable_offsets (void)
3330 {
3331   struct elim_table *ep;
3332 
3333   num_not_at_initial_offset = 0;
3334   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3335     {
3336       ep->previous_offset = ep->offset;
3337       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3338 	num_not_at_initial_offset++;
3339     }
3340 }
3341 
3342 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3343    replacement we currently believe is valid, mark it as not eliminable if X
3344    modifies DEST in any way other than by adding a constant integer to it.
3345 
3346    If DEST is the frame pointer, we do nothing because we assume that
3347    all assignments to the hard frame pointer are nonlocal gotos and are being
3348    done at a time when they are valid and do not disturb anything else.
3349    Some machines want to eliminate a fake argument pointer with either the
3350    frame or stack pointer.  Assignments to the hard frame pointer must not
3351    prevent this elimination.
3352 
3353    Called via note_stores from reload before starting its passes to scan
3354    the insns of the function.  */
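/* For illustration (a hypothetical, non-exhaustive example): a store such as
      (set (reg sp) (plus (reg sp) (const_int -16)))
   leaves eliminations whose target is sp valid, since sp is only changed by
   adding a constant, whereas something like
      (set (reg sp) (reg 7))
   would cause every elimination whose to-register is sp to be marked not
   eliminable by the loop below.  */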
3355 
3356 static void
3357 mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
3358 {
3359   unsigned int i;
3360 
3361   /* A SUBREG of a hard register here is just changing its mode.  We should
3362      not see a SUBREG of an eliminable hard register, but check just in
3363      case.  */
3364   if (GET_CODE (dest) == SUBREG)
3365     dest = SUBREG_REG (dest);
3366 
3367   if (dest == hard_frame_pointer_rtx)
3368     return;
3369 
3370   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3371     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3372 	&& (GET_CODE (x) != SET
3373 	    || GET_CODE (SET_SRC (x)) != PLUS
3374 	    || XEXP (SET_SRC (x), 0) != dest
3375 	    || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3376       {
3377 	reg_eliminate[i].can_eliminate_previous
3378 	  = reg_eliminate[i].can_eliminate = 0;
3379 	num_eliminable--;
3380       }
3381 }
3382 
3383 /* Verify that the initial elimination offsets did not change since the
3384    last call to set_initial_elim_offsets.  This is used to catch cases
3385    where something illegal happened during reload_as_needed that could
3386    cause incorrect code to be generated if we did not check for it.  */
3387 
3388 static bool
3389 verify_initial_elim_offsets (void)
3390 {
3391   HOST_WIDE_INT t;
3392 
3393   if (!num_eliminable)
3394     return true;
3395 
3396 #ifdef ELIMINABLE_REGS
3397   {
3398    struct elim_table *ep;
3399 
3400    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3401      {
3402        INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3403        if (t != ep->initial_offset)
3404 	 return false;
3405      }
3406   }
3407 #else
3408   INITIAL_FRAME_POINTER_OFFSET (t);
3409   if (t != reg_eliminate[0].initial_offset)
3410     return false;
3411 #endif
3412 
3413   return true;
3414 }
3415 
3416 /* Reset all offsets on eliminable registers to their initial values.  */
3417 
3418 static void
3419 set_initial_elim_offsets (void)
3420 {
3421   struct elim_table *ep = reg_eliminate;
3422 
3423 #ifdef ELIMINABLE_REGS
3424   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3425     {
3426       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3427       ep->previous_offset = ep->offset = ep->initial_offset;
3428     }
3429 #else
3430   INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3431   ep->previous_offset = ep->offset = ep->initial_offset;
3432 #endif
3433 
3434   num_not_at_initial_offset = 0;
3435 }
3436 
3437 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3438 
3439 static void
3440 set_initial_eh_label_offset (rtx label)
3441 {
3442   set_label_offsets (label, NULL_RTX, 1);
3443 }
3444 
3445 /* Initialize the known label offsets.
3446    Set a known offset for each forced label to be at the initial offset
3447    of each elimination.  We do this because we assume that all
3448    computed jumps occur from a location where each elimination is
3449    at its initial offset.
3450    For all other labels, show that we don't know the offsets.  */
3451 
3452 static void
3453 set_initial_label_offsets (void)
3454 {
3455   rtx x;
3456   memset (offsets_known_at, 0, num_labels);
3457 
3458   for (x = forced_labels; x; x = XEXP (x, 1))
3459     if (XEXP (x, 0))
3460       set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3461 
3462   for_each_eh_label (set_initial_eh_label_offset);
3463 }
3464 
3465 /* Set all elimination offsets to the known values for the code label given
3466    by INSN.  */
3467 
3468 static void
3469 set_offsets_for_label (rtx insn)
3470 {
3471   unsigned int i;
3472   int label_nr = CODE_LABEL_NUMBER (insn);
3473   struct elim_table *ep;
3474 
3475   num_not_at_initial_offset = 0;
3476   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3477     {
3478       ep->offset = ep->previous_offset
3479 		 = offsets_at[label_nr - first_label_num][i];
3480       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3481 	num_not_at_initial_offset++;
3482     }
3483 }
3484 
3485 /* See if anything that happened changes which eliminations are valid.
3486    For example, on the SPARC, whether or not the frame pointer can
3487    be eliminated can depend on what registers have been used.  We need
3488    not check some conditions again (such as flag_omit_frame_pointer)
3489    since they can't have changed.  */
3490 
3491 static void
3492 update_eliminables (HARD_REG_SET *pset)
3493 {
3494   int previous_frame_pointer_needed = frame_pointer_needed;
3495   struct elim_table *ep;
3496 
3497   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3498     if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3499 #ifdef ELIMINABLE_REGS
3500 	|| ! CAN_ELIMINATE (ep->from, ep->to)
3501 #endif
3502 	)
3503       ep->can_eliminate = 0;
3504 
3505   /* Look for the case where we have discovered that we can't replace
3506      register A with register B and that means that we will now be
3507      trying to replace register A with register C.  This means we can
3508      no longer replace register C with register B and we need to disable
3509      such an elimination, if it exists.  This occurs often with A == ap,
3510      B == sp, and C == fp.  */
3511 
3512   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3513     {
3514       struct elim_table *op;
3515       int new_to = -1;
3516 
3517       if (! ep->can_eliminate && ep->can_eliminate_previous)
3518 	{
3519 	  /* Find the current elimination for ep->from, if there is a
3520 	     new one.  */
3521 	  for (op = reg_eliminate;
3522 	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3523 	    if (op->from == ep->from && op->can_eliminate)
3524 	      {
3525 		new_to = op->to;
3526 		break;
3527 	      }
3528 
3529 	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3530 	     disable it.  */
3531 	  for (op = reg_eliminate;
3532 	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3533 	    if (op->from == new_to && op->to == ep->to)
3534 	      op->can_eliminate = 0;
3535 	}
3536     }
3537 
3538   /* See if any registers that we thought we could eliminate the previous
3539      time are no longer eliminable.  If so, something has changed and we
3540      must spill the register.  Also, recompute the number of eliminable
3541      registers and see if the frame pointer is needed; it is if there is
3542      no elimination of the frame pointer that we can perform.  */
3543 
3544   frame_pointer_needed = 1;
3545   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3546     {
3547       if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3548 	  && ep->to != HARD_FRAME_POINTER_REGNUM)
3549 	frame_pointer_needed = 0;
3550 
3551       if (! ep->can_eliminate && ep->can_eliminate_previous)
3552 	{
3553 	  ep->can_eliminate_previous = 0;
3554 	  SET_HARD_REG_BIT (*pset, ep->from);
3555 	  num_eliminable--;
3556 	}
3557     }
3558 
3559   /* If we didn't need a frame pointer last time, but we do now, spill
3560      the hard frame pointer.  */
3561   if (frame_pointer_needed && ! previous_frame_pointer_needed)
3562     SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3563 }
3564 
3565 /* Initialize the table of registers to eliminate.  */
3566 
3567 static void
3568 init_elim_table (void)
3569 {
3570   struct elim_table *ep;
3571 #ifdef ELIMINABLE_REGS
3572   const struct elim_table_1 *ep1;
3573 #endif
3574 
3575   if (!reg_eliminate)
3576     reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3577 
3578   /* Does this function require a frame pointer?  */
3579 
3580   frame_pointer_needed = (! flag_omit_frame_pointer
3581 			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
3582 			     and restore sp for alloca.  So we can't eliminate
3583 			     the frame pointer in that case.  At some point,
3584 			     we should improve this by emitting the
3585 			     sp-adjusting insns for this case.  */
3586 			  || (current_function_calls_alloca
3587 			      && EXIT_IGNORE_STACK)
3588 			  || current_function_accesses_prior_frames
3589 			  || FRAME_POINTER_REQUIRED);
3590 
3591   num_eliminable = 0;
3592 
3593 #ifdef ELIMINABLE_REGS
3594   for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3595        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3596     {
3597       ep->from = ep1->from;
3598       ep->to = ep1->to;
3599       ep->can_eliminate = ep->can_eliminate_previous
3600 	= (CAN_ELIMINATE (ep->from, ep->to)
3601 	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3602     }
3603 #else
3604   reg_eliminate[0].from = reg_eliminate_1[0].from;
3605   reg_eliminate[0].to = reg_eliminate_1[0].to;
3606   reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3607     = ! frame_pointer_needed;
3608 #endif
3609 
3610   /* Count the number of eliminable registers and build the FROM and TO
3611      REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
3612      gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3613      We depend on this.  */
3614   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3615     {
3616       num_eliminable += ep->can_eliminate;
3617       ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3618       ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3619     }
3620 }
3621 
3622 /* Kick all pseudos out of hard register REGNO.
3623 
3624    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3625    because we found we can't eliminate some register.  In that case, no pseudos
3626    are allowed to be in the register, even if they are only in a block that
3627    doesn't require spill registers, unlike the case when we are spilling this
3628    hard reg to produce another spill register.
3629 
3630    Any pseudos that need to be kicked out are recorded in spilled_pseudos.  */
3631 
3632 static void
3633 spill_hard_reg (unsigned int regno, int cant_eliminate)
3634 {
3635   int i;
3636 
3637   if (cant_eliminate)
3638     {
3639       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3640       regs_ever_live[regno] = 1;
3641     }
3642 
3643   /* Spill every pseudo reg that was allocated to this reg
3644      or to something that overlaps this reg.  */
3645 
3646   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3647     if (reg_renumber[i] >= 0
3648 	&& (unsigned int) reg_renumber[i] <= regno
3649 	&& ((unsigned int) reg_renumber[i]
3650 	    + hard_regno_nregs[(unsigned int) reg_renumber[i]]
3651 			      [PSEUDO_REGNO_MODE (i)]
3652 	    > regno))
3653       SET_REGNO_REG_SET (&spilled_pseudos, i);
3654 }
3655 
3656 /* After find_reload_regs has been run for all insns that need reloads,
3657    and/or spill_hard_reg was called, this function is used to actually
3658    spill pseudo registers and try to reallocate them.  It also sets up the
3659    spill_regs array for use by choose_reload_regs.  */
3660 
3661 static int
3662 finish_spills (int global)
3663 {
3664   struct insn_chain *chain;
3665   int something_changed = 0;
3666   unsigned i;
3667   reg_set_iterator rsi;
3668 
3669   /* Build the spill_regs array for the function.  */
3670   /* If there are some registers still to eliminate and one of the spill regs
3671      wasn't ever used before, additional stack space may have to be
3672      allocated to store this register.  Thus, we may have changed the offset
3673      between the stack and frame pointers, so mark that something has changed.
3674 
3675      One might think that we need only set VAL to 1 if this is a call-used
3676      register.  However, the set of registers that must be saved by the
3677      prologue is not identical to the call-used set.  For example, the
3678      register used by the call insn for the return PC is a call-used register,
3679      but must be saved by the prologue.  */
3680 
3681   n_spills = 0;
3682   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3683     if (TEST_HARD_REG_BIT (used_spill_regs, i))
3684       {
3685 	spill_reg_order[i] = n_spills;
3686 	spill_regs[n_spills++] = i;
3687 	if (num_eliminable && ! regs_ever_live[i])
3688 	  something_changed = 1;
3689 	regs_ever_live[i] = 1;
3690       }
3691     else
3692       spill_reg_order[i] = -1;
3693 
3694   EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3695     {
3696       /* Record the current hard register the pseudo is allocated to in
3697 	 pseudo_previous_regs so we avoid reallocating it to the same
3698 	 hard reg in a later pass.  */
3699       gcc_assert (reg_renumber[i] >= 0);
3700 
3701       SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3702       /* Mark it as no longer having a hard register home.  */
3703       reg_renumber[i] = -1;
3704       /* We will need to scan everything again.  */
3705       something_changed = 1;
3706     }
3707 
3708   /* Retry global register allocation if possible.  */
3709   if (global)
3710     {
3711       memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3712       /* For every insn that needs reloads, set the registers used as spill
3713 	 regs in pseudo_forbidden_regs for every pseudo live across the
3714 	 insn.  */
3715       for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3716 	{
3717 	  EXECUTE_IF_SET_IN_REG_SET
3718 	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3719 	    {
3720 	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3721 				chain->used_spill_regs);
3722 	    }
3723 	  EXECUTE_IF_SET_IN_REG_SET
3724 	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3725 	    {
3726 	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3727 				chain->used_spill_regs);
3728 	    }
3729 	}
3730 
3731       /* Retry allocating the spilled pseudos.  For each reg, merge the
3732 	 various reg sets that indicate which hard regs can't be used,
3733 	 and call retry_global_alloc.
3734 	 We change spill_pseudos here to only contain pseudos that did not
3735 	 get a new hard register.  */
3736       for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3737 	if (reg_old_renumber[i] != reg_renumber[i])
3738 	  {
3739 	    HARD_REG_SET forbidden;
3740 	    COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3741 	    IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3742 	    IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3743 	    retry_global_alloc (i, forbidden);
3744 	    if (reg_renumber[i] >= 0)
3745 	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3746 	  }
3747     }
3748 
3749   /* Fix up the register information in the insn chain.
3750      This involves deleting those of the spilled pseudos which did not get
3751      a new hard register home from the live_{before,after} sets.  */
3752   for (chain = reload_insn_chain; chain; chain = chain->next)
3753     {
3754       HARD_REG_SET used_by_pseudos;
3755       HARD_REG_SET used_by_pseudos2;
3756 
3757       AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3758       AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3759 
3760       /* Mark any unallocated hard regs as available for spills.  That
3761 	 makes inheritance work somewhat better.  */
3762       if (chain->need_reload)
3763 	{
3764 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3765 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3766 	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3767 
3768 	  /* Save the old value for the sanity test below.  */
3769 	  COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3770 
3771 	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3772 	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3773 	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3774 	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3775 
3776 	  /* Make sure we only enlarge the set.  */
3777 	  GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3778 	  gcc_unreachable ();
3779 	ok:;
3780 	}
3781     }
3782 
3783   /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
3784   for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3785     {
3786       int regno = reg_renumber[i];
3787       if (reg_old_renumber[i] == regno)
3788 	continue;
3789 
3790       alter_reg (i, reg_old_renumber[i]);
3791       reg_old_renumber[i] = regno;
3792       if (dump_file)
3793 	{
3794 	  if (regno == -1)
3795 	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
3796 	  else
3797 	    fprintf (dump_file, " Register %d now in %d.\n\n",
3798 		     i, reg_renumber[i]);
3799 	}
3800     }
3801 
3802   return something_changed;
3803 }
3804 
3805 /* Find all paradoxical subregs within X and update reg_max_ref_width.  */
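/* Example (hypothetical pseudo number; sizes assume 4-byte SImode and
   8-byte DImode): (subreg:DI (reg:SI 42) 0) reads the pseudo in a mode wider
   than the one it was allocated in, so reg_max_ref_width[42] is raised to 8.
   alter_reg later uses this width so the pseudo's stack slot is made large
   enough for the widest reference.  */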
3806 
3807 static void
3808 scan_paradoxical_subregs (rtx x)
3809 {
3810   int i;
3811   const char *fmt;
3812   enum rtx_code code = GET_CODE (x);
3813 
3814   switch (code)
3815     {
3816     case REG:
3817     case CONST_INT:
3818     case CONST:
3819     case SYMBOL_REF:
3820     case LABEL_REF:
3821     case CONST_DOUBLE:
3822     case CONST_VECTOR: /* shouldn't happen, but just in case.  */
3823     case CC0:
3824     case PC:
3825     case USE:
3826     case CLOBBER:
3827       return;
3828 
3829     case SUBREG:
3830       if (REG_P (SUBREG_REG (x))
3831 	  && (GET_MODE_SIZE (GET_MODE (x))
3832 	      > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
3833 	reg_max_ref_width[REGNO (SUBREG_REG (x))]
3834 	  = GET_MODE_SIZE (GET_MODE (x));
3835       return;
3836 
3837     default:
3838       break;
3839     }
3840 
3841   fmt = GET_RTX_FORMAT (code);
3842   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3843     {
3844       if (fmt[i] == 'e')
3845 	scan_paradoxical_subregs (XEXP (x, i));
3846       else if (fmt[i] == 'E')
3847 	{
3848 	  int j;
3849 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3850 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
3851 	}
3852     }
3853 }
3854 
3855 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
3856    examine all of the reload insns between PREV and NEXT exclusive, and
3857    annotate all that may trap.  */
3858 
3859 static void
3860 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
3861 {
3862   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3863   unsigned int trap_count;
3864   rtx i;
3865 
3866   if (note == NULL)
3867     return;
3868 
3869   if (may_trap_p (PATTERN (insn)))
3870     trap_count = 1;
3871   else
3872     {
3873       remove_note (insn, note);
3874       trap_count = 0;
3875     }
3876 
3877   for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
3878     if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
3879       {
3880 	trap_count++;
3881 	REG_NOTES (i)
3882 	  = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
3883       }
3884 }
3885 
3886 /* Reload pseudo-registers into hard regs around each insn as needed.
3887    Additional register load insns are output before the insn that needs it
3888    and perhaps store insns after insns that modify the reloaded pseudo reg.
3889 
3890    reg_last_reload_reg and reg_reloaded_contents keep track of
3891    which registers are already available in reload registers.
3892    We update these for the reloads that we perform,
3893    as the insns are scanned.  */
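/* For illustration (a hypothetical sequence; the details are target
   dependent): if pseudo 42 was spilled to a stack slot and an insn needs it
   in a register, this pass emits something like

	(set (reg:SI 0) (mem:SI (plus:SI (reg:SI sp) (const_int 8))))

   before the insn, substitutes (reg:SI 0) for pseudo 42 in the insn itself,
   and records in reg_last_reload_reg that pseudo 42 is currently available
   in hard reg 0, so a later insn may inherit it without another load.  */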
3894 
3895 static void
3896 reload_as_needed (int live_known)
3897 {
3898   struct insn_chain *chain;
3899 #if defined (AUTO_INC_DEC)
3900   int i;
3901 #endif
3902   rtx x;
3903 
3904   memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
3905   memset (spill_reg_store, 0, sizeof spill_reg_store);
3906   reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
3907   INIT_REG_SET (&reg_has_output_reload);
3908   CLEAR_HARD_REG_SET (reg_reloaded_valid);
3909   CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
3910 
3911   set_initial_elim_offsets ();
3912 
3913   for (chain = reload_insn_chain; chain; chain = chain->next)
3914     {
3915       rtx prev = 0;
3916       rtx insn = chain->insn;
3917       rtx old_next = NEXT_INSN (insn);
3918 
3919       /* If we pass a label, copy the offsets from the label information
3920 	 into the current offsets of each elimination.  */
3921       if (LABEL_P (insn))
3922 	set_offsets_for_label (insn);
3923 
3924       else if (INSN_P (insn))
3925 	{
3926 	  regset_head regs_to_forget;
3927 	  INIT_REG_SET (&regs_to_forget);
3928 	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
3929 
3930 	  /* If this is a USE or CLOBBER of a MEM, ensure that any
3931 	     references to eliminable registers have been removed.  */
3932 
3933 	  if ((GET_CODE (PATTERN (insn)) == USE
3934 	       || GET_CODE (PATTERN (insn)) == CLOBBER)
3935 	      && MEM_P (XEXP (PATTERN (insn), 0)))
3936 	    XEXP (XEXP (PATTERN (insn), 0), 0)
3937 	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3938 				GET_MODE (XEXP (PATTERN (insn), 0)),
3939 				NULL_RTX);
3940 
3941 	  /* If we need to do register elimination processing, do so.
3942 	     This might delete the insn, in which case we are done.  */
3943 	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3944 	    {
3945 	      eliminate_regs_in_insn (insn, 1);
3946 	      if (NOTE_P (insn))
3947 		{
3948 		  update_eliminable_offsets ();
3949 		  CLEAR_REG_SET (&regs_to_forget);
3950 		  continue;
3951 		}
3952 	    }
3953 
3954 	  /* If need_elim is nonzero but need_reload is zero, one might think
3955 	     that we could simply set n_reloads to 0.  However, find_reloads
3956 	     could have done some manipulation of the insn (such as swapping
3957 	     commutative operands), and these manipulations are lost during
3958 	     the first pass for every insn that needs register elimination.
3959 	     So the actions of find_reloads must be redone here.  */
3960 
3961 	  if (! chain->need_elim && ! chain->need_reload
3962 	      && ! chain->need_operand_change)
3963 	    n_reloads = 0;
3964 	  /* First find the pseudo regs that must be reloaded for this insn.
3965 	     This info is returned in the tables reload_... (see reload.h).
3966 	     Also modify the body of INSN by substituting RELOAD
3967 	     rtx's for those pseudo regs.  */
3968 	  else
3969 	    {
3970 	      CLEAR_REG_SET (&reg_has_output_reload);
3971 	      CLEAR_HARD_REG_SET (reg_is_output_reload);
3972 
3973 	      find_reloads (insn, 1, spill_indirect_levels, live_known,
3974 			    spill_reg_order);
3975 	    }
3976 
3977 	  if (n_reloads > 0)
3978 	    {
3979 	      rtx next = NEXT_INSN (insn);
3980 	      rtx p;
3981 
3982 	      prev = PREV_INSN (insn);
3983 
3984 	      /* Now compute which reload regs to reload them into.  Perhaps
3985 		 reusing reload regs from previous insns, or else output
3986 		 load insns to reload them.  Maybe output store insns too.
3987 		 Record the choices of reload reg in reload_reg_rtx.  */
3988 	      choose_reload_regs (chain);
3989 
3990 	      /* Merge any reloads that we didn't combine for fear of
3991 		 increasing the number of spill registers needed but now
3992 		 discover can be safely merged.  */
3993 	      if (SMALL_REGISTER_CLASSES)
3994 		merge_assigned_reloads (insn);
3995 
3996 	      /* Generate the insns to reload operands into or out of
3997 		 their reload regs.  */
3998 	      emit_reload_insns (chain);
3999 
4000 	      /* Substitute the chosen reload regs from reload_reg_rtx
4001 		 into the insn's body (or perhaps into the bodies of other
4002 		 load and store insn that we just made for reloading
4003 		 and that we moved the structure into).  */
4004 	      subst_reloads (insn);
4005 
4006 	      /* Adjust the exception region notes for loads and stores.  */
4007 	      if (flag_non_call_exceptions && !CALL_P (insn))
4008 		fixup_eh_region_note (insn, prev, next);
4009 
4010 	      /* If this was an ASM, make sure that all the reload insns
4011 		 we have generated are valid.  If not, give an error
4012 		 and delete them.  */
4013 	      if (asm_noperands (PATTERN (insn)) >= 0)
4014 		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4015 		  if (p != insn && INSN_P (p)
4016 		      && GET_CODE (PATTERN (p)) != USE
4017 		      && (recog_memoized (p) < 0
4018 			  || (extract_insn (p), ! constrain_operands (1))))
4019 		    {
4020 		      error_for_asm (insn,
4021 				     "%<asm%> operand requires "
4022 				     "impossible reload");
4023 		      delete_insn (p);
4024 		    }
4025 	    }
4026 
4027 	  if (num_eliminable && chain->need_elim)
4028 	    update_eliminable_offsets ();
4029 
4030 	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
4031 	     is no longer validly lying around to save a future reload.
4032 	     Note that this does not detect pseudos that were reloaded
4033 	     for this insn in order to be stored into them
4034 	     (obeying register constraints).  That is correct; such reload
4035 	     registers ARE still valid.  */
4036 	  forget_marked_reloads (&regs_to_forget);
4037 	  CLEAR_REG_SET (&regs_to_forget);
4038 
4039 	  /* There may have been CLOBBER insns placed after INSN.  So scan
4040 	     between INSN and NEXT and use them to forget old reloads.  */
4041 	  for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4042 	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4043 	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4044 
4045 #ifdef AUTO_INC_DEC
4046 	  /* Likewise for regs altered by auto-increment in this insn.
4047 	     REG_INC notes have been changed by reloading:
4048 	     find_reloads_address_1 records substitutions for them,
4049 	     which have been performed by subst_reloads above.  */
4050 	  for (i = n_reloads - 1; i >= 0; i--)
4051 	    {
4052 	      rtx in_reg = rld[i].in_reg;
4053 	      if (in_reg)
4054 		{
4055 		  enum rtx_code code = GET_CODE (in_reg);
4056 		  /* PRE_INC / PRE_DEC will have the reload register ending up
4057 		     with the same value as the stack slot, but that doesn't
4058 		     hold true for POST_INC / POST_DEC.  Either we have to
4059 		     convert the memory access to a true POST_INC / POST_DEC,
4060 		     or we can't use the reload register for inheritance.  */
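		  /* Rough illustration with hypothetical numbers: if the
		     address pseudo held 100 and the step is 4, a PRE_INC
		     leaves both the reload register and the pseudo's slot
		     at 104, so the reload register can still be inherited;
		     with a POST_INC the slot ends up at 104 while the reload
		     register keeps 100, unless the access is converted to a
		     true POST_INC of the reload register below.  */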
4061 		  if ((code == POST_INC || code == POST_DEC)
4062 		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4063 					    REGNO (rld[i].reg_rtx))
4064 		      /* Make sure it is the inc/dec pseudo, and not
4065 			 some other (e.g. output operand) pseudo.  */
4066 		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4067 			  == REGNO (XEXP (in_reg, 0))))
4068 
4069 		    {
4070 		      rtx reload_reg = rld[i].reg_rtx;
4071 		      enum machine_mode mode = GET_MODE (reload_reg);
4072 		      int n = 0;
4073 		      rtx p;
4074 
4075 		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4076 			{
4077 			  /* We really want to ignore REG_INC notes here, so
4078 			     use PATTERN (p) as argument to reg_set_p.  */
4079 			  if (reg_set_p (reload_reg, PATTERN (p)))
4080 			    break;
4081 			  n = count_occurrences (PATTERN (p), reload_reg, 0);
4082 			  if (! n)
4083 			    continue;
4084 			  if (n == 1)
4085 			    {
4086 			      n = validate_replace_rtx (reload_reg,
4087 							gen_rtx_fmt_e (code,
4088 								       mode,
4089 								       reload_reg),
4090 							p);
4091 
4092 			      /* We must also verify that the constraints
4093 				 are met after the replacement.  */
4094 			      extract_insn (p);
4095 			      if (n)
4096 				n = constrain_operands (1);
4097 			      else
4098 				break;
4099 
4100 			      /* If the constraints were not met, then
4101 				 undo the replacement.  */
4102 			      if (!n)
4103 				{
4104 				  validate_replace_rtx (gen_rtx_fmt_e (code,
4105 								       mode,
4106 								       reload_reg),
4107 							reload_reg, p);
4108 				  break;
4109 				}
4110 
4111 			    }
4112 			  break;
4113 			}
4114 		      if (n == 1)
4115 			{
4116 			  REG_NOTES (p)
4117 			    = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4118 						 REG_NOTES (p));
4119 			  /* Mark this as having an output reload so that the
4120 			     REG_INC processing code below won't invalidate
4121 			     the reload for inheritance.  */
4122 			  SET_HARD_REG_BIT (reg_is_output_reload,
4123 					    REGNO (reload_reg));
4124 			  SET_REGNO_REG_SET (&reg_has_output_reload,
4125 					     REGNO (XEXP (in_reg, 0)));
4126 			}
4127 		      else
4128 			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4129 					      NULL);
4130 		    }
4131 		  else if ((code == PRE_INC || code == PRE_DEC)
4132 			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
4133 						 REGNO (rld[i].reg_rtx))
4134 			   /* Make sure it is the inc/dec pseudo, and not
4135 			      some other (e.g. output operand) pseudo.  */
4136 			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4137 			       == REGNO (XEXP (in_reg, 0))))
4138 		    {
4139 		      SET_HARD_REG_BIT (reg_is_output_reload,
4140 					REGNO (rld[i].reg_rtx));
4141 		      SET_REGNO_REG_SET (&reg_has_output_reload,
4142 					 REGNO (XEXP (in_reg, 0)));
4143 		    }
4144 		}
4145 	    }
4146 	  /* If a pseudo that got a hard register is auto-incremented,
4147 	     we must purge records of copying it into pseudos without
4148 	     hard registers.  */
4149 	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4150 	    if (REG_NOTE_KIND (x) == REG_INC)
4151 	      {
4152 		/* See if this pseudo reg was reloaded in this insn.
4153 		   If so, its last-reload info is still valid
4154 		   because it is based on this insn's reload.  */
4155 		for (i = 0; i < n_reloads; i++)
4156 		  if (rld[i].out == XEXP (x, 0))
4157 		    break;
4158 
4159 		if (i == n_reloads)
4160 		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4161 	      }
4162 #endif
4163 	}
4164       /* A reload reg's contents are unknown after a label.  */
4165       if (LABEL_P (insn))
4166 	CLEAR_HARD_REG_SET (reg_reloaded_valid);
4167 
4168       /* Don't assume a reload reg is still good after a call insn
4169 	 if it is a call-used reg, or if it contains a value that will
4170          be partially clobbered by the call.  */
4171       else if (CALL_P (insn))
4172 	{
4173 	AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4174 	AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4175 	}
4176     }
4177 
4178   /* Clean up.  */
4179   free (reg_last_reload_reg);
4180   CLEAR_REG_SET (&reg_has_output_reload);
4181 }
4182 
4183 /* Discard all record of any value reloaded from X,
4184    or reloaded in X from someplace else;
4185    unless X is an output reload reg of the current insn.
4186 
4187    X may be a hard reg (the reload reg)
4188    or it may be a pseudo reg that was reloaded from.
4189 
4190    When DATA is non-NULL just mark the registers in regset
4191    to be forgotten later.  */
4192 
4193 static void
4194 forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
4195 		      void *data)
4196 {
4197   unsigned int regno;
4198   unsigned int nr;
4199   regset regs = (regset) data;
4200 
4201   /* note_stores does give us subregs of hard regs, but
4202      subreg_regno_offset requires a hard reg.  */
4203   while (GET_CODE (x) == SUBREG)
4204     {
4205       /* We ignore the subreg offset when calculating the regno,
4206 	 because we are using the entire underlying hard register
4207 	 below.  */
4208       x = SUBREG_REG (x);
4209     }
4210 
4211   if (!REG_P (x))
4212     return;
4213 
4214   regno = REGNO (x);
4215 
4216   if (regno >= FIRST_PSEUDO_REGISTER)
4217     nr = 1;
4218   else
4219     {
4220       unsigned int i;
4221 
4222       nr = hard_regno_nregs[regno][GET_MODE (x)];
4223       /* Storing into a spilled-reg invalidates its contents.
4224 	 This can happen if a block-local pseudo is allocated to that reg
4225 	 and it wasn't spilled because this block's total need is 0.
4226 	 Then some insn might have an optional reload and use this reg.  */
4227       if (!regs)
4228 	for (i = 0; i < nr; i++)
4229 	  /* But don't do this if the reg actually serves as an output
4230 	     reload reg in the current instruction.  */
4231 	  if (n_reloads == 0
4232 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4233 	    {
4234 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4235 	      CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4236 	      spill_reg_store[regno + i] = 0;
4237 	    }
4238     }
4239 
4240   if (regs)
4241     while (nr-- > 0)
4242       SET_REGNO_REG_SET (regs, regno + nr);
4243   else
4244     {
4245       /* Since value of X has changed,
4246 	 forget any value previously copied from it.  */
4247 
4248       while (nr-- > 0)
4249 	/* But don't forget a copy if this is the output reload
4250 	   that establishes the copy's validity.  */
4251 	if (n_reloads == 0
4252 	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4253 	  reg_last_reload_reg[regno + nr] = 0;
4254      }
4255 }
4256 
4257 /* Forget the reloads marked in regset by previous function.  */
4258 static void
4259 forget_marked_reloads (regset regs)
4260 {
4261   unsigned int reg;
4262   reg_set_iterator rsi;
4263   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4264     {
4265       if (reg < FIRST_PSEUDO_REGISTER
4266 	  /* But don't do this if the reg actually serves as an output
4267 	     reload reg in the current instruction.  */
4268 	  && (n_reloads == 0
4269 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4270 	  {
4271 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4272 	    CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, reg);
4273 	    spill_reg_store[reg] = 0;
4274 	  }
4275       if (n_reloads == 0
4276 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4277 	reg_last_reload_reg[reg] = 0;
4278     }
4279 }
4280 
4281 /* The following HARD_REG_SETs indicate when each hard register is
4282    used for a reload of various parts of the current insn.  */
4283 
4284 /* If reg is unavailable for all reloads.  */
4285 static HARD_REG_SET reload_reg_unavailable;
4286 /* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4287 static HARD_REG_SET reload_reg_used;
4288 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4289 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4290 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4291 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4292 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4293 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4294 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4295 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4296 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4297 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4298 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4299 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4300 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4301 static HARD_REG_SET reload_reg_used_in_op_addr;
4302 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4303 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4304 /* If reg is in use for a RELOAD_FOR_INSN reload.  */
4305 static HARD_REG_SET reload_reg_used_in_insn;
4306 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4307 static HARD_REG_SET reload_reg_used_in_other_addr;
4308 
4309 /* If reg is in use as a reload reg for any sort of reload.  */
4310 static HARD_REG_SET reload_reg_used_at_all;
4311 
4312 /* If reg is in use as an inherited reload.  We just mark the first register
4313    in the group.  */
4314 static HARD_REG_SET reload_reg_used_for_inherit;
4315 
4316 /* Records which hard regs are used in any way, either as explicit use or
4317    by being allocated to a pseudo during any point of the current insn.  */
4318 static HARD_REG_SET reg_used_in_insn;
4319 
4320 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4321    TYPE. MODE is used to indicate how many consecutive regs are
4322    actually used.  */
4323 
4324 static void
4325 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4326 			enum machine_mode mode)
4327 {
4328   unsigned int nregs = hard_regno_nregs[regno][mode];
4329   unsigned int i;
4330 
4331   for (i = regno; i < nregs + regno; i++)
4332     {
4333       switch (type)
4334 	{
4335 	case RELOAD_OTHER:
4336 	  SET_HARD_REG_BIT (reload_reg_used, i);
4337 	  break;
4338 
4339 	case RELOAD_FOR_INPUT_ADDRESS:
4340 	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4341 	  break;
4342 
4343 	case RELOAD_FOR_INPADDR_ADDRESS:
4344 	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4345 	  break;
4346 
4347 	case RELOAD_FOR_OUTPUT_ADDRESS:
4348 	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4349 	  break;
4350 
4351 	case RELOAD_FOR_OUTADDR_ADDRESS:
4352 	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4353 	  break;
4354 
4355 	case RELOAD_FOR_OPERAND_ADDRESS:
4356 	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4357 	  break;
4358 
4359 	case RELOAD_FOR_OPADDR_ADDR:
4360 	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4361 	  break;
4362 
4363 	case RELOAD_FOR_OTHER_ADDRESS:
4364 	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4365 	  break;
4366 
4367 	case RELOAD_FOR_INPUT:
4368 	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4369 	  break;
4370 
4371 	case RELOAD_FOR_OUTPUT:
4372 	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4373 	  break;
4374 
4375 	case RELOAD_FOR_INSN:
4376 	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4377 	  break;
4378 	}
4379 
4380       SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4381     }
4382 }
4383 
4384 /* Similarly, but show REGNO is no longer in use for a reload.  */
4385 
4386 static void
4387 clear_reload_reg_in_use (unsigned int regno, int opnum,
4388 			 enum reload_type type, enum machine_mode mode)
4389 {
4390   unsigned int nregs = hard_regno_nregs[regno][mode];
4391   unsigned int start_regno, end_regno, r;
4392   int i;
4393   /* A complication is that for some reload types, inheritance might
4394      allow multiple reloads of the same types to share a reload register.
4395      We set check_opnum if we have to check only reloads with the same
4396      operand number, and check_any if we have to check all reloads.  */
4397   int check_opnum = 0;
4398   int check_any = 0;
4399   HARD_REG_SET *used_in_set;
4400 
4401   switch (type)
4402     {
4403     case RELOAD_OTHER:
4404       used_in_set = &reload_reg_used;
4405       break;
4406 
4407     case RELOAD_FOR_INPUT_ADDRESS:
4408       used_in_set = &reload_reg_used_in_input_addr[opnum];
4409       break;
4410 
4411     case RELOAD_FOR_INPADDR_ADDRESS:
4412       check_opnum = 1;
4413       used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4414       break;
4415 
4416     case RELOAD_FOR_OUTPUT_ADDRESS:
4417       used_in_set = &reload_reg_used_in_output_addr[opnum];
4418       break;
4419 
4420     case RELOAD_FOR_OUTADDR_ADDRESS:
4421       check_opnum = 1;
4422       used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4423       break;
4424 
4425     case RELOAD_FOR_OPERAND_ADDRESS:
4426       used_in_set = &reload_reg_used_in_op_addr;
4427       break;
4428 
4429     case RELOAD_FOR_OPADDR_ADDR:
4430       check_any = 1;
4431       used_in_set = &reload_reg_used_in_op_addr_reload;
4432       break;
4433 
4434     case RELOAD_FOR_OTHER_ADDRESS:
4435       used_in_set = &reload_reg_used_in_other_addr;
4436       check_any = 1;
4437       break;
4438 
4439     case RELOAD_FOR_INPUT:
4440       used_in_set = &reload_reg_used_in_input[opnum];
4441       break;
4442 
4443     case RELOAD_FOR_OUTPUT:
4444       used_in_set = &reload_reg_used_in_output[opnum];
4445       break;
4446 
4447     case RELOAD_FOR_INSN:
4448       used_in_set = &reload_reg_used_in_insn;
4449       break;
4450     default:
4451       gcc_unreachable ();
4452     }
4453   /* We resolve conflicts with remaining reloads of the same type by
4454      excluding the intervals of reload registers used by them from the
4455      interval of freed reload registers.  Since we only keep track of
4456      one set of interval bounds, we might have to exclude somewhat
4457      more than what would be necessary if we used a HARD_REG_SET here.
4458      But this should only happen very infrequently, so there should
4459      be no reason to worry about it.  */
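  /* For example (hypothetical register numbers): if registers 3..6 are
     being freed but a remaining reload of the same type still occupies
     4..5, the single interval kept here shrinks to just register 3;
     register 6 stays marked in use even though a full HARD_REG_SET would
     have let us free it as well.  */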
4460 
4461   start_regno = regno;
4462   end_regno = regno + nregs;
4463   if (check_opnum || check_any)
4464     {
4465       for (i = n_reloads - 1; i >= 0; i--)
4466 	{
4467 	  if (rld[i].when_needed == type
4468 	      && (check_any || rld[i].opnum == opnum)
4469 	      && rld[i].reg_rtx)
4470 	    {
4471 	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4472 	      unsigned int conflict_end
4473 		= (conflict_start
4474 		   + hard_regno_nregs[conflict_start][rld[i].mode]);
4475 
4476 	      /* If there is an overlap with the first to-be-freed register,
4477 		 adjust the interval start.  */
4478 	      if (conflict_start <= start_regno && conflict_end > start_regno)
4479 		start_regno = conflict_end;
4480 	      /* Otherwise, if there is a conflict with one of the other
4481 		 to-be-freed registers, adjust the interval end.  */
4482 	      if (conflict_start > start_regno && conflict_start < end_regno)
4483 		end_regno = conflict_start;
4484 	    }
4485 	}
4486     }
4487 
4488   for (r = start_regno; r < end_regno; r++)
4489     CLEAR_HARD_REG_BIT (*used_in_set, r);
4490 }
4491 
4492 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4493    specified by OPNUM and TYPE.  */
4494 
4495 static int
4496 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4497 {
4498   int i;
4499 
4500   /* In use for a RELOAD_OTHER means it's not available for anything.  */
4501   if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4502       || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4503     return 0;
4504 
4505   switch (type)
4506     {
4507     case RELOAD_OTHER:
4508       /* In use for anything means we can't use it for RELOAD_OTHER.  */
4509       if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4510 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4511 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4512 	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4513 	return 0;
4514 
4515       for (i = 0; i < reload_n_operands; i++)
4516 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4517 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4518 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4519 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4520 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4521 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4522 	  return 0;
4523 
4524       return 1;
4525 
4526     case RELOAD_FOR_INPUT:
4527       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4528 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4529 	return 0;
4530 
4531       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4532 	return 0;
4533 
4534       /* If it is used for some other input, can't use it.  */
4535       for (i = 0; i < reload_n_operands; i++)
4536 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4537 	  return 0;
4538 
4539       /* If it is used in a later operand's address, can't use it.  */
4540       for (i = opnum + 1; i < reload_n_operands; i++)
4541 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4542 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4543 	  return 0;
4544 
4545       return 1;
4546 
4547     case RELOAD_FOR_INPUT_ADDRESS:
4548       /* Can't use a register if it is used for an input address for this
4549 	 operand or used as an input in an earlier one.  */
4550       if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4551 	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4552 	return 0;
4553 
4554       for (i = 0; i < opnum; i++)
4555 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4556 	  return 0;
4557 
4558       return 1;
4559 
4560     case RELOAD_FOR_INPADDR_ADDRESS:
4561       /* Can't use a register if it is used for an input address
4562 	 for this operand or used as an input in an earlier
4563 	 one.  */
4564       if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4565 	return 0;
4566 
4567       for (i = 0; i < opnum; i++)
4568 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4569 	  return 0;
4570 
4571       return 1;
4572 
4573     case RELOAD_FOR_OUTPUT_ADDRESS:
4574       /* Can't use a register if it is used for an output address for this
4575 	 operand or used as an output in this or a later operand.  Note
4576 	 that multiple output operands are emitted in reverse order, so
4577 	 the conflicting ones are those with lower indices.  */
4578       if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4579 	return 0;
4580 
4581       for (i = 0; i <= opnum; i++)
4582 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4583 	  return 0;
4584 
4585       return 1;
4586 
4587     case RELOAD_FOR_OUTADDR_ADDRESS:
4588       /* Can't use a register if it is used for an output address
4589 	 for this operand or used as an output in this or a
4590 	 later operand.  Note that multiple output operands are
4591 	 emitted in reverse order, so the conflicting ones are
4592 	 those with lower indices.  */
4593       if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4594 	return 0;
4595 
4596       for (i = 0; i <= opnum; i++)
4597 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4598 	  return 0;
4599 
4600       return 1;
4601 
4602     case RELOAD_FOR_OPERAND_ADDRESS:
4603       for (i = 0; i < reload_n_operands; i++)
4604 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4605 	  return 0;
4606 
4607       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4608 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4609 
4610     case RELOAD_FOR_OPADDR_ADDR:
4611       for (i = 0; i < reload_n_operands; i++)
4612 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4613 	  return 0;
4614 
4615       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4616 
4617     case RELOAD_FOR_OUTPUT:
4618       /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4619 	 outputs, or an operand address for this or an earlier output.
4620 	 Note that multiple output operands are emitted in reverse order,
4621 	 so the conflicting ones are those with higher indices.  */
4622       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4623 	return 0;
4624 
4625       for (i = 0; i < reload_n_operands; i++)
4626 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4627 	  return 0;
4628 
4629       for (i = opnum; i < reload_n_operands; i++)
4630 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4631 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4632 	  return 0;
4633 
4634       return 1;
4635 
4636     case RELOAD_FOR_INSN:
4637       for (i = 0; i < reload_n_operands; i++)
4638 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4639 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4640 	  return 0;
4641 
4642       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4643 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4644 
4645     case RELOAD_FOR_OTHER_ADDRESS:
4646       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4647 
4648     default:
4649       gcc_unreachable ();
4650     }
4651 }
4652 
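/* Illustrative example (register and operand numbers are
   hypothetical): suppose hard reg 3 is recorded in
   reload_reg_used_in_input_addr[2] and is not otherwise in use.  Then
   reload_reg_free_p (3, 1, RELOAD_FOR_INPUT) returns 0, because an
   input reload for operand 1 stays live until the insn executes and
   so overlaps the address reload of the later operand 2; but
   reload_reg_free_p (3, 0, RELOAD_FOR_INPUT_ADDRESS) still returns 1,
   since that case only checks this operand's own address reloads and
   inputs of earlier operands.  */
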
4653 /* Return 1 if the value in reload reg REGNO, as used by a reload
4654    needed for the part of the insn specified by OPNUM and TYPE,
4655    is still available in REGNO at the end of the insn.
4656 
4657    We can assume that the reload reg was already tested for availability
4658    at the time it is needed, and we should not check this again,
4659    in case the reg has already been marked in use.  */
4660 
4661 static int
4662 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4663 {
4664   int i;
4665 
4666   switch (type)
4667     {
4668     case RELOAD_OTHER:
4669       /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4670 	 its value must reach the end.  */
4671       return 1;
4672 
4673       /* If this use is for part of the insn,
4674 	 its value reaches if no subsequent part uses the same register.
4675 	 Just like the above function, don't try to do this with lots
4676 	 of fallthroughs.  */
4677 
4678     case RELOAD_FOR_OTHER_ADDRESS:
4679       /* Here we check for everything else, since these don't conflict
4680 	 with anything else and everything comes later.  */
4681 
4682       for (i = 0; i < reload_n_operands; i++)
4683 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4684 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4685 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4686 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4687 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4688 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4689 	  return 0;
4690 
4691       return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4692 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4693 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4694 	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4695 
4696     case RELOAD_FOR_INPUT_ADDRESS:
4697     case RELOAD_FOR_INPADDR_ADDRESS:
4698       /* Similar, except that we check only for this and subsequent inputs
4699 	 and the address of only subsequent inputs and we do not need
4700 	 to check for RELOAD_OTHER objects since they are known not to
4701 	 conflict.  */
4702 
4703       for (i = opnum; i < reload_n_operands; i++)
4704 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4705 	  return 0;
4706 
4707       for (i = opnum + 1; i < reload_n_operands; i++)
4708 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4709 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4710 	  return 0;
4711 
4712       for (i = 0; i < reload_n_operands; i++)
4713 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4714 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4715 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4716 	  return 0;
4717 
4718       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4719 	return 0;
4720 
4721       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4722 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4723 	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4724 
4725     case RELOAD_FOR_INPUT:
4726       /* Similar to input address, except we start at the next operand for
4727 	 both input and input address and we do not check for
4728 	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4729 	 would conflict.  */
4730 
4731       for (i = opnum + 1; i < reload_n_operands; i++)
4732 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4733 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4734 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4735 	  return 0;
4736 
4737       /* ... fall through ...  */
4738 
4739     case RELOAD_FOR_OPERAND_ADDRESS:
4740       /* Check outputs and their addresses.  */
4741 
4742       for (i = 0; i < reload_n_operands; i++)
4743 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4744 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4745 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4746 	  return 0;
4747 
4748       return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4749 
4750     case RELOAD_FOR_OPADDR_ADDR:
4751       for (i = 0; i < reload_n_operands; i++)
4752 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4753 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4754 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4755 	  return 0;
4756 
4757       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4758 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4759 	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4760 
4761     case RELOAD_FOR_INSN:
4762       /* These conflict with other outputs with RELOAD_OTHER.  So
4763 	 we need only check for output addresses.  */
4764 
4765       opnum = reload_n_operands;
4766 
4767       /* ... fall through ...  */
4768 
4769     case RELOAD_FOR_OUTPUT:
4770     case RELOAD_FOR_OUTPUT_ADDRESS:
4771     case RELOAD_FOR_OUTADDR_ADDRESS:
4772       /* We already know these can't conflict with a later output.  So the
4773 	 only thing to check are later output addresses.
4774 	 Note that multiple output operands are emitted in reverse order,
4775 	 so the conflicting ones are those with lower indices.  */
4776       for (i = 0; i < opnum; i++)
4777 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4778 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4779 	  return 0;
4780 
4781       return 1;
4782 
4783     default:
4784       gcc_unreachable ();
4785     }
4786 }
4787 
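/* Illustrative example for the output cases above: output reloads are
   emitted in reverse operand order, so a register used for
   RELOAD_FOR_OUTPUT_ADDRESS of operand 2 is used before the stores
   for operands 1 and 0; its value therefore reaches the end of the
   insn only if it is not also recorded in
   reload_reg_used_in_output_addr[] or reload_reg_used_in_outaddr_addr[]
   for an operand with a lower index.  */
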
4788 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4789    Return 0 otherwise.
4790 
4791    This function uses the same algorithm as reload_reg_free_p above.  */
4792 
4793 static int
4794 reloads_conflict (int r1, int r2)
4795 {
4796   enum reload_type r1_type = rld[r1].when_needed;
4797   enum reload_type r2_type = rld[r2].when_needed;
4798   int r1_opnum = rld[r1].opnum;
4799   int r2_opnum = rld[r2].opnum;
4800 
4801   /* RELOAD_OTHER conflicts with everything.  */
4802   if (r2_type == RELOAD_OTHER)
4803     return 1;
4804 
4805   /* Otherwise, check conflicts differently for each type.  */
4806 
4807   switch (r1_type)
4808     {
4809     case RELOAD_FOR_INPUT:
4810       return (r2_type == RELOAD_FOR_INSN
4811 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4812 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
4813 	      || r2_type == RELOAD_FOR_INPUT
4814 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4815 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4816 		  && r2_opnum > r1_opnum));
4817 
4818     case RELOAD_FOR_INPUT_ADDRESS:
4819       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4820 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4821 
4822     case RELOAD_FOR_INPADDR_ADDRESS:
4823       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4824 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4825 
4826     case RELOAD_FOR_OUTPUT_ADDRESS:
4827       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4828 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4829 
4830     case RELOAD_FOR_OUTADDR_ADDRESS:
4831       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4832 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4833 
4834     case RELOAD_FOR_OPERAND_ADDRESS:
4835       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4836 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4837 
4838     case RELOAD_FOR_OPADDR_ADDR:
4839       return (r2_type == RELOAD_FOR_INPUT
4840 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
4841 
4842     case RELOAD_FOR_OUTPUT:
4843       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4844 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4845 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4846 		  && r2_opnum >= r1_opnum));
4847 
4848     case RELOAD_FOR_INSN:
4849       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4850 	      || r2_type == RELOAD_FOR_INSN
4851 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4852 
4853     case RELOAD_FOR_OTHER_ADDRESS:
4854       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4855 
4856     case RELOAD_OTHER:
4857       return 1;
4858 
4859     default:
4860       gcc_unreachable ();
4861     }
4862 }
4863 
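/* Worked example (reload and operand numbers are hypothetical): if
   rld[0] is a RELOAD_FOR_INPUT for operand 0 and rld[1] is a
   RELOAD_FOR_INPUT_ADDRESS for operand 1, then reloads_conflict (0, 1)
   is 1, since the address of a later input is computed while the
   operand-0 input register is already live.  By contrast, a
   RELOAD_FOR_INPUT_ADDRESS for operand 1 checked against a
   RELOAD_FOR_INPUT for operand 2 yields 0 (r2_opnum < r1_opnum does
   not hold), so those two may share a register.  */
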
4864 /* Indexed by reload number, 1 if incoming value
4865    inherited from previous insns.  */
4866 static char reload_inherited[MAX_RELOADS];
4867 
4868 /* For an inherited reload, this is the insn the reload was inherited from,
4869    if we know it.  Otherwise, this is 0.  */
4870 static rtx reload_inheritance_insn[MAX_RELOADS];
4871 
4872 /* If nonzero, this is a place to get the value of the reload,
4873    rather than using reload_in.  */
4874 static rtx reload_override_in[MAX_RELOADS];
4875 
4876 /* For each reload, the hard register number of the register used,
4877    or -1 if we did not need a register for this reload.  */
4878 static int reload_spill_index[MAX_RELOADS];
4879 
4880 /* Subroutine of free_for_value_p, used to check a single register.
4881    START_REGNO is the starting regno of the full reload register
4882    (possibly comprising multiple hard registers) that we are considering.  */
4883 
4884 static int
4885 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
4886 			     enum reload_type type, rtx value, rtx out,
4887 			     int reloadnum, int ignore_address_reloads)
4888 {
4889   int time1;
4890   /* Set if we see an input reload that must not share its reload register
4891      with any new earlyclobber, but might otherwise share the reload
4892      register with an output or input-output reload.  */
4893   int check_earlyclobber = 0;
4894   int i;
4895   int copy = 0;
4896 
4897   if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4898     return 0;
4899 
4900   if (out == const0_rtx)
4901     {
4902       copy = 1;
4903       out = NULL_RTX;
4904     }
4905 
4906   /* We use some pseudo 'time' value to check if the lifetimes of the
4907      new register use would overlap with the one of a previous reload
4908      that is not read-only or uses a different value.
4909      The 'time' used doesn't have to be linear in any shape or form, just
4910      monotonic.
4911      Some reload types use different 'buckets' for each operand.
4912      So there are MAX_RECOG_OPERANDS different time values for each
4913      such reload type.
4914      We compute TIME1 as the time when the register for the prospective
4915      new reload ceases to be live, and TIME2 for each existing
4916      reload as the time when the reload register of that reload
4917      becomes live.
4918      Where there is little to be gained by exact lifetime calculations,
4919      we just make conservative assumptions, i.e. a longer lifetime;
4920      this is done in the 'default:' cases.  */
4921   switch (type)
4922     {
4923     case RELOAD_FOR_OTHER_ADDRESS:
4924       /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
4925       time1 = copy ? 0 : 1;
4926       break;
4927     case RELOAD_OTHER:
4928       time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4929       break;
4930       /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4931 	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2,
4932 	 respectively, to the time values for these, we get distinct time
4933 	 values.  To get distinct time values for each operand, we have to
4934 	 multiply opnum by at least three.  We round that up to four because
4935 	 multiplying by four is often cheaper.  */
4936     case RELOAD_FOR_INPADDR_ADDRESS:
4937       time1 = opnum * 4 + 2;
4938       break;
4939     case RELOAD_FOR_INPUT_ADDRESS:
4940       time1 = opnum * 4 + 3;
4941       break;
4942     case RELOAD_FOR_INPUT:
4943       /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4944 	 executes (inclusive).  */
4945       time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4946       break;
4947     case RELOAD_FOR_OPADDR_ADDR:
4948       /* opnum * 4 + 4
4949 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4950       time1 = MAX_RECOG_OPERANDS * 4 + 1;
4951       break;
4952     case RELOAD_FOR_OPERAND_ADDRESS:
4953       /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4954 	 is executed.  */
4955       time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4956       break;
4957     case RELOAD_FOR_OUTADDR_ADDRESS:
4958       time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4959       break;
4960     case RELOAD_FOR_OUTPUT_ADDRESS:
4961       time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4962       break;
4963     default:
4964       time1 = MAX_RECOG_OPERANDS * 5 + 5;
4965     }
4966 
4967   for (i = 0; i < n_reloads; i++)
4968     {
4969       rtx reg = rld[i].reg_rtx;
4970       if (reg && REG_P (reg)
4971 	  && ((unsigned) regno - true_regnum (reg)
4972 	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
4973 	  && i != reloadnum)
4974 	{
4975 	  rtx other_input = rld[i].in;
4976 
4977 	  /* The other reload loading the same input value avoids a
4978 	     conflict only if it loads that value into the same
4979 	     register.  */
4980 	  if (true_regnum (reg) != start_regno)
4981 	    other_input = NULL_RTX;
4982 	  if (! other_input || ! rtx_equal_p (other_input, value)
4983 	      || rld[i].out || out)
4984 	    {
4985 	      int time2;
4986 	      switch (rld[i].when_needed)
4987 		{
4988 		case RELOAD_FOR_OTHER_ADDRESS:
4989 		  time2 = 0;
4990 		  break;
4991 		case RELOAD_FOR_INPADDR_ADDRESS:
4992 		  /* find_reloads makes sure that a
4993 		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is used
4994 		     by at most one - the first -
4995 		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS.  If that
4996 		     address reload is inherited, the address-of-address reload
4997 		     goes away, so we can ignore this conflict.  */
4998 		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4999 		      && ignore_address_reloads
5000 		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5001 			 Then the address-of-address reload is still needed
5002 			 to store back the new address.  */
5003 		      && ! rld[reloadnum].out)
5004 		    continue;
5005 		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5006 		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5007 		     reloads go away.  */
5008 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5009 		      && ignore_address_reloads
5010 		      /* Unless we are reloading an auto_inc expression.  */
5011 		      && ! rld[reloadnum].out)
5012 		    continue;
5013 		  time2 = rld[i].opnum * 4 + 2;
5014 		  break;
5015 		case RELOAD_FOR_INPUT_ADDRESS:
5016 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5017 		      && ignore_address_reloads
5018 		      && ! rld[reloadnum].out)
5019 		    continue;
5020 		  time2 = rld[i].opnum * 4 + 3;
5021 		  break;
5022 		case RELOAD_FOR_INPUT:
5023 		  time2 = rld[i].opnum * 4 + 4;
5024 		  check_earlyclobber = 1;
5025 		  break;
5026 		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5027 		     == MAX_RECOG_OPERANDS * 4  */
5028 		case RELOAD_FOR_OPADDR_ADDR:
5029 		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5030 		      && ignore_address_reloads
5031 		      && ! rld[reloadnum].out)
5032 		    continue;
5033 		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5034 		  break;
5035 		case RELOAD_FOR_OPERAND_ADDRESS:
5036 		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5037 		  check_earlyclobber = 1;
5038 		  break;
5039 		case RELOAD_FOR_INSN:
5040 		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5041 		  break;
5042 		case RELOAD_FOR_OUTPUT:
5043 		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5044 		     instruction is executed.  */
5045 		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5046 		  break;
5047 		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5048 		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5049 		     value.  */
5050 		case RELOAD_FOR_OUTADDR_ADDRESS:
5051 		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5052 		      && ignore_address_reloads
5053 		      && ! rld[reloadnum].out)
5054 		    continue;
5055 		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5056 		  break;
5057 		case RELOAD_FOR_OUTPUT_ADDRESS:
5058 		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5059 		  break;
5060 		case RELOAD_OTHER:
5061 		  /* If there is no conflict in the input part, handle this
5062 		     like an output reload.  */
5063 		  if (! rld[i].in || rtx_equal_p (other_input, value))
5064 		    {
5065 		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5066 		      /* Earlyclobbered outputs must conflict with inputs.  */
5067 		      if (earlyclobber_operand_p (rld[i].out))
5068 			time2 = MAX_RECOG_OPERANDS * 4 + 3;
5069 
5070 		      break;
5071 		    }
5072 		  time2 = 1;
5073 		  /* RELOAD_OTHER might be live beyond instruction execution,
5074 		     but this is not obvious when we set time2 = 1.  So check
5075 		     here if there might be a problem with the new reload
5076 		     clobbering the register used by the RELOAD_OTHER.  */
5077 		  if (out)
5078 		    return 0;
5079 		  break;
5080 		default:
5081 		  return 0;
5082 		}
5083 	      if ((time1 >= time2
5084 		   && (! rld[i].in || rld[i].out
5085 		       || ! rtx_equal_p (other_input, value)))
5086 		  || (out && rld[reloadnum].out_reg
5087 		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5088 		return 0;
5089 	    }
5090 	}
5091     }
5092 
5093   /* Earlyclobbered outputs must conflict with inputs.  */
5094   if (check_earlyclobber && out && earlyclobber_operand_p (out))
5095     return 0;
5096 
5097   return 1;
5098 }
5099 
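/* Worked example of the TIME1/TIME2 comparison above (operand numbers
   are hypothetical): a prospective non-copy RELOAD_FOR_INPUT_ADDRESS
   for operand 1 gets TIME1 = 1 * 4 + 3 = 7.  An existing
   RELOAD_FOR_INPUT for operand 0 gets TIME2 = 0 * 4 + 4 = 4, so
   TIME1 >= TIME2 and the two can share a register only if they load
   the same value into the same reload register and neither has an
   output part.  Against an existing RELOAD_FOR_INPUT for operand 2,
   TIME2 = 12 > TIME1, so no conflict is recorded, provided the
   prospective reload has no output part.  */
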
5100 /* Return 1 if the value in reload reg REGNO, as used by a reload
5101    needed for the part of the insn specified by OPNUM and TYPE,
5102    may be used to load VALUE into it.
5103 
5104    MODE is the mode in which the register is used, this is needed to
5105    determine how many hard regs to test.
5106 
5107    Other read-only reloads with the same value do not conflict
5108    unless OUT is nonzero and these other reloads have to live while
5109    output reloads live.
5110    If OUT is CONST0_RTX, this is a special case: it means that the
5111    test should not be for using register REGNO as reload register, but
5112    for copying from register REGNO into the reload register.
5113 
5114    RELOADNUM is the number of the reload we want to load this value for;
5115    a reload does not conflict with itself.
5116 
5117    When IGNORE_ADDRESS_RELOADS is set, we do not record conflicts with
5118    reloads that load an address for the very reload we are considering.
5119 
5120    The caller has to make sure that there is no conflict with the return
5121    register.  */
5122 
5123 static int
5124 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5125 		  enum reload_type type, rtx value, rtx out, int reloadnum,
5126 		  int ignore_address_reloads)
5127 {
5128   int nregs = hard_regno_nregs[regno][mode];
5129   while (nregs-- > 0)
5130     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5131 				       value, out, reloadnum,
5132 				       ignore_address_reloads))
5133       return 0;
5134   return 1;
5135 }
5136 
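/* This simply repeats the per-register check for every hard register
   covered by MODE: e.g. if MODE occupies two hard registers starting
   at REGNO, both REGNO and REGNO + 1 must pass
   reload_reg_free_for_value_p, with START_REGNO always passed as
   REGNO so that the same-register test refers to the full reload
   register.  */
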
5137 /* Return nonzero if the rtx X is invariant over the current function.  */
5138 /* ??? Actually, the places where we use this expect exactly what is
5139    tested here, and not everything that is function invariant.  In
5140    particular, the frame pointer and arg pointer are special cased;
5141    pic_offset_table_rtx is not, and we must not spill these things to
5142    memory.  */
5143 
5144 int
5145 function_invariant_p (rtx x)
5146 {
5147   if (CONSTANT_P (x))
5148     return 1;
5149   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5150     return 1;
5151   if (GET_CODE (x) == PLUS
5152       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5153       && CONSTANT_P (XEXP (x, 1)))
5154     return 1;
5155   return 0;
5156 }
5157 
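/* Illustrative examples: (const_int 4), a SYMBOL_REF,
   frame_pointer_rtx and (plus frame_pointer_rtx (const_int 8)) all
   return 1 here; a bare (plus pic_offset_table_rtx (const_int 8)) or
   an ordinary pseudo register returns 0, matching the caveat above
   about pic_offset_table_rtx not being special-cased.  */
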
5158 /* Determine whether the reload reg X overlaps any rtx'es used for
5159    overriding inheritance.  Return nonzero if so.  */
5160 
5161 static int
5162 conflicts_with_override (rtx x)
5163 {
5164   int i;
5165   for (i = 0; i < n_reloads; i++)
5166     if (reload_override_in[i]
5167 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
5168       return 1;
5169   return 0;
5170 }
5171 
5172 /* Give an error message saying we failed to find a reload for INSN,
5173    and clear out reload R.  */
5174 static void
5175 failed_reload (rtx insn, int r)
5176 {
5177   if (asm_noperands (PATTERN (insn)) < 0)
5178     /* It's the compiler's fault.  */
5179     fatal_insn ("could not find a spill register", insn);
5180 
5181   /* It's the user's fault; the operand's mode and constraint
5182      don't match.  Disable this reload so we don't crash in final.  */
5183   error_for_asm (insn,
5184 		 "%<asm%> operand constraint incompatible with operand size");
5185   rld[r].in = 0;
5186   rld[r].out = 0;
5187   rld[r].reg_rtx = 0;
5188   rld[r].optional = 1;
5189   rld[r].secondary_p = 1;
5190 }
5191 
5192 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5193    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
5194    successful.  */
5195 static int
5196 set_reload_reg (int i, int r)
5197 {
5198   int regno;
5199   rtx reg = spill_reg_rtx[i];
5200 
5201   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5202     spill_reg_rtx[i] = reg
5203       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5204 
5205   regno = true_regnum (reg);
5206 
5207   /* Detect when the reload reg can't hold the reload mode.
5208      This used to be one `if', but Sequent compiler can't handle that.  */
5209   if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5210     {
5211       enum machine_mode test_mode = VOIDmode;
5212       if (rld[r].in)
5213 	test_mode = GET_MODE (rld[r].in);
5214       /* If rld[r].in has VOIDmode, it means we will load it
5215 	 in whatever mode the reload reg has: to wit, rld[r].mode.
5216 	 We have already tested that for validity.  */
5217       /* Aside from that, we need to test that the expressions
5218 	 to reload from or into have modes which are valid for this
5219 	 reload register.  Otherwise the reload insns would be invalid.  */
5220       if (! (rld[r].in != 0 && test_mode != VOIDmode
5221 	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5222 	if (! (rld[r].out != 0
5223 	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5224 	  {
5225 	    /* The reg is OK.  */
5226 	    last_spill_reg = i;
5227 
5228 	    /* Mark as in use for this insn the reload regs we use
5229 	       for this.  */
5230 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5231 				    rld[r].when_needed, rld[r].mode);
5232 
5233 	    rld[r].reg_rtx = reg;
5234 	    reload_spill_index[r] = spill_regs[i];
5235 	    return 1;
5236 	  }
5237     }
5238   return 0;
5239 }
5240 
5241 /* Find a spill register to use as a reload register for reload R.
5242    LAST_RELOAD is nonzero if this is the last reload for the insn being
5243    processed.
5244 
5245    Set rld[R].reg_rtx to the register allocated.
5246 
5247    We return 1 if successful, or 0 if we couldn't find a spill reg and
5248    we didn't change anything.  */
5249 
5250 static int
5251 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5252 		     int last_reload)
5253 {
5254   int i, pass, count;
5255 
5256   /* If we put this reload ahead, thinking it is a group,
5257      then insist on finding a group.  Otherwise we can grab a
5258      reg that some other reload needs.
5259      (That can happen when we have a 68000 DATA_OR_FP_REG
5260      which is a group of data regs or one fp reg.)
5261      We need not be so restrictive if there are no more reloads
5262      for this insn.
5263 
5264      ??? Really it would be nicer to have smarter handling
5265      for that kind of reg class, where a problem like this is normal.
5266      Perhaps those classes should be avoided for reloading
5267      by use of more alternatives.  */
5268 
5269   int force_group = rld[r].nregs > 1 && ! last_reload;
5270 
5271   /* If we want a single register and haven't yet found one,
5272      take any reg in the right class and not in use.
5273      If we want a consecutive group, here is where we look for it.
5274 
5275      We use two passes so we can first look for reload regs to
5276      reuse, which are already in use for other reloads in this insn,
5277      and only then use additional registers.
5278      I think that maximizing reuse is needed to make sure we don't
5279      run out of reload regs.  Suppose we have three reloads, and
5280      reloads A and B can share regs.  These need two regs.
5281      Suppose A and B are given different regs.
5282      That leaves none for C.  */
5283   for (pass = 0; pass < 2; pass++)
5284     {
5285       /* I is the index in spill_regs.
5286 	 We advance it round-robin between insns to use all spill regs
5287 	 equally, so that inherited reloads have a chance
5288 	 of leapfrogging each other.  */
5289 
5290       i = last_spill_reg;
5291 
5292       for (count = 0; count < n_spills; count++)
5293 	{
5294 	  int class = (int) rld[r].class;
5295 	  int regnum;
5296 
5297 	  i++;
5298 	  if (i >= n_spills)
5299 	    i -= n_spills;
5300 	  regnum = spill_regs[i];
5301 
5302 	  if ((reload_reg_free_p (regnum, rld[r].opnum,
5303 				  rld[r].when_needed)
5304 	       || (rld[r].in
5305 		   /* We check reload_reg_used to make sure we
5306 		      don't clobber the return register.  */
5307 		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5308 		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5309 					rld[r].when_needed, rld[r].in,
5310 					rld[r].out, r, 1)))
5311 	      && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5312 	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5313 	      /* Look first for regs to share, then for unshared.  But
5314 		 don't share regs used for inherited reloads; they are
5315 		 the ones we want to preserve.  */
5316 	      && (pass
5317 		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5318 					 regnum)
5319 		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5320 					      regnum))))
5321 	    {
5322 	      int nr = hard_regno_nregs[regnum][rld[r].mode];
5323 	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5324 		 (on 68000) got us two FP regs.  If NR is 1,
5325 		 we would reject both of them.  */
5326 	      if (force_group)
5327 		nr = rld[r].nregs;
5328 	      /* If we need only one reg, we have already won.  */
5329 	      if (nr == 1)
5330 		{
5331 		  /* But reject a single reg if we demand a group.  */
5332 		  if (force_group)
5333 		    continue;
5334 		  break;
5335 		}
5336 	      /* Otherwise check that as many consecutive regs as we need
5337 		 are available here.  */
5338 	      while (nr > 1)
5339 		{
5340 		  int regno = regnum + nr - 1;
5341 		  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5342 			&& spill_reg_order[regno] >= 0
5343 			&& reload_reg_free_p (regno, rld[r].opnum,
5344 					      rld[r].when_needed)))
5345 		    break;
5346 		  nr--;
5347 		}
5348 	      if (nr == 1)
5349 		break;
5350 	    }
5351 	}
5352 
5353       /* If we found something on pass 1, omit pass 2.  */
5354       if (count < n_spills)
5355 	break;
5356     }
5357 
5358   /* We should have found a spill register by now.  */
5359   if (count >= n_spills)
5360     return 0;
5361 
5362   /* I is the index in SPILL_REG_RTX of the reload register we are to
5363      allocate.  Get an rtx for it and find its register number.  */
5364 
5365   return set_reload_reg (i, r);
5366 }
5367 
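/* Note on the round-robin scan above: starting I just past
   LAST_SPILL_REG means that if the spill registers are, say,
   {r4, r5, r6} (hypothetical), an insn that was just given r4 leaves
   the next insn starting its search at r5, which is what gives
   inherited reloads in different registers a chance to survive
   neighbouring insns.  */
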
5368 /* Initialize all the tables needed to allocate reload registers.
5369    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5370    is the array we use to restore the reg_rtx field for every reload.  */
5371 
5372 static void
5373 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5374 {
5375   int i;
5376 
5377   for (i = 0; i < n_reloads; i++)
5378     rld[i].reg_rtx = save_reload_reg_rtx[i];
5379 
5380   memset (reload_inherited, 0, MAX_RELOADS);
5381   memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5382   memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5383 
5384   CLEAR_HARD_REG_SET (reload_reg_used);
5385   CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5386   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5387   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5388   CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5389   CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5390 
5391   CLEAR_HARD_REG_SET (reg_used_in_insn);
5392   {
5393     HARD_REG_SET tmp;
5394     REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5395     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5396     REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5397     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5398     compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5399     compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5400   }
5401 
5402   for (i = 0; i < reload_n_operands; i++)
5403     {
5404       CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5405       CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5406       CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5407       CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5408       CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5409       CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5410     }
5411 
5412   COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5413 
5414   CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5415 
5416   for (i = 0; i < n_reloads; i++)
5417     /* If we have already decided to use a certain register,
5418        don't use it in another way.  */
5419     if (rld[i].reg_rtx)
5420       mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5421 			      rld[i].when_needed, rld[i].mode);
5422 }
5423 
5424 /* Assign hard reg targets for the pseudo-registers we must reload
5425    into hard regs for this insn.
5426    Also output the instructions to copy them in and out of the hard regs.
5427 
5428    For machines with register classes, we are responsible for
5429    finding a reload reg in the proper class.  */
5430 
5431 static void
5432 choose_reload_regs (struct insn_chain *chain)
5433 {
5434   rtx insn = chain->insn;
5435   int i, j;
5436   unsigned int max_group_size = 1;
5437   enum reg_class group_class = NO_REGS;
5438   int pass, win, inheritance;
5439 
5440   rtx save_reload_reg_rtx[MAX_RELOADS];
5441 
5442   /* In order to be certain of getting the registers we need,
5443      we must sort the reloads into order of increasing register class.
5444      Then our grabbing of reload registers will parallel the process
5445      that provided the reload registers.
5446 
5447      Also note whether any of the reloads wants a consecutive group of regs.
5448      If so, record the maximum size of the group desired and what
5449      register class contains all the groups needed by this insn.  */
5450 
5451   for (j = 0; j < n_reloads; j++)
5452     {
5453       reload_order[j] = j;
5454       reload_spill_index[j] = -1;
5455 
5456       if (rld[j].nregs > 1)
5457 	{
5458 	  max_group_size = MAX (rld[j].nregs, max_group_size);
5459 	  group_class
5460 	    = reg_class_superunion[(int) rld[j].class][(int) group_class];
5461 	}
5462 
5463       save_reload_reg_rtx[j] = rld[j].reg_rtx;
5464     }
5465 
5466   if (n_reloads > 1)
5467     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5468 
5469   /* If -O, try first with inheritance, then turning it off.
5470      If not -O, don't do inheritance.
5471      Using inheritance when not optimizing leads to paradoxes
5472      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5473      because one side of the comparison might be inherited.  */
5474   win = 0;
5475   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5476     {
5477       choose_reload_regs_init (chain, save_reload_reg_rtx);
5478 
5479       /* Process the reloads in order of preference just found.
5480 	 Beyond this point, subregs can be found in reload_reg_rtx.
5481 
5482 	 This used to look for an existing reloaded home for all of the
5483 	 reloads, and only then perform any new reloads.  But that could lose
5484 	 if the reloads were done out of reg-class order because a later
5485 	 reload with a looser constraint might have an old home in a register
5486 	 needed by an earlier reload with a tighter constraint.
5487 
5488 	 To solve this, we make two passes over the reloads, in the order
5489 	 described above.  In the first pass we try to inherit a reload
5490 	 from a previous insn.  If there is a later reload that needs a
5491 	 class that is a proper subset of the class being processed, we must
5492 	 also allocate a spill register during the first pass.
5493 
5494 	 Then make a second pass over the reloads to allocate any reloads
5495 	 that haven't been given registers yet.  */
5496 
5497       for (j = 0; j < n_reloads; j++)
5498 	{
5499 	  int r = reload_order[j];
5500 	  rtx search_equiv = NULL_RTX;
5501 
5502 	  /* Ignore reloads that got marked inoperative.  */
5503 	  if (rld[r].out == 0 && rld[r].in == 0
5504 	      && ! rld[r].secondary_p)
5505 	    continue;
5506 
5507 	  /* If find_reloads chose to use reload_in or reload_out as a reload
5508 	     register, we don't need to choose one.  Otherwise, try even if it
5509 	     found one since we might save an insn if we find the value lying
5510 	     around.
5511 	     Try also when reload_in is a pseudo without a hard reg.  */
5512 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
5513 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5514 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5515 		      && !MEM_P (rld[r].in)
5516 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5517 	    continue;
5518 
5519 #if 0 /* No longer needed for correct operation.
5520 	 It might give better code, or might not; worth an experiment?  */
5521 	  /* If this is an optional reload, we can't inherit from earlier insns
5522 	     until we are sure that any non-optional reloads have been allocated.
5523 	     The following code takes advantage of the fact that optional reloads
5524 	     are at the end of reload_order.  */
5525 	  if (rld[r].optional != 0)
5526 	    for (i = 0; i < j; i++)
5527 	      if ((rld[reload_order[i]].out != 0
5528 		   || rld[reload_order[i]].in != 0
5529 		   || rld[reload_order[i]].secondary_p)
5530 		  && ! rld[reload_order[i]].optional
5531 		  && rld[reload_order[i]].reg_rtx == 0)
5532 		allocate_reload_reg (chain, reload_order[i], 0);
5533 #endif
5534 
5535 	  /* First see if this pseudo is already available as reloaded
5536 	     for a previous insn.  We cannot try to inherit for reloads
5537 	     that are smaller than the maximum number of registers needed
5538 	     for groups unless the register we would allocate cannot be used
5539 	     for the groups.
5540 
5541 	     We could check here to see if this is a secondary reload for
5542 	     an object that is already in a register of the desired class.
5543 	     This would avoid the need for the secondary reload register.
5544 	     But this is complex because we can't easily determine what
5545 	     objects might want to be loaded via this reload.  So let a
5546 	     register be allocated here.  In `emit_reload_insns' we suppress
5547 	     one of the loads in the case described above.  */
5548 
5549 	  if (inheritance)
5550 	    {
5551 	      int byte = 0;
5552 	      int regno = -1;
5553 	      enum machine_mode mode = VOIDmode;
5554 
5555 	      if (rld[r].in == 0)
5556 		;
5557 	      else if (REG_P (rld[r].in))
5558 		{
5559 		  regno = REGNO (rld[r].in);
5560 		  mode = GET_MODE (rld[r].in);
5561 		}
5562 	      else if (REG_P (rld[r].in_reg))
5563 		{
5564 		  regno = REGNO (rld[r].in_reg);
5565 		  mode = GET_MODE (rld[r].in_reg);
5566 		}
5567 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
5568 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
5569 		{
5570 		  byte = SUBREG_BYTE (rld[r].in_reg);
5571 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
5572 		  if (regno < FIRST_PSEUDO_REGISTER)
5573 		    regno = subreg_regno (rld[r].in_reg);
5574 		  mode = GET_MODE (rld[r].in_reg);
5575 		}
5576 #ifdef AUTO_INC_DEC
5577 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
5578 		       && REG_P (XEXP (rld[r].in_reg, 0)))
5579 		{
5580 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
5581 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5582 		  rld[r].out = rld[r].in;
5583 		}
5584 #endif
5585 #if 0
5586 	      /* This won't work, since REGNO can be a pseudo reg number.
5587 		 Also, it takes much more hair to keep track of all the things
5588 		 that can invalidate an inherited reload of part of a pseudoreg.  */
5589 	      else if (GET_CODE (rld[r].in) == SUBREG
5590 		       && REG_P (SUBREG_REG (rld[r].in)))
5591 		regno = subreg_regno (rld[r].in);
5592 #endif
5593 
5594 	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5595 		{
5596 		  enum reg_class class = rld[r].class, last_class;
5597 		  rtx last_reg = reg_last_reload_reg[regno];
5598 		  enum machine_mode need_mode;
5599 
5600 		  i = REGNO (last_reg);
5601 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5602 		  last_class = REGNO_REG_CLASS (i);
5603 
5604 		  if (byte == 0)
5605 		    need_mode = mode;
5606 		  else
5607 		    need_mode
5608 		      = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
5609 						+ byte * BITS_PER_UNIT,
5610 						GET_MODE_CLASS (mode));
5611 
5612 		  if ((GET_MODE_SIZE (GET_MODE (last_reg))
5613 		       >= GET_MODE_SIZE (need_mode))
5614 #ifdef CANNOT_CHANGE_MODE_CLASS
5615 		      /* Verify that the register in "i" can be obtained
5616 			 from LAST_REG.  */
5617 		      && !REG_CANNOT_CHANGE_MODE_P (REGNO (last_reg),
5618 						    GET_MODE (last_reg),
5619 						    mode)
5620 #endif
5621 		      && reg_reloaded_contents[i] == regno
5622 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5623 		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
5624 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5625 			  /* Even if we can't use this register as a reload
5626 			     register, we might use it for reload_override_in,
5627 			     if copying it to the desired class is cheap
5628 			     enough.  */
5629 			  || ((REGISTER_MOVE_COST (mode, last_class, class)
5630 			       < MEMORY_MOVE_COST (mode, class, 1))
5631 			      && (secondary_reload_class (1, class, mode,
5632 							  last_reg)
5633 				  == NO_REGS)
5634 #ifdef SECONDARY_MEMORY_NEEDED
5635 			      && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5636 							    mode)
5637 #endif
5638 			      ))
5639 
5640 		      && (rld[r].nregs == max_group_size
5641 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5642 						  i))
5643 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5644 					   rld[r].when_needed, rld[r].in,
5645 					   const0_rtx, r, 1))
5646 		    {
5647 		      /* If a group is needed, verify that all the subsequent
5648 			 registers still have their values intact.  */
5649 		      int nr = hard_regno_nregs[i][rld[r].mode];
5650 		      int k;
5651 
5652 		      for (k = 1; k < nr; k++)
5653 			if (reg_reloaded_contents[i + k] != regno
5654 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5655 			  break;
5656 
5657 		      if (k == nr)
5658 			{
5659 			  int i1;
5660 			  int bad_for_class;
5661 
5662 			  last_reg = (GET_MODE (last_reg) == mode
5663 				      ? last_reg : gen_rtx_REG (mode, i));
5664 
5665 			  bad_for_class = 0;
5666 			  for (k = 0; k < nr; k++)
5667 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5668 								  i+k);
5669 
5670 			  /* We found a register that contains the
5671 			     value we need.  If this register is the
5672 			     same as an `earlyclobber' operand of the
5673 			     current insn, just mark it as a place to
5674 			     reload from since we can't use it as the
5675 			     reload register itself.  */
5676 
5677 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
5678 			    if (reg_overlap_mentioned_for_reload_p
5679 				(reg_last_reload_reg[regno],
5680 				 reload_earlyclobbers[i1]))
5681 			      break;
5682 
5683 			  if (i1 != n_earlyclobbers
5684 			      || ! (free_for_value_p (i, rld[r].mode,
5685 						      rld[r].opnum,
5686 						      rld[r].when_needed, rld[r].in,
5687 						      rld[r].out, r, 1))
5688 			      /* Don't use it if we'd clobber a pseudo reg.  */
5689 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5690 				  && rld[r].out
5691 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5692 			      /* Don't clobber the frame pointer.  */
5693 			      || (i == HARD_FRAME_POINTER_REGNUM
5694 				  && frame_pointer_needed
5695 				  && rld[r].out)
5696 			      /* Don't really use the inherited spill reg
5697 				 if we need it wider than we've got it.  */
5698 			      || (GET_MODE_SIZE (rld[r].mode)
5699 				  > GET_MODE_SIZE (mode))
5700 			      || bad_for_class
5701 
5702 			      /* If find_reloads chose reload_out as reload
5703 				 register, stay with it - that leaves the
5704 				 inherited register for subsequent reloads.  */
5705 			      || (rld[r].out && rld[r].reg_rtx
5706 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5707 			    {
5708 			      if (! rld[r].optional)
5709 				{
5710 				  reload_override_in[r] = last_reg;
5711 				  reload_inheritance_insn[r]
5712 				    = reg_reloaded_insn[i];
5713 				}
5714 			    }
5715 			  else
5716 			    {
5717 			      int k;
5718 			      /* We can use this as a reload reg.  */
5719 			      /* Mark the register as in use for this part of
5720 				 the insn.  */
5721 			      mark_reload_reg_in_use (i,
5722 						      rld[r].opnum,
5723 						      rld[r].when_needed,
5724 						      rld[r].mode);
5725 			      rld[r].reg_rtx = last_reg;
5726 			      reload_inherited[r] = 1;
5727 			      reload_inheritance_insn[r]
5728 				= reg_reloaded_insn[i];
5729 			      reload_spill_index[r] = i;
5730 			      for (k = 0; k < nr; k++)
5731 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5732 						  i + k);
5733 			    }
5734 			}
5735 		    }
5736 		}
5737 	    }
5738 
5739 	  /* Here's another way to see if the value is already lying around.  */
5740 	  if (inheritance
5741 	      && rld[r].in != 0
5742 	      && ! reload_inherited[r]
5743 	      && rld[r].out == 0
5744 	      && (CONSTANT_P (rld[r].in)
5745 		  || GET_CODE (rld[r].in) == PLUS
5746 		  || REG_P (rld[r].in)
5747 		  || MEM_P (rld[r].in))
5748 	      && (rld[r].nregs == max_group_size
5749 		  || ! reg_classes_intersect_p (rld[r].class, group_class)))
5750 	    search_equiv = rld[r].in;
5751 	  /* If this is an output reload from a simple move insn, look
5752 	     if an equivalence for the input is available.  */
5753 	  else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5754 	    {
5755 	      rtx set = single_set (insn);
5756 
5757 	      if (set
5758 		  && rtx_equal_p (rld[r].out, SET_DEST (set))
5759 		  && CONSTANT_P (SET_SRC (set)))
5760 		search_equiv = SET_SRC (set);
5761 	    }
5762 
5763 	  if (search_equiv)
5764 	    {
5765 	      rtx equiv
5766 		= find_equiv_reg (search_equiv, insn, rld[r].class,
5767 				  -1, NULL, 0, rld[r].mode);
5768 	      int regno = 0;
5769 
5770 	      if (equiv != 0)
5771 		{
5772 		  if (REG_P (equiv))
5773 		    regno = REGNO (equiv);
5774 		  else
5775 		    {
5776 		      /* This must be a SUBREG of a hard register.
5777 			 Make a new REG since this might be used in an
5778 			 address and not all machines support SUBREGs
5779 			 there.  */
5780 		      gcc_assert (GET_CODE (equiv) == SUBREG);
5781 		      regno = subreg_regno (equiv);
5782 		      equiv = gen_rtx_REG (rld[r].mode, regno);
5783 		      /* If we choose EQUIV as the reload register, but the
5784 			 loop below decides to cancel the inheritance, we'll
5785 			 end up reloading EQUIV in rld[r].mode, not the mode
5786 			 it had originally.  That isn't safe when EQUIV isn't
5787 			 available as a spill register since its value might
5788 			 still be live at this point.  */
5789 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
5790 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
5791 			  equiv = 0;
5792 		    }
5793 		}
5794 
5795 	      /* If we found a spill reg, reject it unless it is free
5796 		 and of the desired class.  */
5797 	      if (equiv != 0)
5798 		{
5799 		  int regs_used = 0;
5800 		  int bad_for_class = 0;
5801 		  int max_regno = regno + rld[r].nregs;
5802 
5803 		  for (i = regno; i < max_regno; i++)
5804 		    {
5805 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
5806 						      i);
5807 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5808 							   i);
5809 		    }
5810 
5811 		  if ((regs_used
5812 		       && ! free_for_value_p (regno, rld[r].mode,
5813 					      rld[r].opnum, rld[r].when_needed,
5814 					      rld[r].in, rld[r].out, r, 1))
5815 		      || bad_for_class)
5816 		    equiv = 0;
5817 		}
5818 
5819 	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5820 		equiv = 0;
5821 
5822 	      /* We found a register that contains the value we need.
5823 		 If this register is the same as an `earlyclobber' operand
5824 		 of the current insn, just mark it as a place to reload from
5825 		 since we can't use it as the reload register itself.  */
5826 
5827 	      if (equiv != 0)
5828 		for (i = 0; i < n_earlyclobbers; i++)
5829 		  if (reg_overlap_mentioned_for_reload_p (equiv,
5830 							  reload_earlyclobbers[i]))
5831 		    {
5832 		      if (! rld[r].optional)
5833 			reload_override_in[r] = equiv;
5834 		      equiv = 0;
5835 		      break;
5836 		    }
5837 
5838 	      /* If the equiv register we have found is explicitly clobbered
5839 		 in the current insn, it depends on the reload type if we
5840 		 can use it, use it for reload_override_in, or not at all.
5841 		 In particular, we then can't use EQUIV for a
5842 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
5843 
5844 	      if (equiv != 0)
5845 		{
5846 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
5847 		    switch (rld[r].when_needed)
5848 		      {
5849 		      case RELOAD_FOR_OTHER_ADDRESS:
5850 		      case RELOAD_FOR_INPADDR_ADDRESS:
5851 		      case RELOAD_FOR_INPUT_ADDRESS:
5852 		      case RELOAD_FOR_OPADDR_ADDR:
5853 			break;
5854 		      case RELOAD_OTHER:
5855 		      case RELOAD_FOR_INPUT:
5856 		      case RELOAD_FOR_OPERAND_ADDRESS:
5857 			if (! rld[r].optional)
5858 			  reload_override_in[r] = equiv;
5859 			/* Fall through.  */
5860 		      default:
5861 			equiv = 0;
5862 			break;
5863 		      }
5864 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5865 		    switch (rld[r].when_needed)
5866 		      {
5867 		      case RELOAD_FOR_OTHER_ADDRESS:
5868 		      case RELOAD_FOR_INPADDR_ADDRESS:
5869 		      case RELOAD_FOR_INPUT_ADDRESS:
5870 		      case RELOAD_FOR_OPADDR_ADDR:
5871 		      case RELOAD_FOR_OPERAND_ADDRESS:
5872 		      case RELOAD_FOR_INPUT:
5873 			break;
5874 		      case RELOAD_OTHER:
5875 			if (! rld[r].optional)
5876 			  reload_override_in[r] = equiv;
5877 			/* Fall through.  */
5878 		      default:
5879 			equiv = 0;
5880 			break;
5881 		      }
5882 		}
5883 
5884 	      /* If we found an equivalent reg, say no code need be generated
5885 		 to load it, and use it as our reload reg.  */
5886 	      if (equiv != 0
5887 		  && (regno != HARD_FRAME_POINTER_REGNUM
5888 		      || !frame_pointer_needed))
5889 		{
5890 		  int nr = hard_regno_nregs[regno][rld[r].mode];
5891 		  int k;
5892 		  rld[r].reg_rtx = equiv;
5893 		  reload_inherited[r] = 1;
5894 
5895 		  /* If reg_reloaded_valid is not set for this register,
5896 		     there might be a stale spill_reg_store lying around.
5897 		     We must clear it, since otherwise emit_reload_insns
5898 		     might delete the store.  */
5899 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5900 		    spill_reg_store[regno] = NULL_RTX;
5901 		  /* If any of the hard registers in EQUIV are spill
5902 		     registers, mark them as in use for this insn.  */
5903 		  for (k = 0; k < nr; k++)
5904 		    {
5905 		      i = spill_reg_order[regno + k];
5906 		      if (i >= 0)
5907 			{
5908 			  mark_reload_reg_in_use (regno, rld[r].opnum,
5909 						  rld[r].when_needed,
5910 						  rld[r].mode);
5911 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5912 					    regno + k);
5913 			}
5914 		    }
5915 		}
5916 	    }
5917 
5918 	  /* If we found a register to use already, or if this is an optional
5919 	     reload, we are done.  */
5920 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5921 	    continue;
5922 
5923 #if 0
5924 	  /* No longer needed for correct operation.  Might or might
5925 	     not give better code on the average.  Want to experiment?  */
5926 
5927 	  /* See if there is a later reload that has a class different from our
5928 	     class that intersects our class or that requires fewer registers
5929 	     than our reload.  If so, we must allocate a register to this
5930 	     reload now, since that reload might inherit a previous reload
5931 	     and take the only available register in our class.  Don't do this
5932 	     for optional reloads since they will force all previous reloads
5933 	     to be allocated.  Also don't do this for reloads that have been
5934 	     turned off.  */
5935 
5936 	  for (i = j + 1; i < n_reloads; i++)
5937 	    {
5938 	      int s = reload_order[i];
5939 
5940 	      if ((rld[s].in == 0 && rld[s].out == 0
5941 		   && ! rld[s].secondary_p)
5942 		  || rld[s].optional)
5943 		continue;
5944 
5945 	      if ((rld[s].class != rld[r].class
5946 		   && reg_classes_intersect_p (rld[r].class,
5947 					       rld[s].class))
5948 		  || rld[s].nregs < rld[r].nregs)
5949 		break;
5950 	    }
5951 
5952 	  if (i == n_reloads)
5953 	    continue;
5954 
5955 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
5956 #endif
5957 	}
5958 
5959       /* Now allocate reload registers for anything non-optional that
5960 	 didn't get one yet.  */
5961       for (j = 0; j < n_reloads; j++)
5962 	{
5963 	  int r = reload_order[j];
5964 
5965 	  /* Ignore reloads that got marked inoperative.  */
5966 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5967 	    continue;
5968 
5969 	  /* Skip reloads that already have a register allocated or are
5970 	     optional.  */
5971 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
5972 	    continue;
5973 
5974 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5975 	    break;
5976 	}
5977 
5978       /* If that loop got all the way, we have won.  */
5979       if (j == n_reloads)
5980 	{
5981 	  win = 1;
5982 	  break;
5983 	}
5984 
5985       /* Loop around and try without any inheritance.  */
5986     }
5987 
5988   if (! win)
5989     {
5990       /* First undo everything done by the failed attempt
5991 	 to allocate with inheritance.  */
5992       choose_reload_regs_init (chain, save_reload_reg_rtx);
5993 
5994       /* Some sanity tests to verify that the reloads found in the first
5995 	 pass are identical to the ones we have now.  */
5996       gcc_assert (chain->n_reloads == n_reloads);
5997 
5998       for (i = 0; i < n_reloads; i++)
5999 	{
6000 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6001 	    continue;
6002 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
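	  /* Re-establish the spill register that the first pass chose for
	     this reload; if it can no longer be assigned, the reload has
	     failed.  */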
6003 	  for (j = 0; j < n_spills; j++)
6004 	    if (spill_regs[j] == chain->rld[i].regno)
6005 	      if (! set_reload_reg (j, i))
6006 		failed_reload (chain->insn, i);
6007 	}
6008     }
6009 
6010   /* If we thought we could inherit a reload, because it seemed that
6011      nothing else wanted the same reload register earlier in the insn,
6012      verify that assumption, now that all reloads have been assigned.
6013      Likewise for reloads where reload_override_in has been set.  */
6014 
6015   /* If doing expensive optimizations, do one preliminary pass that doesn't
6016      cancel any inheritance, but removes reloads that have been needed only
6017      for reloads that we know can be inherited.  */
6018   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6019     {
6020       for (j = 0; j < n_reloads; j++)
6021 	{
6022 	  int r = reload_order[j];
6023 	  rtx check_reg;
6024 	  if (reload_inherited[r] && rld[r].reg_rtx)
6025 	    check_reg = rld[r].reg_rtx;
6026 	  else if (reload_override_in[r]
6027 		   && (REG_P (reload_override_in[r])
6028 		       || GET_CODE (reload_override_in[r]) == SUBREG))
6029 	    check_reg = reload_override_in[r];
6030 	  else
6031 	    continue;
6032 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6033 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
6034 				  (reload_inherited[r]
6035 				   ? rld[r].out : const0_rtx),
6036 				  r, 1))
6037 	    {
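	      /* On the preliminary pass we only remove superfluous address
		 reloads; inheritance itself is not canceled until the final
		 pass.  */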
6038 	      if (pass)
6039 		continue;
6040 	      reload_inherited[r] = 0;
6041 	      reload_override_in[r] = 0;
6042 	    }
6043 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6044 	     reload_override_in, then we do not need its related
6045 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6046 	     likewise for other reload types.
6047 	     We handle this by removing a reload when its only replacement
6048 	     is mentioned in reload_in of the reload we are going to inherit.
6049 	     Auto_inc expressions are a special case; even if the input is
6050 	     inherited, we still need the address for the output.  We can
6051 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
6052 	     If we succeeded removing some reload and we are doing a preliminary
6053 	     pass just to remove such reloads, make another pass, since the
6054 	     removal of one reload might allow us to inherit another one.  */
6055 	  else if (rld[r].in
6056 		   && rld[r].out != rld[r].in
6057 		   && remove_address_replacements (rld[r].in) && pass)
6058 	    pass = 2;
6059 	}
6060     }
6061 
6062   /* Now that reload_override_in is known valid,
6063      actually override reload_in.  */
6064   for (j = 0; j < n_reloads; j++)
6065     if (reload_override_in[j])
6066       rld[j].in = reload_override_in[j];
6067 
6068   /* If this reload won't be done because it has been canceled or is
6069      optional and not inherited, clear reload_reg_rtx so other
6070      routines (such as subst_reloads) don't get confused.  */
6071   for (j = 0; j < n_reloads; j++)
6072     if (rld[j].reg_rtx != 0
6073 	&& ((rld[j].optional && ! reload_inherited[j])
6074 	    || (rld[j].in == 0 && rld[j].out == 0
6075 		&& ! rld[j].secondary_p)))
6076       {
6077 	int regno = true_regnum (rld[j].reg_rtx);
6078 
6079 	if (spill_reg_order[regno] >= 0)
6080 	  clear_reload_reg_in_use (regno, rld[j].opnum,
6081 				   rld[j].when_needed, rld[j].mode);
6082 	rld[j].reg_rtx = 0;
6083 	reload_spill_index[j] = -1;
6084       }
6085 
6086   /* Record which pseudos and which spill regs have output reloads.  */
6087   for (j = 0; j < n_reloads; j++)
6088     {
6089       int r = reload_order[j];
6090 
6091       i = reload_spill_index[r];
6092 
6093       /* I is nonneg if this reload uses a register.
6094 	 If rld[r].reg_rtx is 0, this is an optional reload
6095 	 that we opted to ignore.  */
6096       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6097 	  && rld[r].reg_rtx != 0)
6098 	{
6099 	  int nregno = REGNO (rld[r].out_reg);
6100 	  int nr = 1;
6101 
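	  /* A pseudo counts as a single register here; a hard register may
	     span several consecutive registers in this mode.  */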
6102 	  if (nregno < FIRST_PSEUDO_REGISTER)
6103 	    nr = hard_regno_nregs[nregno][rld[r].mode];
6104 
6105 	  while (--nr >= 0)
6106 	    SET_REGNO_REG_SET (&reg_has_output_reload,
6107 			       nregno + nr);
6108 
6109 	  if (i >= 0)
6110 	    {
6111 	      nr = hard_regno_nregs[i][rld[r].mode];
6112 	      while (--nr >= 0)
6113 		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6114 	    }
6115 
6116 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
6117 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
6118 		      || rld[r].when_needed == RELOAD_FOR_INSN);
6119 	}
6120     }
6121 }
6122 
6123 /* Deallocate the reload register for reload R.  This is called from
6124    remove_address_replacements.  */
6125 
6126 void
6127 deallocate_reload_reg (int r)
6128 {
6129   int regno;
6130 
6131   if (! rld[r].reg_rtx)
6132     return;
6133   regno = true_regnum (rld[r].reg_rtx);
6134   rld[r].reg_rtx = 0;
6135   if (spill_reg_order[regno] >= 0)
6136     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6137 			     rld[r].mode);
6138   reload_spill_index[r] = -1;
6139 }
6140 
6141 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6142    reloads of the same item for fear that we might not have enough reload
6143    registers. However, normally they will get the same reload register
6144    and hence actually need not be loaded twice.
6145 
6146    Here we check for the most common case of this phenomenon: when we have
6147    a number of reloads for the same object, each of which was allocated
6148    the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6149    reload, and is not modified in the insn itself.  If we find such,
6150    merge all the reloads and set the resulting reload to RELOAD_OTHER.
6151    This will not increase the number of spill registers needed and will
6152    prevent redundant code.  */
6153 
6154 static void
6155 merge_assigned_reloads (rtx insn)
6156 {
6157   int i, j;
6158 
6159   /* Scan all the reloads looking for ones that only load values and
6160      are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6161      assigned and not modified by INSN.  */
6162 
6163   for (i = 0; i < n_reloads; i++)
6164     {
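      /* Track the highest operand number of any RELOAD_FOR_INPUT_ADDRESS
	 reload sharing this reload register, and the lowest operand number
	 of any conflicting RELOAD_FOR_INPUT; the merge below is only done
	 when the former does not exceed the latter.  */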
6165       int conflicting_input = 0;
6166       int max_input_address_opnum = -1;
6167       int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6168 
6169       if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6170 	  || rld[i].out != 0 || rld[i].reg_rtx == 0
6171 	  || reg_set_p (rld[i].reg_rtx, insn))
6172 	continue;
6173 
6174       /* Look at all other reloads.  Ensure that the only use of this
6175 	 reload_reg_rtx is in a reload that just loads the same value
6176 	 as we do.  Note that any secondary reloads must be of the identical
6177 	 class since the values, modes, and result registers are the
6178 	 same, so we need not do anything with any secondary reloads.  */
6179 
6180       for (j = 0; j < n_reloads; j++)
6181 	{
6182 	  if (i == j || rld[j].reg_rtx == 0
6183 	      || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6184 					    rld[i].reg_rtx))
6185 	    continue;
6186 
6187 	  if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6188 	      && rld[j].opnum > max_input_address_opnum)
6189 	    max_input_address_opnum = rld[j].opnum;
6190 
6191 	  /* If the reload regs aren't exactly the same (e.g., different modes)
6192 	     or if the values are different, we can't merge this reload.
6193 	     But if it is an input reload, we might still merge
6194 	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */
6195 
6196 	  if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6197 	      || rld[j].out != 0 || rld[j].in == 0
6198 	      || ! rtx_equal_p (rld[i].in, rld[j].in))
6199 	    {
6200 	      if (rld[j].when_needed != RELOAD_FOR_INPUT
6201 		  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6202 		       || rld[i].opnum > rld[j].opnum)
6203 		      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6204 		break;
6205 	      conflicting_input = 1;
6206 	      if (min_conflicting_input_opnum > rld[j].opnum)
6207 		min_conflicting_input_opnum = rld[j].opnum;
6208 	    }
6209 	}
6210 
6211       /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
6212 	 we, in fact, found any matching reloads.  */
6213 
6214       if (j == n_reloads
6215 	  && max_input_address_opnum <= min_conflicting_input_opnum)
6216 	{
6217 	  gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6218 
6219 	  for (j = 0; j < n_reloads; j++)
6220 	    if (i != j && rld[j].reg_rtx != 0
6221 		&& rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6222 		&& (! conflicting_input
6223 		    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6224 		    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6225 	      {
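		/* Merge reload J into reload I: promote I to RELOAD_OTHER,
		   deactivate J, and transfer J's replacements to I.  */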
6226 		rld[i].when_needed = RELOAD_OTHER;
6227 		rld[j].in = 0;
6228 		reload_spill_index[j] = -1;
6229 		transfer_replacements (i, j);
6230 	      }
6231 
6232 	  /* If this is now RELOAD_OTHER, look for any reloads that load
6233 	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6234 	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
6235 	     this test is equivalent to looking for reloads for this operand
6236 	     number.  */
6237 	  /* We must take special care with RELOAD_FOR_OUTPUT_ADDRESS; it may
6238 	     share registers with a RELOAD_FOR_INPUT, so we cannot change it
6239 	     to RELOAD_FOR_OTHER_ADDRESS.  We should never need to, since we
6240 	     do not modify RELOAD_FOR_OUTPUT.  */
6241 
6242 	  if (rld[i].when_needed == RELOAD_OTHER)
6243 	    for (j = 0; j < n_reloads; j++)
6244 	      if (rld[j].in != 0
6245 		  && rld[j].when_needed != RELOAD_OTHER
6246 		  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6247 		  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6248 		  && (! conflicting_input
6249 		      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6250 		      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6251 		  && reg_overlap_mentioned_for_reload_p (rld[j].in,
6252 							 rld[i].in))
6253 		{
6254 		  int k;
6255 
6256 		  rld[j].when_needed
6257 		    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6258 			|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6259 		       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6260 
6261 		  /* Check to see if we accidentally converted two
6262 		     reloads that use the same reload register with
6263 		     different inputs to the same type.  If so, the
6264 		     resulting code won't work.  */
6265 		  if (rld[j].reg_rtx)
6266 		    for (k = 0; k < j; k++)
6267 		      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6268 				  || rld[k].when_needed != rld[j].when_needed
6269 				  || !rtx_equal_p (rld[k].reg_rtx,
6270 						   rld[j].reg_rtx)
6271 				  || rtx_equal_p (rld[k].in,
6272 						  rld[j].in));
6273 		}
6274 	}
6275     }
6276 }
6277 
6278 /* These arrays are filled by emit_reload_insns and its subroutines.  */
6279 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6280 static rtx other_input_address_reload_insns = 0;
6281 static rtx other_input_reload_insns = 0;
6282 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6283 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6284 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6285 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6286 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6287 static rtx operand_reload_insns = 0;
6288 static rtx other_operand_reload_insns = 0;
6289 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6290 
6291 /* Values to be put in spill_reg_store are put here first.  */
6292 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6293 static HARD_REG_SET reg_reloaded_died;
6294 
6295 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6296    of class NEW_CLASS with mode NEW_MODE.  Alternatively, if ALT_RELOAD_REG
6297    is nonzero, check whether that register is suitable.  On success, change
6298    *RELOAD_REG to the adjusted register, and return true; otherwise, return false.  */
6299 static bool
6300 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6301 			    enum reg_class new_class,
6302 			    enum machine_mode new_mode)
6303 
6304 {
6305   rtx reg;
6306 
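  /* Try *RELOAD_REG first, then ALT_RELOAD_REG; clearing ALT_RELOAD_REG
     after the first iteration limits the loop to at most two candidates.  */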
6307   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6308     {
6309       unsigned regno = REGNO (reg);
6310 
6311       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6312 	continue;
6313       if (GET_MODE (reg) != new_mode)
6314 	{
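	  /* Reject the candidate if it cannot hold NEW_MODE, or if NEW_MODE
	     would need more consecutive hard registers than the candidate
	     currently occupies.  */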
6315 	  if (!HARD_REGNO_MODE_OK (regno, new_mode))
6316 	    continue;
6317 	  if (hard_regno_nregs[regno][new_mode]
6318 	      > hard_regno_nregs[regno][GET_MODE (reg)])
6319 	    continue;
6320 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
6321 	}
6322       *reload_reg = reg;
6323       return true;
6324     }
6325   return false;
6326 }
6327 
6328 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
6329    pattern with insn_code ICODE.  Alternatively, if ALT_RELOAD_REG is
6330    nonzero, check whether that register is suitable.  On success, change
6331    *RELOAD_REG to the adjusted register, and return true; otherwise, return false.  */
6332 static bool
6333 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6334 			     enum insn_code icode)
6335 
6336 {
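  /* Operand 2 of a secondary-reload pattern is the scratch register; take
     the required class and mode from the pattern's insn_data.  */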
6337   enum reg_class new_class = scratch_reload_class (icode);
6338   enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6339 
6340   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6341 				     new_class, new_mode);
6342 }
6343 
6344 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6345    has the number J.  OLD contains the value to be used as input.  */
6346 
6347 static void
6348 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6349 			 rtx old, int j)
6350 {
6351   rtx insn = chain->insn;
6352   rtx reloadreg = rl->reg_rtx;
6353   rtx oldequiv_reg = 0;
6354   rtx oldequiv = 0;
6355   int special = 0;
6356   enum machine_mode mode;
6357   rtx *where;
6358 
6359   /* Determine the mode to reload in.
6360      This is very tricky because we have three to choose from.
6361      There is the mode the insn operand wants (rl->inmode).
6362      There is the mode of the reload register RELOADREG.
6363      There is the intrinsic mode of the operand, which we could find
6364      by stripping some SUBREGs.
6365      It turns out that RELOADREG's mode is irrelevant:
6366      we can change that arbitrarily.
6367 
6368      Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6369      then the reload reg may not support QImode moves, so use SImode.
6370      If foo is in memory due to spilling a pseudo reg, this is safe,
6371      because the QImode value is in the least significant part of a
6372      slot big enough for a SImode.  If foo is some other sort of
6373      memory reference, then it is impossible to reload this case,
6374      so previous passes had better make sure this never happens.
6375 
6376      Then consider a one-word union which has SImode and one of its
6377      members is a float, being fetched as (SUBREG:SF union:SI).
6378      We must fetch that as SFmode because we could be loading into
6379      a float-only register.  In this case OLD's mode is correct.
6380 
6381      Consider an immediate integer: it has VOIDmode.  Here we need
6382      to get a mode from something else.
6383 
6384      In some cases, there is a fourth mode, the operand's
6385      containing mode.  If the insn specifies a containing mode for
6386      this operand, it overrides all others.
6387 
6388      I am not sure whether the algorithm here is always right,
6389      but it does the right things in those cases.  */
6390 
6391   mode = GET_MODE (old);
6392   if (mode == VOIDmode)
6393     mode = rl->inmode;
6394 
6395   /* delete_output_reload is only invoked properly if old contains
6396      the original pseudo register.  Since this is replaced with a
6397      hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6398      find the pseudo in RELOAD_IN_REG.  */
6399   if (reload_override_in[j]
6400       && REG_P (rl->in_reg))
6401     {
6402       oldequiv = old;
6403       old = rl->in_reg;
6404     }
6405   if (oldequiv == 0)
6406     oldequiv = old;
6407   else if (REG_P (oldequiv))
6408     oldequiv_reg = oldequiv;
6409   else if (GET_CODE (oldequiv) == SUBREG)
6410     oldequiv_reg = SUBREG_REG (oldequiv);
6411 
6412   /* If we are reloading from a register that was recently stored in
6413      with an output-reload, see if we can prove there was
6414      actually no need to store the old value in it.  */
6415 
6416   if (optimize && REG_P (oldequiv)
6417       && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6418       && spill_reg_store[REGNO (oldequiv)]
6419       && REG_P (old)
6420       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6421 	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6422 			  rl->out_reg)))
6423     delete_output_reload (insn, j, REGNO (oldequiv));
6424 
6425   /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6426      then load RELOADREG from OLDEQUIV.  Note that we cannot use
6427      gen_lowpart_common since it can do the wrong thing when
6428      RELOADREG has a multi-word mode.  Note that RELOADREG
6429      must always be a REG here.  */
6430 
6431   if (GET_MODE (reloadreg) != mode)
6432     reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6433   while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6434     oldequiv = SUBREG_REG (oldequiv);
6435   if (GET_MODE (oldequiv) != VOIDmode
6436       && mode != GET_MODE (oldequiv))
6437     oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6438 
6439   /* Switch to the right place to emit the reload insns.  */
6440   switch (rl->when_needed)
6441     {
6442     case RELOAD_OTHER:
6443       where = &other_input_reload_insns;
6444       break;
6445     case RELOAD_FOR_INPUT:
6446       where = &input_reload_insns[rl->opnum];
6447       break;
6448     case RELOAD_FOR_INPUT_ADDRESS:
6449       where = &input_address_reload_insns[rl->opnum];
6450       break;
6451     case RELOAD_FOR_INPADDR_ADDRESS:
6452       where = &inpaddr_address_reload_insns[rl->opnum];
6453       break;
6454     case RELOAD_FOR_OUTPUT_ADDRESS:
6455       where = &output_address_reload_insns[rl->opnum];
6456       break;
6457     case RELOAD_FOR_OUTADDR_ADDRESS:
6458       where = &outaddr_address_reload_insns[rl->opnum];
6459       break;
6460     case RELOAD_FOR_OPERAND_ADDRESS:
6461       where = &operand_reload_insns;
6462       break;
6463     case RELOAD_FOR_OPADDR_ADDR:
6464       where = &other_operand_reload_insns;
6465       break;
6466     case RELOAD_FOR_OTHER_ADDRESS:
6467       where = &other_input_address_reload_insns;
6468       break;
6469     default:
6470       gcc_unreachable ();
6471     }
6472 
6473   push_to_sequence (*where);
6474 
6475   /* Auto-increment addresses must be reloaded in a special way.  */
6476   if (rl->out && ! rl->out_reg)
6477     {
6478       /* We are not going to bother supporting the case where an
6479 	 incremented register can't be copied directly from
6480 	 OLDEQUIV since this seems highly unlikely.  */
6481       gcc_assert (rl->secondary_in_reload < 0);
6482 
6483       if (reload_inherited[j])
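      /* If this reload was inherited, the reload register already holds the
	 value to be incremented, so increment from there.  */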
6484 	oldequiv = reloadreg;
6485 
6486       old = XEXP (rl->in_reg, 0);
6487 
6488       if (optimize && REG_P (oldequiv)
6489 	  && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6490 	  && spill_reg_store[REGNO (oldequiv)]
6491 	  && REG_P (old)
6492 	  && (dead_or_set_p (insn,
6493 			     spill_reg_stored_to[REGNO (oldequiv)])
6494 	      || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6495 			      old)))
6496 	delete_output_reload (insn, j, REGNO (oldequiv));
6497 
6498       /* Prevent normal processing of this reload.  */
6499       special = 1;
6500       /* Output a special code sequence for this case.  */
6501       new_spill_reg_store[REGNO (reloadreg)]
6502 	= inc_for_reload (reloadreg, oldequiv, rl->out,
6503 			  rl->inc);
6504     }
6505 
6506   /* If we are reloading a pseudo-register that was set by the previous
6507      insn, see if we can get rid of that pseudo-register entirely
6508      by redirecting the previous insn into our reload register.  */
6509 
6510   else if (optimize && REG_P (old)
6511 	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
6512 	   && dead_or_set_p (insn, old)
6513 	   /* This is unsafe if some other reload
6514 	      uses the same reg first.  */
6515 	   && ! conflicts_with_override (reloadreg)
6516 	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6517 				rl->when_needed, old, rl->out, j, 0))
6518     {
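      /* Walk back past any notes to find the previous real insn.  */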
6519       rtx temp = PREV_INSN (insn);
6520       while (temp && NOTE_P (temp))
6521 	temp = PREV_INSN (temp);
6522       if (temp
6523 	  && NONJUMP_INSN_P (temp)
6524 	  && GET_CODE (PATTERN (temp)) == SET
6525 	  && SET_DEST (PATTERN (temp)) == old
6526 	  /* Make sure we can access insn_operand_constraint.  */
6527 	  && asm_noperands (PATTERN (temp)) < 0
6528 	  /* This is unsafe if the operand occurs more than once in the current
6529 	     insn.  Perhaps some occurrences aren't reloaded.  */
6530 	  && count_occurrences (PATTERN (insn), old, 0) == 1)
6531 	{
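	  /* Remember the original destination so it can be restored if the
	     modified insn fails to satisfy its constraints below.  */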
6532 	  rtx old = SET_DEST (PATTERN (temp));
6533 	  /* Store into the reload register instead of the pseudo.  */
6534 	  SET_DEST (PATTERN (temp)) = reloadreg;
6535 
6536 	  /* Verify that resulting insn is valid.  */
6537 	  extract_insn (temp);
6538 	  if (constrain_operands (1))
6539 	    {
6540 	      /* If the previous insn is an output reload, the source is
6541 		 a reload register, and its spill_reg_store entry will
6542 		 contain the previous destination.  This is now
6543 		 invalid.  */
6544 	      if (REG_P (SET_SRC (PATTERN (temp)))
6545 		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6546 		{
6547 		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6548 		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6549 		}
6550 
6551 	      /* If these are the only uses of the pseudo reg,
6552 		 pretend for GDB it lives in the reload reg we used.  */
6553 	      if (REG_N_DEATHS (REGNO (old)) == 1
6554 		  && REG_N_SETS (REGNO (old)) == 1)
6555 		{
6556 		  reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6557 		  alter_reg (REGNO (old), -1);
6558 		}
6559 	      special = 1;
6560 	    }
6561 	  else
6562 	    {
6563 	      SET_DEST (PATTERN (temp)) = old;
6564 	    }
6565 	}
6566     }
6567 
6568   /* We can't do that, so output an insn to load RELOADREG.  */
6569 
6570   /* If we have a secondary reload, pick up the secondary register
6571      and icode, if any.  If OLDEQUIV and OLD are different or
6572      if this is an in-out reload, recompute whether or not we
6573      still need a secondary register and what the icode should
6574      be.  If we still need a secondary register and the class or
6575      icode is different, go back to reloading from OLD if using
6576      OLDEQUIV means that we got the wrong type of register.  We
6577      cannot have different class or icode due to an in-out reload
6578      because we don't make such reloads when both the input and
6579      output need secondary reload registers.  */
6580 
6581   if (! special && rl->secondary_in_reload >= 0)
6582     {
6583       rtx second_reload_reg = 0;
6584       rtx third_reload_reg = 0;
6585       int secondary_reload = rl->secondary_in_reload;
6586       rtx real_oldequiv = oldequiv;
6587       rtx real_old = old;
6588       rtx tmp;
6589       enum insn_code icode;
6590       enum insn_code tertiary_icode = CODE_FOR_nothing;
6591 
6592       /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6593 	 and similarly for OLD.
6594 	 See comments in get_secondary_reload in reload.c.  */
6595       /* If it is a pseudo that cannot be replaced with its
6596 	 equivalent MEM, we must fall back to reload_in, which
6597 	 will have all the necessary substitutions registered.
6598 	 Likewise for a pseudo that can't be replaced with its
6599 	 equivalent constant.
6600 
6601 	 Take extra care for subregs of such pseudos.  Note that
6602 	 we cannot use reg_equiv_mem in this case because it is
6603 	 not in the right mode.  */
6604 
6605       tmp = oldequiv;
6606       if (GET_CODE (tmp) == SUBREG)
6607 	tmp = SUBREG_REG (tmp);
6608       if (REG_P (tmp)
6609 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6610 	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6611 	      || reg_equiv_constant[REGNO (tmp)] != 0))
6612 	{
6613 	  if (! reg_equiv_mem[REGNO (tmp)]
6614 	      || num_not_at_initial_offset
6615 	      || GET_CODE (oldequiv) == SUBREG)
6616 	    real_oldequiv = rl->in;
6617 	  else
6618 	    real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6619 	}
6620 
6621       tmp = old;
6622       if (GET_CODE (tmp) == SUBREG)
6623 	tmp = SUBREG_REG (tmp);
6624       if (REG_P (tmp)
6625 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6626 	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6627 	      || reg_equiv_constant[REGNO (tmp)] != 0))
6628 	{
6629 	  if (! reg_equiv_mem[REGNO (tmp)]
6630 	      || num_not_at_initial_offset
6631 	      || GET_CODE (old) == SUBREG)
6632 	    real_old = rl->in;
6633 	  else
6634 	    real_old = reg_equiv_mem[REGNO (tmp)];
6635 	}
6636 
6637       second_reload_reg = rld[secondary_reload].reg_rtx;
6638       if (rld[secondary_reload].secondary_in_reload >= 0)
6639 	{
6640 	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;
6641 
6642 	  third_reload_reg = rld[tertiary_reload].reg_rtx;
6643 	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
6644 	  /* We'd have to add more code for quaternary reloads.  */
6645 	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
6646 	}
6647       icode = rl->secondary_in_icode;
6648 
6649       if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6650 	  || (rl->in != 0 && rl->out != 0))
6651 	{
6652 	  secondary_reload_info sri, sri2;
6653 	  enum reg_class new_class, new_t_class;
6654 
6655 	  sri.icode = CODE_FOR_nothing;
6656 	  sri.prev_sri = NULL;
6657 	  new_class = targetm.secondary_reload (1, real_oldequiv, rl->class,
6658 						mode, &sri);
6659 
6660 	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
6661 	    second_reload_reg = 0;
6662 	  else if (new_class == NO_REGS)
6663 	    {
6664 	      if (reload_adjust_reg_for_icode (&second_reload_reg,
6665 					       third_reload_reg, sri.icode))
6666 		icode = sri.icode, third_reload_reg = 0;
6667 	      else
6668 		oldequiv = old, real_oldequiv = real_old;
6669 	    }
6670 	  else if (sri.icode != CODE_FOR_nothing)
6671 	    /* We currently lack a way to express this in reloads.  */
6672 	    gcc_unreachable ();
6673 	  else
6674 	    {
6675 	      sri2.icode = CODE_FOR_nothing;
6676 	      sri2.prev_sri = &sri;
6677 	      new_t_class = targetm.secondary_reload (1, real_oldequiv,
6678 						      new_class, mode, &sri);
6679 	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
6680 		{
6681 		  if (reload_adjust_reg_for_temp (&second_reload_reg,
6682 						  third_reload_reg,
6683 						  new_class, mode))
6684 		    third_reload_reg = 0, tertiary_icode = sri2.icode;
6685 		  else
6686 		    oldequiv = old, real_oldequiv = real_old;
6687 		}
6688 	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
6689 		{
6690 		  rtx intermediate = second_reload_reg;
6691 
6692 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
6693 						  new_class, mode)
6694 		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
6695 						      sri2.icode))
6696 		    {
6697 		      second_reload_reg = intermediate;
6698 		      tertiary_icode = sri2.icode;
6699 		    }
6700 		  else
6701 		    oldequiv = old, real_oldequiv = real_old;
6702 		}
6703 	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
6704 		{
6705 		  rtx intermediate = second_reload_reg;
6706 
6707 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
6708 						  new_class, mode)
6709 		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
6710 						      new_t_class, mode))
6711 		    {
6712 		      second_reload_reg = intermediate;
6713 		      tertiary_icode = sri2.icode;
6714 		    }
6715 		  else
6716 		    oldequiv = old, real_oldequiv = real_old;
6717 		}
6718 	      else
6719 		/* This could be handled more intelligently too.  */
6720 		oldequiv = old, real_oldequiv = real_old;
6721 	    }
6722 	}
6723 
6724       /* If we still need a secondary reload register, check
6725 	 to see if it is being used as a scratch or intermediate
6726 	 register and generate code appropriately.  If we need
6727 	 a scratch register, use REAL_OLDEQUIV since the form of
6728 	 the insn may depend on the actual address if it is
6729 	 a MEM.  */
6730 
6731       if (second_reload_reg)
6732 	{
6733 	  if (icode != CODE_FOR_nothing)
6734 	    {
6735 	      /* We'd have to add extra code to handle this case.  */
6736 	      gcc_assert (!third_reload_reg);
6737 
6738 	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6739 					  second_reload_reg));
6740 	      special = 1;
6741 	    }
6742 	  else
6743 	    {
6744 	      /* See if we need a scratch register to load the
6745 		 intermediate register (a tertiary reload).  */
6746 	      if (tertiary_icode != CODE_FOR_nothing)
6747 		{
6748 		  emit_insn ((GEN_FCN (tertiary_icode)
6749 			      (second_reload_reg, real_oldequiv,
6750 			       third_reload_reg)));
6751 		}
6752 	      else if (third_reload_reg)
6753 		{
6754 		  gen_reload (third_reload_reg, real_oldequiv,
6755 			      rl->opnum,
6756 			      rl->when_needed);
6757 		  gen_reload (second_reload_reg, third_reload_reg,
6758 			      rl->opnum,
6759 			      rl->when_needed);
6760 		}
6761 	      else
6762 		gen_reload (second_reload_reg, real_oldequiv,
6763 			    rl->opnum,
6764 			    rl->when_needed);
6765 
6766 	      oldequiv = second_reload_reg;
6767 	    }
6768 	}
6769     }
6770 
6771   if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6772     {
6773       rtx real_oldequiv = oldequiv;
6774 
6775       if ((REG_P (oldequiv)
6776 	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6777 	   && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6778 	       || reg_equiv_constant[REGNO (oldequiv)] != 0))
6779 	  || (GET_CODE (oldequiv) == SUBREG
6780 	      && REG_P (SUBREG_REG (oldequiv))
6781 	      && (REGNO (SUBREG_REG (oldequiv))
6782 		  >= FIRST_PSEUDO_REGISTER)
6783 	      && ((reg_equiv_memory_loc
6784 		   [REGNO (SUBREG_REG (oldequiv))] != 0)
6785 		  || (reg_equiv_constant
6786 		      [REGNO (SUBREG_REG (oldequiv))] != 0)))
6787 	  || (CONSTANT_P (oldequiv)
6788 	      && (PREFERRED_RELOAD_CLASS (oldequiv,
6789 					  REGNO_REG_CLASS (REGNO (reloadreg)))
6790 		  == NO_REGS)))
6791 	real_oldequiv = rl->in;
6792       gen_reload (reloadreg, real_oldequiv, rl->opnum,
6793 		  rl->when_needed);
6794     }
6795 
6796   if (flag_non_call_exceptions)
6797     copy_eh_notes (insn, get_insns ());
6798 
6799   /* End this sequence.  */
6800   *where = get_insns ();
6801   end_sequence ();
6802 
6803   /* Update reload_override_in so that delete_address_reloads_1
6804      can see the actual register usage.  */
6805   if (oldequiv_reg)
6806     reload_override_in[j] = oldequiv;
6807 }
6808 
6809 /* Generate insns for the output reload RL, which is for the insn described
6810    by CHAIN and has the number J.  */
6811 static void
6812 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
6813 			  int j)
6814 {
6815   rtx reloadreg = rl->reg_rtx;
6816   rtx insn = chain->insn;
6817   int special = 0;
6818   rtx old = rl->out;
6819   enum machine_mode mode = GET_MODE (old);
6820   rtx p;
6821 
6822   if (rl->when_needed == RELOAD_OTHER)
6823     start_sequence ();
6824   else
6825     push_to_sequence (output_reload_insns[rl->opnum]);
6826 
6827   /* Determine the mode to reload in.
6828      See comments above (for input reloading).  */
6829 
6830   if (mode == VOIDmode)
6831     {
6832       /* VOIDmode should never happen for an output.  */
6833       if (asm_noperands (PATTERN (insn)) < 0)
6834 	/* It's the compiler's fault.  */
6835 	fatal_insn ("VOIDmode on an output", insn);
6836       error_for_asm (insn, "output operand is constant in %<asm%>");
6837       /* Prevent crash--use something we know is valid.  */
6838       mode = word_mode;
6839       old = gen_rtx_REG (mode, REGNO (reloadreg));
6840     }
6841 
6842   if (GET_MODE (reloadreg) != mode)
6843     reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6844 
6845   /* If we need two reload regs, set RELOADREG to the intermediate
6846      one, since it will be stored into OLD.  We might need a secondary
6847      register only for an input reload, so check again here.  */
6848 
6849   if (rl->secondary_out_reload >= 0)
6850     {
6851       rtx real_old = old;
6852       int secondary_reload = rl->secondary_out_reload;
6853       int tertiary_reload = rld[secondary_reload].secondary_out_reload;
6854 
6855       if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
6856 	  && reg_equiv_mem[REGNO (old)] != 0)
6857 	real_old = reg_equiv_mem[REGNO (old)];
6858 
6859       if (secondary_reload_class (0, rl->class, mode, real_old) != NO_REGS)
6860 	{
6861 	  rtx second_reloadreg = reloadreg;
6862 	  reloadreg = rld[secondary_reload].reg_rtx;
6863 
6864 	  /* See if RELOADREG is to be used as a scratch register
6865 	     or as an intermediate register.  */
6866 	  if (rl->secondary_out_icode != CODE_FOR_nothing)
6867 	    {
6868 	      /* We'd have to add extra code to handle this case.  */
6869 	      gcc_assert (tertiary_reload < 0);
6870 
6871 	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
6872 			  (real_old, second_reloadreg, reloadreg)));
6873 	      special = 1;
6874 	    }
6875 	  else
6876 	    {
6877 	      /* See if we need both a scratch and intermediate reload
6878 		 register.  */
6879 
6880 	      enum insn_code tertiary_icode
6881 		= rld[secondary_reload].secondary_out_icode;
6882 
6883 	      /* We'd have to add more code for quaternary reloads.  */
6884 	      gcc_assert (tertiary_reload < 0
6885 			  || rld[tertiary_reload].secondary_out_reload < 0);
6886 
6887 	      if (GET_MODE (reloadreg) != mode)
6888 		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6889 
6890 	      if (tertiary_icode != CODE_FOR_nothing)
6891 		{
6892 		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
6893 		  rtx tem;
6894 
6895 		  /* Copy primary reload reg to secondary reload reg
6896 		     (note that these have been swapped above), then
6897 		     secondary reload reg to OLD using our insn.  */
6898 
6899 		  /* If REAL_OLD is a paradoxical SUBREG, remove it
6900 		     and try to put the opposite SUBREG on
6901 		     RELOADREG.  */
6902 		  if (GET_CODE (real_old) == SUBREG
6903 		      && (GET_MODE_SIZE (GET_MODE (real_old))
6904 			  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6905 		      && 0 != (tem = gen_lowpart_common
6906 			       (GET_MODE (SUBREG_REG (real_old)),
6907 				reloadreg)))
6908 		    real_old = SUBREG_REG (real_old), reloadreg = tem;
6909 
6910 		  gen_reload (reloadreg, second_reloadreg,
6911 			      rl->opnum, rl->when_needed);
6912 		  emit_insn ((GEN_FCN (tertiary_icode)
6913 			      (real_old, reloadreg, third_reloadreg)));
6914 		  special = 1;
6915 		}
6916 
6917 	      else
6918 		{
6919 		  /* Copy between the reload regs here and then to
6920 		     OUT later.  */
6921 
6922 		  gen_reload (reloadreg, second_reloadreg,
6923 			      rl->opnum, rl->when_needed);
6924 		  if (tertiary_reload >= 0)
6925 		    {
6926 		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
6927 
6928 		      gen_reload (third_reloadreg, reloadreg,
6929 				  rl->opnum, rl->when_needed);
6930 		      reloadreg = third_reloadreg;
6931 		    }
6932 		}
6933 	    }
6934 	}
6935     }
6936 
6937   /* Output the last reload insn.  */
6938   if (! special)
6939     {
6940       rtx set;
6941 
6942       /* Don't output the last reload if OLD is not the dest of
6943 	 INSN but appears in its src and is clobbered by INSN.  */
6944       if (! flag_expensive_optimizations
6945 	  || !REG_P (old)
6946 	  || !(set = single_set (insn))
6947 	  || rtx_equal_p (old, SET_DEST (set))
6948 	  || !reg_mentioned_p (old, SET_SRC (set))
6949 	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
6950 	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
6951 	gen_reload (old, reloadreg, rl->opnum,
6952 		    rl->when_needed);
6953     }
6954 
6955   /* Look at all insns we emitted, just to be safe.  */
6956   for (p = get_insns (); p; p = NEXT_INSN (p))
6957     if (INSN_P (p))
6958       {
6959 	rtx pat = PATTERN (p);
6960 
6961 	/* If this output reload doesn't come from a spill reg,
6962 	   clear any memory of reloaded copies of the pseudo reg.
6963 	   If this output reload comes from a spill reg,
6964 	   reg_has_output_reload will make this do nothing.  */
6965 	note_stores (pat, forget_old_reloads_1, NULL);
6966 
6967 	if (reg_mentioned_p (rl->reg_rtx, pat))
6968 	  {
6969 	    rtx set = single_set (insn);
6970 	    if (reload_spill_index[j] < 0
6971 		&& set
6972 		&& SET_SRC (set) == rl->reg_rtx)
6973 	      {
6974 		int src = REGNO (SET_SRC (set));
6975 
6976 		reload_spill_index[j] = src;
6977 		SET_HARD_REG_BIT (reg_is_output_reload, src);
6978 		if (find_regno_note (insn, REG_DEAD, src))
6979 		  SET_HARD_REG_BIT (reg_reloaded_died, src);
6980 	      }
6981 	    if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6982 	      {
6983 		int s = rl->secondary_out_reload;
6984 		set = single_set (p);
6985 		/* If this reload copies only to the secondary reload
6986 		   register, the secondary reload does the actual
6987 		   store.  */
6988 		if (s >= 0 && set == NULL_RTX)
6989 		  /* We can't tell what function the secondary reload
6990 		     has and where the actual store to the pseudo is
6991 		     made; leave new_spill_reg_store alone.  */
6992 		  ;
6993 		else if (s >= 0
6994 			 && SET_SRC (set) == rl->reg_rtx
6995 			 && SET_DEST (set) == rld[s].reg_rtx)
6996 		  {
6997 		    /* Usually the next instruction will be the
6998 		       secondary reload insn;  if we can confirm
6999 		       that it is, setting new_spill_reg_store to
7000 		       that insn will allow an extra optimization.  */
7001 		    rtx s_reg = rld[s].reg_rtx;
7002 		    rtx next = NEXT_INSN (p);
7003 		    rld[s].out = rl->out;
7004 		    rld[s].out_reg = rl->out_reg;
7005 		    set = single_set (next);
7006 		    if (set && SET_SRC (set) == s_reg
7007 			&& ! new_spill_reg_store[REGNO (s_reg)])
7008 		      {
7009 			SET_HARD_REG_BIT (reg_is_output_reload,
7010 					  REGNO (s_reg));
7011 			new_spill_reg_store[REGNO (s_reg)] = next;
7012 		      }
7013 		  }
7014 		else
7015 		  new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
7016 	      }
7017 	  }
7018       }
7019 
7020   if (rl->when_needed == RELOAD_OTHER)
7021     {
7022       emit_insn (other_output_reload_insns[rl->opnum]);
7023       other_output_reload_insns[rl->opnum] = get_insns ();
7024     }
7025   else
7026     output_reload_insns[rl->opnum] = get_insns ();
7027 
7028   if (flag_non_call_exceptions)
7029     copy_eh_notes (insn, get_insns ());
7030 
7031   end_sequence ();
7032 }
7033 
7034 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7035    and has the number J.  */
7036 static void
7037 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7038 {
7039   rtx insn = chain->insn;
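  /* If IN has been replaced with a MEM (e.g. a spilled pseudo's stack slot),
     prefer the original expression recorded in IN_REG as the reload value.  */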
7040   rtx old = (rl->in && MEM_P (rl->in)
7041 	     ? rl->in_reg : rl->in);
7042 
7043   if (old != 0
7044       /* AUTO_INC reloads need to be handled even if inherited.  We got an
7045 	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7046       && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7047       && ! rtx_equal_p (rl->reg_rtx, old)
7048       && rl->reg_rtx != 0)
7049     emit_input_reload_insns (chain, rld + j, old, j);
7050 
7051   /* When inheriting a wider reload, we have a MEM in rl->in,
7052      e.g. inheriting a SImode output reload for
7053      (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7054   if (optimize && reload_inherited[j] && rl->in
7055       && MEM_P (rl->in)
7056       && MEM_P (rl->in_reg)
7057       && reload_spill_index[j] >= 0
7058       && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7059     rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7060 
7061   /* If we are reloading a register that was recently stored in with an
7062      output-reload, see if we can prove there was
7063      actually no need to store the old value in it.  */
7064 
7065   if (optimize
7066       /* Only attempt this for input reloads; for RELOAD_OTHER we would miss
7067 	 the fact that there may be multiple uses of the previous output reload.
7068 	 Restricting to RELOAD_FOR_INPUT is mostly paranoia.  */
7069       && rl->when_needed == RELOAD_FOR_INPUT
7070       && (reload_inherited[j] || reload_override_in[j])
7071       && rl->reg_rtx
7072       && REG_P (rl->reg_rtx)
7073       && spill_reg_store[REGNO (rl->reg_rtx)] != 0
7074 #if 0
7075       /* There doesn't seem to be any reason to restrict this to pseudos
7076 	 and doing so loses in the case where we are copying from a
7077 	 register of the wrong class.  */
7078       && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
7079 	  >= FIRST_PSEUDO_REGISTER)
7080 #endif
7081       /* The insn might already have some references to stack slots
7082 	 replaced by MEMs, while reload_out_reg still names the
7083 	 original pseudo.  */
7084       && (dead_or_set_p (insn,
7085 			 spill_reg_stored_to[REGNO (rl->reg_rtx)])
7086 	  || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
7087 			  rl->out_reg)))
7088     delete_output_reload (insn, j, REGNO (rl->reg_rtx));
7089 }
7090 
7091 /* Do output reloading for reload RL, which is for the insn described by
7092    CHAIN and has the number J.
7093    ??? At some point we need to support handling output reloads of
7094    JUMP_INSNs or insns that set cc0.  */
7095 static void
7096 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7097 {
7098   rtx note, old;
7099   rtx insn = chain->insn;
7100   /* If this is an output reload that stores something that is
7101      not loaded in this same reload, see if we can eliminate a previous
7102      store.  */
7103   rtx pseudo = rl->out_reg;
7104 
7105   if (pseudo
7106       && optimize
7107       && REG_P (pseudo)
7108       && ! rtx_equal_p (rl->in_reg, pseudo)
7109       && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7110       && reg_last_reload_reg[REGNO (pseudo)])
7111     {
7112       int pseudo_no = REGNO (pseudo);
7113       int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7114 
7115       /* We don't need to test full validity of last_regno for
7116 	 inherit here; we only want to know if the store actually
7117 	 matches the pseudo.  */
7118       if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7119 	  && reg_reloaded_contents[last_regno] == pseudo_no
7120 	  && spill_reg_store[last_regno]
7121 	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7122 	delete_output_reload (insn, j, last_regno);
7123     }
7124 
7125   old = rl->out_reg;
7126   if (old == 0
7127       || rl->reg_rtx == old
7128       || rl->reg_rtx == 0)
7129     return;
7130 
7131   /* An output operand that dies right away does need a reload,
7132      but need not be copied from it.  Show the new location in the
7133      REG_UNUSED note.  */
7134   if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7135       && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7136     {
7137       XEXP (note, 0) = rl->reg_rtx;
7138       return;
7139     }
7140   /* Likewise for a SUBREG of an operand that dies.  */
7141   else if (GET_CODE (old) == SUBREG
7142 	   && REG_P (SUBREG_REG (old))
7143 	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
7144 					  SUBREG_REG (old))))
7145     {
7146       XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7147 					   rl->reg_rtx);
7148       return;
7149     }
7150   else if (GET_CODE (old) == SCRATCH)
7151     /* If we aren't optimizing, there won't be a REG_UNUSED note,
7152        but we don't want to make an output reload.  */
7153     return;
7154 
7155   /* If this is a JUMP_INSN, we can't support output reloads yet.  */
7156   gcc_assert (NONJUMP_INSN_P (insn));
7157 
7158   emit_output_reload_insns (chain, rld + j, j);
7159 }
7160 
7161 /* Reload number R reloads from or to a group of hard registers starting at
7162    register REGNO.  Return true if it can be treated for inheritance purposes
7163    like a group of reloads, each one reloading a single hard register.
7164    The caller has already checked that the spill register and REGNO use
7165    the same number of registers to store the reload value.  */
7166 
7167 static bool
7168 inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
7169 {
7170 #ifdef CANNOT_CHANGE_MODE_CLASS
7171   return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
7172 				     GET_MODE (rld[r].reg_rtx),
7173 				     reg_raw_mode[reload_spill_index[r]])
7174 	  && !REG_CANNOT_CHANGE_MODE_P (regno,
7175 					GET_MODE (rld[r].reg_rtx),
7176 					reg_raw_mode[regno]));
7177 #else
7178   return true;
7179 #endif
7180 }
7181 
7182 /* Output insns to reload values in and out of the chosen reload regs.  */
7183 
7184 static void
7185 emit_reload_insns (struct insn_chain *chain)
7186 {
7187   rtx insn = chain->insn;
7188 
7189   int j;
7190 
7191   CLEAR_HARD_REG_SET (reg_reloaded_died);
7192 
7193   for (j = 0; j < reload_n_operands; j++)
7194     input_reload_insns[j] = input_address_reload_insns[j]
7195       = inpaddr_address_reload_insns[j]
7196       = output_reload_insns[j] = output_address_reload_insns[j]
7197       = outaddr_address_reload_insns[j]
7198       = other_output_reload_insns[j] = 0;
7199   other_input_address_reload_insns = 0;
7200   other_input_reload_insns = 0;
7201   operand_reload_insns = 0;
7202   other_operand_reload_insns = 0;
7203 
7204   /* Dump reloads into the dump file.  */
7205   if (dump_file)
7206     {
7207       fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7208       debug_reload_to_stream (dump_file);
7209     }
7210 
7211   /* Now output the instructions to copy the data into and out of the
7212      reload registers.  Do these in the order that the reloads were reported,
7213      since reloads of base and index registers precede reloads of operands
7214      and the operands may need the base and index registers reloaded.  */
7215 
7216   for (j = 0; j < n_reloads; j++)
7217     {
7218       if (rld[j].reg_rtx
7219 	  && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7220 	new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7221 
7222       do_input_reload (chain, rld + j, j);
7223       do_output_reload (chain, rld + j, j);
7224     }
7225 
7226   /* Now write all the insns we made for reloads in the order expected by
7227      the allocation functions.  Prior to the insn being reloaded, we write
7228      the following reloads:
7229 
7230      RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7231 
7232      RELOAD_OTHER reloads.
7233 
7234      For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7235      by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7236      RELOAD_FOR_INPUT reload for the operand.
7237 
7238      RELOAD_FOR_OPADDR_ADDRS reloads.
7239 
7240      RELOAD_FOR_OPERAND_ADDRESS reloads.
7241 
7242      After the insn being reloaded, we write the following:
7243 
7244      For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7245      by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7246      RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7247      reloads for the operand.  The RELOAD_OTHER output reloads are
7248      output in descending order by reload number.  */
7249 
7250   emit_insn_before (other_input_address_reload_insns, insn);
7251   emit_insn_before (other_input_reload_insns, insn);
7252 
7253   for (j = 0; j < reload_n_operands; j++)
7254     {
7255       emit_insn_before (inpaddr_address_reload_insns[j], insn);
7256       emit_insn_before (input_address_reload_insns[j], insn);
7257       emit_insn_before (input_reload_insns[j], insn);
7258     }
7259 
7260   emit_insn_before (other_operand_reload_insns, insn);
7261   emit_insn_before (operand_reload_insns, insn);
7262 
7263   for (j = 0; j < reload_n_operands; j++)
7264     {
7265       rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7266       x = emit_insn_after (output_address_reload_insns[j], x);
7267       x = emit_insn_after (output_reload_insns[j], x);
7268       emit_insn_after (other_output_reload_insns[j], x);
7269     }
7270 
7271   /* For all the spill regs newly reloaded in this instruction,
7272      record what they were reloaded from, so subsequent instructions
7273      can inherit the reloads.
7274 
7275      Update spill_reg_store for the reloads of this insn.
7276      Copy the elements that were updated in the loop above.  */
7277 
7278   for (j = 0; j < n_reloads; j++)
7279     {
7280       int r = reload_order[j];
7281       int i = reload_spill_index[r];
7282 
7283       /* If this is a non-inherited input reload from a pseudo, we must
7284 	 clear any memory of a previous store to the same pseudo.  Only do
7285 	 something if there will not be an output reload for the pseudo
7286 	 being reloaded.  */
7287       if (rld[r].in_reg != 0
7288 	  && ! (reload_inherited[r] || reload_override_in[r]))
7289 	{
7290 	  rtx reg = rld[r].in_reg;
7291 
7292 	  if (GET_CODE (reg) == SUBREG)
7293 	    reg = SUBREG_REG (reg);
7294 
7295 	  if (REG_P (reg)
7296 	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7297 	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7298 	    {
7299 	      int nregno = REGNO (reg);
7300 
7301 	      if (reg_last_reload_reg[nregno])
7302 		{
7303 		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
7304 
7305 		  if (reg_reloaded_contents[last_regno] == nregno)
7306 		    spill_reg_store[last_regno] = 0;
7307 		}
7308 	    }
7309 	}
7310 
7311       /* I is nonneg if this reload used a register.
7312 	 If rld[r].reg_rtx is 0, this is an optional reload
7313 	 that we opted to ignore.  */
7314 
7315       if (i >= 0 && rld[r].reg_rtx != 0)
7316 	{
7317 	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7318 	  int k;
7319 	  int part_reaches_end = 0;
7320 	  int all_reaches_end = 1;
7321 
7322 	  /* For a multi-register reload, we need to check if all or part
7323 	     of the value lives to the end.  */
7324 	  for (k = 0; k < nr; k++)
7325 	    {
7326 	      if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7327 					    rld[r].when_needed))
7328 		part_reaches_end = 1;
7329 	      else
7330 		all_reaches_end = 0;
7331 	    }
7332 
7333 	  /* Ignore reloads that don't reach the end of the insn in
7334 	     their entirety.  */
7335 	  if (all_reaches_end)
7336 	    {
7337 	      /* First, clear out memory of what used to be in this spill reg.
7338 		 If consecutive registers are used, clear them all.  */
7339 
7340 	      for (k = 0; k < nr; k++)
7341 		{
7342 		  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7343 		  CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7344 		}
7345 
7346 	      /* Maybe the spill reg contains a copy of reload_out.  */
7347 	      if (rld[r].out != 0
7348 		  && (REG_P (rld[r].out)
7349 #ifdef AUTO_INC_DEC
7350 		      || ! rld[r].out_reg
7351 #endif
7352 		      || REG_P (rld[r].out_reg)))
7353 		{
7354 		  rtx out = (REG_P (rld[r].out)
7355 			     ? rld[r].out
7356 			     : rld[r].out_reg
7357 			     ? rld[r].out_reg
7358 /* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
7359 		  int nregno = REGNO (out);
7360 		  int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7361 			     : hard_regno_nregs[nregno]
7362 					       [GET_MODE (rld[r].reg_rtx)]);
7363 		  bool piecemeal;
7364 
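		  /* Record which insn stored into spill reg I, the value it
		     holds, and the reload register that last contained
		     NREGNO, so later insns can inherit from it or delete a
		     redundant store.  */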
7365 		  spill_reg_store[i] = new_spill_reg_store[i];
7366 		  spill_reg_stored_to[i] = out;
7367 		  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7368 
7369 		  piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7370 			       && nr == nnr
7371 			       && inherit_piecemeal_p (r, nregno));
7372 
7373 		  /* If NREGNO is a hard register, it may occupy more than
7374 		     one register.  If it does, say what is in the
7375 		     rest of the registers assuming that both registers
7376 		     agree on how many words the object takes.  If not,
7377 		     invalidate the subsequent registers.  */
7378 
7379 		  if (nregno < FIRST_PSEUDO_REGISTER)
7380 		    for (k = 1; k < nnr; k++)
7381 		      reg_last_reload_reg[nregno + k]
7382 			= (piecemeal
7383 			   ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7384 			   : 0);
7385 
7386 		  /* Now do the inverse operation.  */
7387 		  for (k = 0; k < nr; k++)
7388 		    {
7389 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7390 		      reg_reloaded_contents[i + k]
7391 			= (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7392 			   ? nregno
7393 			   : nregno + k);
7394 		      reg_reloaded_insn[i + k] = insn;
7395 		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7396 		      if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7397 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7398 		    }
7399 		}
7400 
7401 	      /* Maybe the spill reg contains a copy of reload_in.  Only do
7402 		 something if there will not be an output reload for
7403 		 the register being reloaded.  */
7404 	      else if (rld[r].out_reg == 0
7405 		       && rld[r].in != 0
7406 		       && ((REG_P (rld[r].in)
7407 			    && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7408 			    && !REGNO_REG_SET_P (&reg_has_output_reload,
7409 						 REGNO (rld[r].in)))
7410 			   || (REG_P (rld[r].in_reg)
7411 			       && !REGNO_REG_SET_P (&reg_has_output_reload,
7412 						    REGNO (rld[r].in_reg))))
7413 		       && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7414 		{
7415 		  int nregno;
7416 		  int nnr;
7417 		  rtx in;
7418 		  bool piecemeal;
7419 
7420 		  if (REG_P (rld[r].in)
7421 		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7422 		    in = rld[r].in;
7423 		  else if (REG_P (rld[r].in_reg))
7424 		    in = rld[r].in_reg;
7425 		  else
7426 		    in = XEXP (rld[r].in_reg, 0);
7427 		  nregno = REGNO (in);
7428 
7429 		  nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7430 			 : hard_regno_nregs[nregno]
7431 					   [GET_MODE (rld[r].reg_rtx)]);
7432 
7433 		  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7434 
7435 		  piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7436 			       && nr == nnr
7437 			       && inherit_piecemeal_p (r, nregno));
7438 
7439 		  if (nregno < FIRST_PSEUDO_REGISTER)
7440 		    for (k = 1; k < nnr; k++)
7441 		      reg_last_reload_reg[nregno + k]
7442 			= (piecemeal
7443 			   ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7444 			   : 0);
7445 
7446 		  /* Unless we inherited this reload, show we haven't
7447 		     recently done a store.
7448 		     Previous stores of inherited auto_inc expressions
7449 		     also have to be discarded.  */
7450 		  if (! reload_inherited[r]
7451 		      || (rld[r].out && ! rld[r].out_reg))
7452 		    spill_reg_store[i] = 0;
7453 
7454 		  for (k = 0; k < nr; k++)
7455 		    {
7456 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7457 		      reg_reloaded_contents[i + k]
7458 			= (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7459 			   ? nregno
7460 			   : nregno + k);
7461 		      reg_reloaded_insn[i + k] = insn;
7462 		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7463 		      if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7464 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7465 		    }
7466 		}
7467 	    }
7468 
7469 	  /* However, if part of the reload reaches the end, then we must
7470 	     invalidate the old info for the part that survives to the end.  */
7471 	  else if (part_reaches_end)
7472 	    {
7473 	      for (k = 0; k < nr; k++)
7474 		if (reload_reg_reaches_end_p (i + k,
7475 					      rld[r].opnum,
7476 					      rld[r].when_needed))
7477 		  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7478 	    }
7479 	}
7480 
7481       /* The following if-statement was #if 0'd in 1.34 (or before...).
7482 	 It's reenabled in 1.35 because supposedly nothing else
7483 	 deals with this problem.  */
7484 
7485       /* If a register gets output-reloaded from a non-spill register,
7486 	 that invalidates any previous reloaded copy of it.
7487 	 But forget_old_reloads_1 won't get to see it, because
7488 	 it thinks only about the original insn.  So invalidate it here.
7489 	 Also do the same thing for RELOAD_OTHER constraints where the
7490 	 output is discarded.  */
7491       if (i < 0
7492 	  && ((rld[r].out != 0
7493 	       && (REG_P (rld[r].out)
7494 		   || (MEM_P (rld[r].out)
7495 		       && REG_P (rld[r].out_reg))))
7496 	      || (rld[r].out == 0 && rld[r].out_reg
7497 		  && REG_P (rld[r].out_reg))))
7498 	{
7499 	  rtx out = ((rld[r].out && REG_P (rld[r].out))
7500 		     ? rld[r].out : rld[r].out_reg);
7501 	  int nregno = REGNO (out);
7502 	  if (nregno >= FIRST_PSEUDO_REGISTER)
7503 	    {
7504 	      rtx src_reg, store_insn = NULL_RTX;
7505 
7506 	      reg_last_reload_reg[nregno] = 0;
7507 
7508 	      /* If we can find a hard register that is stored, record
7509 		 the storing insn so that we may delete this insn with
7510 		 delete_output_reload.  */
7511 	      src_reg = rld[r].reg_rtx;
7512 
7513 	      /* If this is an optional reload, try to find the source reg
7514 		 from an input reload.  */
7515 	      if (! src_reg)
7516 		{
7517 		  rtx set = single_set (insn);
7518 		  if (set && SET_DEST (set) == rld[r].out)
7519 		    {
7520 		      int k;
7521 
7522 		      src_reg = SET_SRC (set);
7523 		      store_insn = insn;
7524 		      for (k = 0; k < n_reloads; k++)
7525 			{
7526 			  if (rld[k].in == src_reg)
7527 			    {
7528 			      src_reg = rld[k].reg_rtx;
7529 			      break;
7530 			    }
7531 			}
7532 		    }
7533 		}
7534 	      else
7535 		store_insn = new_spill_reg_store[REGNO (src_reg)];
7536 	      if (src_reg && REG_P (src_reg)
7537 		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7538 		{
7539 		  int src_regno = REGNO (src_reg);
7540 		  int nr = hard_regno_nregs[src_regno][rld[r].mode];
7541 		  /* Where to find a death note varies with
7542 		     PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
7543 		     necessarily checked exactly in the code that moves
7544 		     notes, so just check both locations.  */
7545 		  rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7546 		  if (! note && store_insn)
7547 		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
7548 		  while (nr-- > 0)
7549 		    {
7550 		      spill_reg_store[src_regno + nr] = store_insn;
7551 		      spill_reg_stored_to[src_regno + nr] = out;
7552 		      reg_reloaded_contents[src_regno + nr] = nregno;
7553 		      reg_reloaded_insn[src_regno + nr] = store_insn;
7554 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7555 		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7556 		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7557 							  GET_MODE (src_reg)))
7558 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7559 					  src_regno + nr);
7560 		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7561 		      if (note)
7562 			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7563 		      else
7564 			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7565 		    }
7566 		  reg_last_reload_reg[nregno] = src_reg;
7567 		  /* We have to set reg_has_output_reload here, or else
7568 		     forget_old_reloads_1 will clear reg_last_reload_reg
7569 		     right away.  */
7570 		  SET_REGNO_REG_SET (&reg_has_output_reload,
7571 				     nregno);
7572 		}
7573 	    }
7574 	  else
7575 	    {
7576 	      int num_regs = hard_regno_nregs[nregno][GET_MODE (out)];
7577 
7578 	      while (num_regs-- > 0)
7579 		reg_last_reload_reg[nregno + num_regs] = 0;
7580 	    }
7581 	}
7582     }
7583   IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7584 }
7585 
7586 /* Go through the motions to emit INSN and test if it is strictly valid.
7587    Return the emitted insn if valid, else return NULL.  */
7588 
7589 static rtx
7590 emit_insn_if_valid_for_reload (rtx insn)
7591 {
7592   rtx last = get_last_insn ();
7593   int code;
7594 
7595   insn = emit_insn (insn);
7596   code = recog_memoized (insn);
7597 
7598   if (code >= 0)
7599     {
7600       extract_insn (insn);
7601       /* We want constrain_operands to treat this insn strictly in its
7602 	 validity determination, i.e., the way it would after reload has
7603 	 completed.  */
7604       if (constrain_operands (1))
7605 	return insn;
7606     }
7607 
7608   delete_insns_since (last);
7609   return NULL;
7610 }
7611 
7612 /* Emit code to perform a reload from IN (which may be a reload register) to
7613    OUT (which may also be a reload register).  IN or OUT is from operand
7614    OPNUM with reload type TYPE.
7615 
7616    Returns first insn emitted.  */
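/* Note that gen_reload may recurse (for instance via the two-insn PLUS
   fallback or the secondary-memory path below), so a single call can
   end up emitting several insns.  */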
7617 
7618 static rtx
7619 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7620 {
7621   rtx last = get_last_insn ();
7622   rtx tem;
7623 
7624   /* If IN is a paradoxical SUBREG, remove it and try to put the
7625      opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
7626   if (GET_CODE (in) == SUBREG
7627       && (GET_MODE_SIZE (GET_MODE (in))
7628 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7629       && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7630     in = SUBREG_REG (in), out = tem;
7631   else if (GET_CODE (out) == SUBREG
7632 	   && (GET_MODE_SIZE (GET_MODE (out))
7633 	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7634 	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7635     out = SUBREG_REG (out), in = tem;
7636 
7637   /* How to do this reload can get quite tricky.  Normally, we are being
7638      asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7639      register that didn't get a hard register.  In that case we can just
7640      call emit_move_insn.
7641 
7642      We can also be asked to reload a PLUS that adds a register or a MEM to
7643      another register, constant or MEM.  This can occur during frame pointer
7644      elimination and while reloading addresses.  This case is handled by
7645      trying to emit a single insn to perform the add.  If it is not valid,
7646      we use a two insn sequence.
7647 
7648      Or we can be asked to reload a unary operand that was a fragment of
7649      an addressing mode, into a register.  If it isn't recognized as-is,
7650      we try making the unop operand and the reload-register the same:
7651      (set reg:X (unop:X expr:Y))
7652      -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
7653 
7654      Finally, we could be called to handle an 'o' constraint by putting
7655      an address into a register.  In that case, we first try to do this
7656      with a named pattern of "reload_load_address".  If no such pattern
7657      exists, we just emit a SET insn and hope for the best (it will normally
7658      be valid on machines that use 'o').
7659 
7660      This entire process is made complex because reload will never
7661      process the insns we generate here, so we must ensure that they
7662      satisfy their constraints, and because parts of IN might be being
7663      reloaded separately and replaced with spill registers.
7664      Because of this, we are, in some sense, just guessing the right approach
7665      here.  The one listed above seems to work.
7666 
7667      ??? At some point, this whole thing needs to be rethought.  */
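  /* For example, frame pointer elimination will typically hand us
     something like (plus:SI (reg:SI fp) (const_int 64)) to be loaded
     into a single reload register; that is the PLUS case handled first
     below.  */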
7668 
7669   if (GET_CODE (in) == PLUS
7670       && (REG_P (XEXP (in, 0))
7671 	  || GET_CODE (XEXP (in, 0)) == SUBREG
7672 	  || MEM_P (XEXP (in, 0)))
7673       && (REG_P (XEXP (in, 1))
7674 	  || GET_CODE (XEXP (in, 1)) == SUBREG
7675 	  || CONSTANT_P (XEXP (in, 1))
7676 	  || MEM_P (XEXP (in, 1))))
7677     {
7678       /* We need to compute the sum of a register or a MEM and another
7679 	 register, constant, or MEM, and put it into the reload
7680 	 register.  The best possible way of doing this is if the machine
7681 	 has a three-operand ADD insn that accepts the required operands.
7682 
7683 	 The simplest approach is to try to generate such an insn and see if it
7684 	 is recognized and matches its constraints.  If so, it can be used.
7685 
7686 	 It might be better not to actually emit the insn unless it is valid,
7687 	 but we need to pass the insn as an operand to `recog' and
7688 	 `extract_insn' and it is simpler to emit and then delete the insn if
7689 	 not valid than to dummy things up.  */
7690 
7691       rtx op0, op1, tem, insn;
7692       int code;
7693 
7694       op0 = find_replacement (&XEXP (in, 0));
7695       op1 = find_replacement (&XEXP (in, 1));
7696 
7697       /* Since constraint checking is strict, commutativity won't be
7698 	 checked, so we need to do that here to avoid spurious failure
7699 	 if the add instruction is two-address and the second operand
7700 	 of the add is the same as the reload reg, which is frequently
7701 	 the case.  If the insn would be A = B + A, rearrange it so
7702 	 it will be A = A + B as constrain_operands expects.  */
7703 
7704       if (REG_P (XEXP (in, 1))
7705 	  && REGNO (out) == REGNO (XEXP (in, 1)))
7706 	tem = op0, op0 = op1, op1 = tem;
7707 
7708       if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7709 	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7710 
7711       insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7712       if (insn)
7713 	return insn;
7714 
7715       /* If that failed, we must use a conservative two-insn sequence.
7716 
7717 	 Use a move to copy one operand into the reload register.  Prefer
7718 	 to reload a constant, MEM or pseudo since the move patterns can
7719 	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
7720 	 pseudo and OP1 is not a valid operand for an add instruction, then
7721 	 reload OP1.
7722 
7723 	 After reloading one of the operands into the reload register, add
7724 	 the reload register to the output register.
7725 
7726 	 If there is another way to do this for a specific machine, a
7727 	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7728 	 we emit below.  */
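      /* Illustratively, if OUT is reload register R and IN is
	 (plus:SI (reg:SI A) (const_int C)), this fallback usually emits
	    (set R (const_int C))
	    (set R (plus:SI R (reg:SI A)))
	 although the heuristics below may order the operands differently.  */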
7729 
7730       code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7731 
7732       if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
7733 	  || (REG_P (op1)
7734 	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7735 	  || (code != CODE_FOR_nothing
7736 	      && ! ((*insn_data[code].operand[2].predicate)
7737 		    (op1, insn_data[code].operand[2].mode))))
7738 	tem = op0, op0 = op1, op1 = tem;
7739 
7740       gen_reload (out, op0, opnum, type);
7741 
7742       /* If OP0 and OP1 are the same, we can use OUT for OP1.
7743 	 This fixes a problem on the 32K where the stack pointer cannot
7744 	 be used as an operand of an add insn.  */
7745 
7746       if (rtx_equal_p (op0, op1))
7747 	op1 = out;
7748 
7749       insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
7750       if (insn)
7751 	{
7752 	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
7753 	  REG_NOTES (insn)
7754 	    = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7755 	  return insn;
7756 	}
7757 
7758       /* If that failed, copy the address register to the reload register.
7759 	 Then add the constant to the reload register.  */
7760 
7761       gen_reload (out, op1, opnum, type);
7762       insn = emit_insn (gen_add2_insn (out, op0));
7763       REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7764     }
7765 
7766 #ifdef SECONDARY_MEMORY_NEEDED
7767   /* If we need a memory location to do the move, do it that way.  */
7768   else if ((REG_P (in) || GET_CODE (in) == SUBREG)
7769 	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
7770 	   && (REG_P (out) || GET_CODE (out) == SUBREG)
7771 	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7772 	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7773 				       REGNO_REG_CLASS (reg_or_subregno (out)),
7774 				       GET_MODE (out)))
7775     {
7776       /* Get the memory to use and rewrite both registers to its mode.  */
7777       rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7778 
7779       if (GET_MODE (loc) != GET_MODE (out))
7780 	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7781 
7782       if (GET_MODE (loc) != GET_MODE (in))
7783 	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7784 
7785       gen_reload (loc, in, opnum, type);
7786       gen_reload (out, loc, opnum, type);
7787     }
7788 #endif
7789   else if (REG_P (out) && UNARY_P (in))
7790     {
7791       rtx insn;
7792       rtx op1;
7793       rtx out_moded;
7794       rtx set;
7795 
7796       op1 = find_replacement (&XEXP (in, 0));
7797       if (op1 != XEXP (in, 0))
7798 	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
7799 
7800       /* First, try a plain SET.  */
7801       set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7802       if (set)
7803 	return set;
7804 
7805       /* If that failed, move the inner operand to the reload
7806 	 register, and try the same unop with the inner expression
7807 	 replaced with the reload register.  */
7808 
7809       if (GET_MODE (op1) != GET_MODE (out))
7810 	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
7811       else
7812 	out_moded = out;
7813 
7814       gen_reload (out_moded, op1, opnum, type);
7815 
7816       insn
7817 	= gen_rtx_SET (VOIDmode, out,
7818 		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
7819 				      out_moded));
7820       insn = emit_insn_if_valid_for_reload (insn);
7821       if (insn)
7822 	{
7823 	  REG_NOTES (insn)
7824 	    = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7825 	  return insn;
7826 	}
7827 
7828       fatal_insn ("Failure trying to reload:", set);
7829     }
7830   /* If IN is a simple operand, use gen_move_insn.  */
7831   else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
7832     {
7833       tem = emit_insn (gen_move_insn (out, in));
7834       /* IN may contain a LABEL_REF; if so, add a REG_LABEL note.  */
7835       mark_jump_label (in, tem, 0);
7836     }
7837 
7838 #ifdef HAVE_reload_load_address
7839   else if (HAVE_reload_load_address)
7840     emit_insn (gen_reload_load_address (out, in));
7841 #endif
7842 
7843   /* Otherwise, just write (set OUT IN) and hope for the best.  */
7844   else
7845     emit_insn (gen_rtx_SET (VOIDmode, out, in));
7846 
7847   /* Return the first insn emitted.
7848      We cannot just return get_last_insn, because there may have
7849      been multiple instructions emitted.  Also note that gen_move_insn may
7850      emit more than one insn itself, so we cannot assume that there is one
7851      insn emitted per emit_insn_before call.  */
7852 
7853   return last ? NEXT_INSN (last) : get_insns ();
7854 }
7855 
7856 /* Delete a previously made output-reload whose result we now believe
7857    is not needed.  First we double-check.
7858 
7859    INSN is the insn now being processed.
7860    LAST_RELOAD_REG is the hard register number for which we want to delete
7861    the last output reload.
7862    J is the reload-number that originally used REG.  The caller has made
7863    certain that reload J doesn't use REG any longer for input.  */
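/* A typical situation: the value stored by the output reload has since
   been picked up only by inheritance from the reload register itself, so
   the store into the pseudo (and possibly its stack slot) may be dead.  */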
7864 
7865 static void
7866 delete_output_reload (rtx insn, int j, int last_reload_reg)
7867 {
7868   rtx output_reload_insn = spill_reg_store[last_reload_reg];
7869   rtx reg = spill_reg_stored_to[last_reload_reg];
7870   int k;
7871   int n_occurrences;
7872   int n_inherited = 0;
7873   rtx i1;
7874   rtx substed;
7875 
7876   /* It is possible that this reload was only used to set another reload
7877      that we eliminated earlier, deleting this instruction too.  */
7878   if (INSN_DELETED_P (output_reload_insn))
7879     return;
7880 
7881   /* Get the raw pseudo-register referred to.  */
7882 
7883   while (GET_CODE (reg) == SUBREG)
7884     reg = SUBREG_REG (reg);
7885   substed = reg_equiv_memory_loc[REGNO (reg)];
7886 
7887   /* This is unsafe if the operand occurs more often in the current
7888      insn than it is inherited.  */
7889   for (k = n_reloads - 1; k >= 0; k--)
7890     {
7891       rtx reg2 = rld[k].in;
7892       if (! reg2)
7893 	continue;
7894       if (MEM_P (reg2) || reload_override_in[k])
7895 	reg2 = rld[k].in_reg;
7896 #ifdef AUTO_INC_DEC
7897       if (rld[k].out && ! rld[k].out_reg)
7898 	reg2 = XEXP (rld[k].in_reg, 0);
7899 #endif
7900       while (GET_CODE (reg2) == SUBREG)
7901 	reg2 = SUBREG_REG (reg2);
7902       if (rtx_equal_p (reg2, reg))
7903 	{
7904 	  if (reload_inherited[k] || reload_override_in[k] || k == j)
7905 	    {
7906 	      n_inherited++;
7907 	      reg2 = rld[k].out_reg;
7908 	      if (! reg2)
7909 		continue;
7910 	      while (GET_CODE (reg2) == SUBREG)
7911 		reg2 = XEXP (reg2, 0);
7912 	      if (rtx_equal_p (reg2, reg))
7913 		n_inherited++;
7914 	    }
7915 	  else
7916 	    return;
7917 	}
7918     }
7919   n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7920   if (substed)
7921     n_occurrences += count_occurrences (PATTERN (insn),
7922 					eliminate_regs (substed, 0,
7923 							NULL_RTX), 0);
7924   for (i1 = reg_equiv_alt_mem_list [REGNO (reg)]; i1; i1 = XEXP (i1, 1))
7925     {
7926       gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
7927       n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
7928     }
7929   if (n_occurrences > n_inherited)
7930     return;
7931 
7932   /* If the pseudo-reg we are reloading is no longer referenced
7933      anywhere between the store into it and here,
7934      and we're within the same basic block, then the value can only
7935      pass through the reload reg and end up here.
7936      Otherwise, give up--return.  */
7937   for (i1 = NEXT_INSN (output_reload_insn);
7938        i1 != insn; i1 = NEXT_INSN (i1))
7939     {
7940       if (NOTE_INSN_BASIC_BLOCK_P (i1))
7941 	return;
7942       if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
7943 	  && reg_mentioned_p (reg, PATTERN (i1)))
7944 	{
7945 	  /* If this is a USE in front of INSN, we only have to check that
7946 	     there are no more references than accounted for by inheritance.  */
7947 	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
7948 	    {
7949 	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7950 	      i1 = NEXT_INSN (i1);
7951 	    }
7952 	  if (n_occurrences <= n_inherited && i1 == insn)
7953 	    break;
7954 	  return;
7955 	}
7956     }
7957 
7958   /* We will be deleting the insn.  Remove the spill reg information.  */
7959   for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
7960     {
7961       spill_reg_store[last_reload_reg + k] = 0;
7962       spill_reg_stored_to[last_reload_reg + k] = 0;
7963     }
7964 
7965   /* The caller has already checked that REG dies or is set in INSN.
7966      It has also checked that we are optimizing, and thus some
7967      inaccuracies in the debugging information are acceptable.
7968      So we could just delete output_reload_insn.  But in some cases
7969      we can improve the debugging information without sacrificing
7970      optimization - maybe even improving the code: See if the pseudo
7971      reg has been completely replaced with reload regs.  If so, delete
7972      the store insn and forget we had a stack slot for the pseudo.  */
7973   if (rld[j].out != rld[j].in
7974       && REG_N_DEATHS (REGNO (reg)) == 1
7975       && REG_N_SETS (REGNO (reg)) == 1
7976       && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7977       && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7978     {
7979       rtx i2;
7980 
7981       /* We know that it was used only between here and the beginning of
7982 	 the current basic block.  (We also know that the last use before
7983 	 INSN was the output reload we are thinking of deleting, but never
7984 	 mind that.)  Search that range; see if any ref remains.  */
7985       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7986 	{
7987 	  rtx set = single_set (i2);
7988 
7989 	  /* Uses which just store in the pseudo don't count,
7990 	     since if they are the only uses, they are dead.  */
7991 	  if (set != 0 && SET_DEST (set) == reg)
7992 	    continue;
7993 	  if (LABEL_P (i2)
7994 	      || JUMP_P (i2))
7995 	    break;
7996 	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
7997 	      && reg_mentioned_p (reg, PATTERN (i2)))
7998 	    {
7999 	      /* Some other ref remains; just delete the output reload we
8000 		 know to be dead.  */
8001 	      delete_address_reloads (output_reload_insn, insn);
8002 	      delete_insn (output_reload_insn);
8003 	      return;
8004 	    }
8005 	}
8006 
8007       /* Delete the now-dead stores into this pseudo.  Note that this
8008 	 loop also takes care of deleting output_reload_insn.  */
8009       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8010 	{
8011 	  rtx set = single_set (i2);
8012 
8013 	  if (set != 0 && SET_DEST (set) == reg)
8014 	    {
8015 	      delete_address_reloads (i2, insn);
8016 	      delete_insn (i2);
8017 	    }
8018 	  if (LABEL_P (i2)
8019 	      || JUMP_P (i2))
8020 	    break;
8021 	}
8022 
8023       /* For the debugging info, say the pseudo lives in this reload reg.  */
8024       reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
8025       alter_reg (REGNO (reg), -1);
8026     }
8027   else
8028     {
8029       delete_address_reloads (output_reload_insn, insn);
8030       delete_insn (output_reload_insn);
8031     }
8032 }
8033 
8034 /* We are going to delete DEAD_INSN.  Recursively delete loads of
8035    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8036    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8037 static void
8038 delete_address_reloads (rtx dead_insn, rtx current_insn)
8039 {
8040   rtx set = single_set (dead_insn);
8041   rtx set2, dst, prev, next;
8042   if (set)
8043     {
8044       rtx dst = SET_DEST (set);
8045       if (MEM_P (dst))
8046 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8047     }
8048   /* If we deleted the store from a reloaded post_{in,de}c expression,
8049      we can delete the matching adds.  */
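  /* The matching adds looked for below are roughly
	PREV:  (set D (plus D (const_int N)))
	NEXT:  (set D (plus D (const_int -N)))
     i.e. two constant adds of the same register, bracketing DEAD_INSN,
     whose increments cancel out.  */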
8050   prev = PREV_INSN (dead_insn);
8051   next = NEXT_INSN (dead_insn);
8052   if (! prev || ! next)
8053     return;
8054   set = single_set (next);
8055   set2 = single_set (prev);
8056   if (! set || ! set2
8057       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8058       || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8059       || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8060     return;
8061   dst = SET_DEST (set);
8062   if (! rtx_equal_p (dst, SET_DEST (set2))
8063       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8064       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8065       || (INTVAL (XEXP (SET_SRC (set), 1))
8066 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8067     return;
8068   delete_related_insns (prev);
8069   delete_related_insns (next);
8070 }
8071 
8072 /* Subfunction of delete_address_reloads: process registers found in X.  */
8073 static void
8074 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8075 {
8076   rtx prev, set, dst, i2;
8077   int i, j;
8078   enum rtx_code code = GET_CODE (x);
8079 
8080   if (code != REG)
8081     {
8082       const char *fmt = GET_RTX_FORMAT (code);
8083       for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8084 	{
8085 	  if (fmt[i] == 'e')
8086 	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8087 	  else if (fmt[i] == 'E')
8088 	    {
8089 	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8090 		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8091 					  current_insn);
8092 	    }
8093 	}
8094       return;
8095     }
8096 
8097   if (spill_reg_order[REGNO (x)] < 0)
8098     return;
8099 
8100   /* Scan backwards for the insn that sets x.  This might be quite far
8101      back, due to inheritance.  */
8102   for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8103     {
8104       code = GET_CODE (prev);
8105       if (code == CODE_LABEL || code == JUMP_INSN)
8106 	return;
8107       if (!INSN_P (prev))
8108 	continue;
8109       if (reg_set_p (x, PATTERN (prev)))
8110 	break;
8111       if (reg_referenced_p (x, PATTERN (prev)))
8112 	return;
8113     }
8114   if (! prev || INSN_UID (prev) < reload_first_uid)
8115     return;
8116   /* Check that PREV only sets the reload register.  */
8117   set = single_set (prev);
8118   if (! set)
8119     return;
8120   dst = SET_DEST (set);
8121   if (!REG_P (dst)
8122       || ! rtx_equal_p (dst, x))
8123     return;
8124   if (! reg_set_p (dst, PATTERN (dead_insn)))
8125     {
8126       /* Check if DST was used in a later insn -
8127 	 it might have been inherited.  */
8128       for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8129 	{
8130 	  if (LABEL_P (i2))
8131 	    break;
8132 	  if (! INSN_P (i2))
8133 	    continue;
8134 	  if (reg_referenced_p (dst, PATTERN (i2)))
8135 	    {
8136 	      /* If there is a reference to the register in the current insn,
8137 		 it might be loaded in a non-inherited reload.  If no other
8138 		 reload uses it, that means the register is set before
8139 		 referenced.  */
8140 	      if (i2 == current_insn)
8141 		{
8142 		  for (j = n_reloads - 1; j >= 0; j--)
8143 		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
8144 			|| reload_override_in[j] == dst)
8145 		      return;
8146 		  for (j = n_reloads - 1; j >= 0; j--)
8147 		    if (rld[j].in && rld[j].reg_rtx == dst)
8148 		      break;
8149 		  if (j >= 0)
8150 		    break;
8151 		}
8152 	      return;
8153 	    }
8154 	  if (JUMP_P (i2))
8155 	    break;
8156 	  /* If DST is still live at CURRENT_INSN, check if it is used for
8157 	     any reload.  Note that even if CURRENT_INSN sets DST, we still
8158 	     have to check the reloads.  */
8159 	  if (i2 == current_insn)
8160 	    {
8161 	      for (j = n_reloads - 1; j >= 0; j--)
8162 		if ((rld[j].reg_rtx == dst && reload_inherited[j])
8163 		    || reload_override_in[j] == dst)
8164 		  return;
8165 	      /* ??? We can't finish the loop here, because dst might be
8166 		 allocated to a pseudo in this block if no reload in this
8167 		 block needs any of the classes containing DST - see
8168 		 spill_hard_reg.  There is no easy way to tell this, so we
8169 		 have to scan till the end of the basic block.  */
8170 	    }
8171 	  if (reg_set_p (dst, PATTERN (i2)))
8172 	    break;
8173 	}
8174     }
8175   delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8176   reg_reloaded_contents[REGNO (dst)] = -1;
8177   delete_insn (prev);
8178 }
8179 
8180 /* Output reload-insns to reload VALUE into RELOADREG.
8181    VALUE is an autoincrement or autodecrement RTX whose operand
8182    is a register or memory location;
8183    so reloading involves incrementing that location.
8184    IN is either identical to VALUE, or some cheaper place to reload from.
8185 
8186    INC_AMOUNT is the number to increment or decrement by (always positive).
8187    This cannot be deduced from VALUE.
8188 
8189    Return the instruction that stores into RELOADREG.  */
8190 
8191 static rtx
8192 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8193 {
8194   /* REG or MEM to be copied and incremented.  */
8195   rtx incloc = find_replacement (&XEXP (value, 0));
8196   /* Nonzero if increment after copying.  */
8197   int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8198 	      || GET_CODE (value) == POST_MODIFY);
8199   rtx last;
8200   rtx inc;
8201   rtx add_insn;
8202   int code;
8203   rtx store;
8204   rtx real_in = in == value ? incloc : in;
8205 
8206   /* No hard register is equivalent to this register after
8207      the inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
8208      we could inc/dec that register as well (maybe even using it for
8209      the source), but I'm not sure it's worth worrying about.  */
8210   if (REG_P (incloc))
8211     reg_last_reload_reg[REGNO (incloc)] = 0;
8212 
8213   if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8214     {
8215       gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8216       inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8217     }
8218   else
8219     {
8220       if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8221 	inc_amount = -inc_amount;
8222 
8223       inc = GEN_INT (inc_amount);
8224     }
8225 
8226   /* If this is post-increment, first copy the location to the reload reg.  */
8227   if (post && real_in != reloadreg)
8228     emit_insn (gen_move_insn (reloadreg, real_in));
8229 
8230   if (in == value)
8231     {
8232       /* See if we can directly increment INCLOC.  Use a method similar to
8233 	 that in gen_reload.  */
8234 
8235       last = get_last_insn ();
8236       add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8237 					 gen_rtx_PLUS (GET_MODE (incloc),
8238 						       incloc, inc)));
8239 
8240       code = recog_memoized (add_insn);
8241       if (code >= 0)
8242 	{
8243 	  extract_insn (add_insn);
8244 	  if (constrain_operands (1))
8245 	    {
8246 	      /* If this is a pre-increment and we have incremented the value
8247 		 where it lives, copy the incremented value to RELOADREG to
8248 		 be used as an address.  */
8249 
8250 	      if (! post)
8251 		emit_insn (gen_move_insn (reloadreg, incloc));
8252 
8253 	      return add_insn;
8254 	    }
8255 	}
8256       delete_insns_since (last);
8257     }
8258 
8259   /* If we couldn't do the increment directly, we must increment in RELOADREG.
8260      The way we do this depends on whether this is pre- or post-increment.
8261      For pre-increment, copy INCLOC to the reload register, increment it
8262      there, then save back.  */
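  /* Schematically, with R = RELOADREG, L = INCLOC and increment N, the
     pre-increment case below amounts to
	(set R L)  (set R (plus R N))  (set L R).  */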
8263 
8264   if (! post)
8265     {
8266       if (in != reloadreg)
8267 	emit_insn (gen_move_insn (reloadreg, real_in));
8268       emit_insn (gen_add2_insn (reloadreg, inc));
8269       store = emit_insn (gen_move_insn (incloc, reloadreg));
8270     }
8271   else
8272     {
8273       /* Postincrement.
8274 	 Because this might be a jump insn or a compare, and because RELOADREG
8275 	 may not be available after the insn in an input reload, we must do
8276 	 the incrementation before the insn being reloaded for.
8277 
8278 	 We have already copied IN to RELOADREG.  Increment the copy in
8279 	 RELOADREG, save that back, then decrement RELOADREG so it has
8280 	 the original value.  */
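      /* Schematically:  (set R L) was done above, then
	    (set R (plus R N))  (set L R)  (set R (plus R -N)),
	 leaving R holding the value the reloaded insn expects.  */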
8281 
8282       emit_insn (gen_add2_insn (reloadreg, inc));
8283       store = emit_insn (gen_move_insn (incloc, reloadreg));
8284       if (GET_CODE (inc) == CONST_INT)
8285 	emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL(inc))));
8286       else
8287 	emit_insn (gen_sub2_insn (reloadreg, inc));
8288     }
8289 
8290   return store;
8291 }
8292 
8293 #ifdef AUTO_INC_DEC
8294 static void
8295 add_auto_inc_notes (rtx insn, rtx x)
8296 {
8297   enum rtx_code code = GET_CODE (x);
8298   const char *fmt;
8299   int i, j;
8300 
8301   if (code == MEM && auto_inc_p (XEXP (x, 0)))
8302     {
8303       REG_NOTES (insn)
8304 	= gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
8305       return;
8306     }
8307 
8308   /* Scan all the operand sub-expressions.  */
8309   fmt = GET_RTX_FORMAT (code);
8310   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8311     {
8312       if (fmt[i] == 'e')
8313 	add_auto_inc_notes (insn, XEXP (x, i));
8314       else if (fmt[i] == 'E')
8315 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8316 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
8317     }
8318 }
8319 #endif
8320 
8321 /* Copy EH notes from an insn to its reloads.  */
8322 static void
8323 copy_eh_notes (rtx insn, rtx x)
8324 {
8325   rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8326   if (eh_note)
8327     {
8328       for (; x != 0; x = NEXT_INSN (x))
8329 	{
8330 	  if (may_trap_p (PATTERN (x)))
8331 	    REG_NOTES (x)
8332 	      = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8333 				   REG_NOTES (x));
8334 	}
8335     }
8336 }
8337 
8338 /* This is used by the reload pass, which emits some instructions after
8339    abnormal calls, moving the basic block end, when it really wants to
8340    emit them on the edge.  Look for abnormal call edges, search backward
8341    for the proper call, and repair the damage.
8342 
8343    Instructions that throw exceptions internally are handled similarly.  */
8344 void
8345 fixup_abnormal_edges (void)
8346 {
8347   bool inserted = false;
8348   basic_block bb;
8349 
8350   FOR_EACH_BB (bb)
8351     {
8352       edge e;
8353       edge_iterator ei;
8354 
8355       /* Look for cases we are interested in - calls or instructions causing
8356          exceptions.  */
8357       FOR_EACH_EDGE (e, ei, bb->succs)
8358 	{
8359 	  if (e->flags & EDGE_ABNORMAL_CALL)
8360 	    break;
8361 	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8362 	      == (EDGE_ABNORMAL | EDGE_EH))
8363 	    break;
8364 	}
8365       if (e && !CALL_P (BB_END (bb))
8366 	  && !can_throw_internal (BB_END (bb)))
8367 	{
8368 	  rtx insn;
8369 
8370 	  /* Get past the new insns generated.  Allow notes, as the insns
8371 	     may already be deleted.  */
8372 	  insn = BB_END (bb);
8373 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8374 		 && !can_throw_internal (insn)
8375 		 && insn != BB_HEAD (bb))
8376 	    insn = PREV_INSN (insn);
8377 
8378 	  if (CALL_P (insn) || can_throw_internal (insn))
8379 	    {
8380 	      rtx stop, next;
8381 
8382 	      stop = NEXT_INSN (BB_END (bb));
8383 	      BB_END (bb) = insn;
8384 	      insn = NEXT_INSN (insn);
8385 
8386 	      FOR_EACH_EDGE (e, ei, bb->succs)
8387 		if (e->flags & EDGE_FALLTHRU)
8388 		  break;
8389 
8390 	      while (insn && insn != stop)
8391 		{
8392 		  next = NEXT_INSN (insn);
8393 		  if (INSN_P (insn))
8394 		    {
8395 	              delete_insn (insn);
8396 
8397 		      /* Sometimes there's still the return value USE.
8398 			 If it's placed after a trapping call (i.e. that
8399 			 call is the last insn anyway), we have no fallthru
8400 			 edge.  Simply delete this use and don't try to insert
8401 			 on the non-existent edge.  */
8402 		      if (GET_CODE (PATTERN (insn)) != USE)
8403 			{
8404 			  /* We're not deleting it, we're moving it.  */
8405 			  INSN_DELETED_P (insn) = 0;
8406 			  PREV_INSN (insn) = NULL_RTX;
8407 			  NEXT_INSN (insn) = NULL_RTX;
8408 
8409 			  insert_insn_on_edge (insn, e);
8410 			  inserted = true;
8411 			}
8412 		    }
8413 		  insn = next;
8414 		}
8415 	    }
8416 
8417 	  /* It may be that we don't find any such trapping insn.  In this
8418 	     case we discovered quite late that the insn that had been
8419 	     marked as can_throw_internal in fact couldn't trap at all.
8420 	     So we should in fact delete the EH edges out of the block.  */
8421 	  else
8422 	    purge_dead_edges (bb);
8423 	}
8424     }
8425 
8426   /* We've possibly turned a single trapping insn into multiple ones.  */
8427   if (flag_non_call_exceptions)
8428     {
8429       sbitmap blocks;
8430       blocks = sbitmap_alloc (last_basic_block);
8431       sbitmap_ones (blocks);
8432       find_many_sub_basic_blocks (blocks);
8433     }
8434 
8435   if (inserted)
8436     commit_edge_insertions ();
8437 
8438 #ifdef ENABLE_CHECKING
8439   /* Verify that we didn't turn one trapping insn into many, and that
8440      we found and corrected all of the problems wrt fixups on the
8441      fallthru edge.  */
8442   verify_flow_info ();
8443 #endif
8444 }
8445