1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "optabs.h"
32 #include "regs.h"
33 #include "ira.h"
34 #include "recog.h"
35 
36 #include "rtl-error.h"
37 #include "expr.h"
38 #include "addresses.h"
39 #include "cfgrtl.h"
40 #include "cfgbuild.h"
41 #include "reload.h"
42 #include "except.h"
43 #include "dumpfile.h"
44 #include "rtl-iter.h"
45 #include "function-abi.h"
46 
47 /* This file contains the reload pass of the compiler, which is
48    run after register allocation has been done.  It checks that
49    each insn is valid (operands required to be in registers really
50    are in registers of the proper class) and fixes up invalid ones
51    by copying values temporarily into registers for the insns
52    that need them.
53 
54    The results of register allocation are described by the vector
55    reg_renumber; the insns still contain pseudo regs, but reg_renumber
56    can be used to find which hard reg, if any, a pseudo reg is in.
57 
58    The technique we always use is to free up a few hard regs that are
59    called ``reload regs'', and for each place where a pseudo reg
60    must be in a hard reg, copy it temporarily into one of the reload regs.
61 
62    Reload regs are allocated locally for every instruction that needs
63    reloads.  When there are pseudos which are allocated to a register that
64    has been chosen as a reload reg, such pseudos must be ``spilled''.
65    This means that they go to other hard regs, or to stack slots if no other
66    available hard regs can be found.  Spilling can invalidate more
67    insns, requiring additional need for reloads, so we must keep checking
68    until the process stabilizes.
69 
70    For machines with different classes of registers, we must keep track
71    of the register class needed for each reload, and make sure that
72    we allocate enough reload registers of each class.
73 
74    The file reload.c contains the code that checks one insn for
75    validity and reports the reloads that it needs.  This file
76    is in charge of scanning the entire rtl code, accumulating the
77    reload needs, spilling, assigning reload registers to use for
78    fixing up each insn, and generating the new insns to copy values
79    into the reload registers.  */
80 
81 struct target_reload default_target_reload;
82 #if SWITCHABLE_TARGET
83 struct target_reload *this_target_reload = &default_target_reload;
84 #endif
85 
86 #define spill_indirect_levels			\
87   (this_target_reload->x_spill_indirect_levels)
88 
89 /* During reload_as_needed, element N contains a REG rtx for the hard reg
90    into which reg N has been reloaded (perhaps for a previous insn).  */
91 static rtx *reg_last_reload_reg;
92 
93 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
94    for an output reload that stores into reg N.  */
95 static regset_head reg_has_output_reload;
96 
97 /* Indicates which hard regs are reload-registers for an output reload
98    in the current insn.  */
99 static HARD_REG_SET reg_is_output_reload;
100 
101 /* Widest mode in which each pseudo reg is referred to (via subreg).  */
102 static machine_mode *reg_max_ref_mode;
103 
104 /* Vector to remember old contents of reg_renumber before spilling.  */
105 static short *reg_old_renumber;
106 
107 /* During reload_as_needed, element N contains the last pseudo regno reloaded
108    into hard register N.  If that pseudo reg occupied more than one register,
109    reg_reloaded_contents points to that pseudo for each spill register in
110    use; all of these must remain set for an inheritance to occur.  */
111 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
112 
113 /* During reload_as_needed, element N contains the insn for which
114    hard register N was last used.   Its contents are significant only
115    when reg_reloaded_valid is set for this register.  */
116 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
117 
118 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
119 static HARD_REG_SET reg_reloaded_valid;
120 /* Indicate if the register was dead at the end of the reload.
121    This is only valid if reg_reloaded_contents is set and valid.  */
122 static HARD_REG_SET reg_reloaded_dead;
123 
124 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
125 static int n_spills;
126 
127 /* In parallel with spill_regs, contains REG rtx's for those regs.
128    Holds the last rtx used for any given reg, or 0 if it has never
129    been used for spilling yet.  This rtx is reused, provided it has
130    the proper mode.  */
131 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
132 
133 /* In parallel with spill_regs, contains nonzero for a spill reg
134    that was stored after the last time it was used.
135    The precise value is the insn generated to do the store.  */
136 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
137 
138 /* This is the register that was stored with spill_reg_store.  This is a
139    copy of reload_out / reload_out_reg when the value was stored; if
140    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
141 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
142 
143 /* This table is the inverse mapping of spill_regs:
144    indexed by hard reg number,
145    it contains the position of that reg in spill_regs,
146    or -1 for something that is not in spill_regs.
147 
148    ?!?  This is no longer accurate.  */
149 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
150 
151 /* This reg set indicates registers that can't be used as spill registers for
152    the currently processed insn.  These are the hard registers which are live
153    during the insn, but not allocated to pseudos, as well as fixed
154    registers.  */
155 static HARD_REG_SET bad_spill_regs;
156 
157 /* These are the hard registers that can't be used as spill register for any
158    insn.  This includes registers used for user variables and registers that
159    we can't eliminate.  A register that appears in this set also can't be used
160    to retry register allocation.  */
161 static HARD_REG_SET bad_spill_regs_global;
162 
163 /* Describes order of use of registers for reloading
164    of spilled pseudo-registers.  `n_spills' is the number of
165    elements that are actually valid; new ones are added at the end.
166 
167    Both spill_regs and spill_reg_order are used on two occasions:
168    once during find_reload_regs, where they keep track of the spill registers
169    for a single insn, but also during reload_as_needed where they show all
170    the registers ever used by reload.  For the latter case, the information
171    is calculated during finish_spills.  */
172 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 
174 /* This vector of reg sets indicates, for each pseudo, which hard registers
175    may not be used for retrying global allocation because the register was
176    formerly spilled from one of them.  If we allowed reallocating a pseudo to
177    a register that it was already allocated to, reload might not
178    terminate.  */
179 static HARD_REG_SET *pseudo_previous_regs;
180 
181 /* This vector of reg sets indicates, for each pseudo, which hard
182    registers may not be used for retrying global allocation because they
183    are used as spill registers during one of the insns in which the
184    pseudo is live.  */
185 static HARD_REG_SET *pseudo_forbidden_regs;
186 
187 /* All hard regs that have been used as spill registers for any insn are
188    marked in this set.  */
189 static HARD_REG_SET used_spill_regs;
190 
191 /* Index of last register assigned as a spill register.  We allocate in
192    a round-robin fashion.  */
193 static int last_spill_reg;
194 
195 /* Record the stack slot for each spilled hard register.  */
196 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
197 
198 /* Width allocated so far for that stack slot.  */
199 static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
200 
201 /* Record which pseudos needed to be spilled.  */
202 static regset_head spilled_pseudos;
203 
204 /* Record which pseudos changed their allocation in finish_spills.  */
205 static regset_head changed_allocation_pseudos;
206 
207 /* Used for communication between order_regs_for_reload and count_pseudo.
208    Used to avoid counting one pseudo twice.  */
209 static regset_head pseudos_counted;
210 
211 /* First uid used by insns created by reload in this function.
212    Used in find_equiv_reg.  */
213 int reload_first_uid;
214 
215 /* Flag set by local-alloc or global-alloc if anything is live in
216    a call-clobbered reg across calls.  */
217 int caller_save_needed;
218 
219 /* Set to 1 while reload_as_needed is operating.
220    Required by some machines to handle any generated moves differently.  */
221 int reload_in_progress = 0;
222 
223 /* This obstack is used for allocation of rtl during register elimination.
224    The allocated storage can be freed once find_reloads has processed the
225    insn.  */
226 static struct obstack reload_obstack;
227 
228 /* Points to the beginning of the reload_obstack.  All insn_chain structures
229    are allocated first.  */
230 static char *reload_startobj;
231 
232 /* The point after all insn_chain structures.  Used to quickly deallocate
233    memory allocated in copy_reloads during calculate_needs_all_insns.  */
234 static char *reload_firstobj;
235 
236 /* This points before all local rtl generated by register elimination.
237    Used to quickly free all memory after processing one insn.  */
238 static char *reload_insn_firstobj;
239 
240 /* List of insn_chain instructions, one for every insn that reload needs to
241    examine.  */
242 class insn_chain *reload_insn_chain;
243 
244 /* TRUE if we potentially left dead insns in the insn stream and want to
245    run DCE immediately after reload, FALSE otherwise.  */
246 static bool need_dce;
247 
248 /* List of all insns needing reloads.  */
249 static class insn_chain *insns_need_reload;
250 
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  poly_int64_pod initial_offset; /* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  */
  poly_int64_pod offset;	/* Current offset between the two regs.  */
  poly_int64_pod previous_offset; /* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};
275 
276 static struct elim_table *reg_eliminate = 0;
277 
278 /* This is an intermediate structure to initialize the table.  It has
279    exactly the members provided by ELIMINABLE_REGS.  */
280 static const struct elim_table_1
281 {
282   const int from;
283   const int to;
284 } reg_eliminate_1[] =
285 
286   ELIMINABLE_REGS;
287 
288 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
289 
290 /* Record the number of pending eliminations that have an offset not equal
291    to their initial offset.  If nonzero, we use a new copy of each
292    replacement result in any insns encountered.  */
293 int num_not_at_initial_offset;
294 
295 /* Count the number of registers that we may be able to eliminate.  */
296 static int num_eliminable;
297 /* And the number of registers that are equivalent to a constant that
298    can be eliminated to frame_pointer / arg_pointer + constant.  */
299 static int num_eliminable_invariants;
300 
301 /* For each label, we record the offset of each elimination.  If we reach
302    a label by more than one path and an offset differs, we cannot do the
303    elimination.  This information is indexed by the difference of the
304    number of the label and the first label number.  We can't offset the
305    pointer itself as this can cause problems on machines with segmented
306    memory.  The first table is an array of flags that records whether we
307    have yet encountered a label and the second table is an array of arrays,
308    one entry in the latter array for each elimination.  */
309 
310 static int first_label_num;
311 static char *offsets_known_at;
312 static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS];
313 
314 vec<reg_equivs_t, va_gc> *reg_equivs;
315 
316 /* Stack of addresses where an rtx has been changed.  We can undo the
317    changes by popping items off the stack and restoring the original
318    value at each location.
319 
320    We use this simplistic undo capability rather than copy_rtx as copy_rtx
321    will not make a deep copy of a normally sharable rtx, such as
322    (const (plus (symbol_ref) (const_int))).  If such an expression appears
323    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
324    rtx expression would be changed.  See PR 42431.  */
325 
326 typedef rtx *rtx_p;
327 static vec<rtx_p> substitute_stack;
328 
329 /* Number of labels in the current function.  */
330 
331 static int num_labels;
332 
333 static void replace_pseudos_in (rtx *, machine_mode, rtx);
334 static void maybe_fix_stack_asms (void);
335 static void copy_reloads (class insn_chain *);
336 static void calculate_needs_all_insns (int);
337 static int find_reg (class insn_chain *, int);
338 static void find_reload_regs (class insn_chain *);
339 static void select_reload_regs (void);
340 static void delete_caller_save_insns (void);
341 
342 static void spill_failure (rtx_insn *, enum reg_class);
343 static void count_spilled_pseudo (int, int, int);
344 static void delete_dead_insn (rtx_insn *);
345 static void alter_reg (int, int, bool);
346 static void set_label_offsets (rtx, rtx_insn *, int);
347 static void check_eliminable_occurrences (rtx);
348 static void elimination_effects (rtx, machine_mode);
349 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
350 static int eliminate_regs_in_insn (rtx_insn *, int);
351 static void update_eliminable_offsets (void);
352 static void mark_not_eliminable (rtx, const_rtx, void *);
353 static void set_initial_elim_offsets (void);
354 static bool verify_initial_elim_offsets (void);
355 static void set_initial_label_offsets (void);
356 static void set_offsets_for_label (rtx_insn *);
357 static void init_eliminable_invariants (rtx_insn *, bool);
358 static void init_elim_table (void);
359 static void free_reg_equiv (void);
360 static void update_eliminables (HARD_REG_SET *);
361 static bool update_eliminables_and_spill (void);
362 static void elimination_costs_in_insn (rtx_insn *);
363 static void spill_hard_reg (unsigned int, int);
364 static int finish_spills (int);
365 static void scan_paradoxical_subregs (rtx);
366 static void count_pseudo (int);
367 static void order_regs_for_reload (class insn_chain *);
368 static void reload_as_needed (int);
369 static void forget_old_reloads_1 (rtx, const_rtx, void *);
370 static void forget_marked_reloads (regset);
371 static int reload_reg_class_lower (const void *, const void *);
372 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
373 				    machine_mode);
374 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
375 				     machine_mode);
376 static int reload_reg_free_p (unsigned int, int, enum reload_type);
377 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
378 					rtx, rtx, int, int);
379 static int free_for_value_p (int, machine_mode, int, enum reload_type,
380 			     rtx, rtx, int, int);
381 static int allocate_reload_reg (class insn_chain *, int, int);
382 static int conflicts_with_override (rtx);
383 static void failed_reload (rtx_insn *, int);
384 static int set_reload_reg (int, int);
385 static void choose_reload_regs_init (class insn_chain *, rtx *);
386 static void choose_reload_regs (class insn_chain *);
387 static void emit_input_reload_insns (class insn_chain *, struct reload *,
388 				     rtx, int);
389 static void emit_output_reload_insns (class insn_chain *, struct reload *,
390 				      int);
391 static void do_input_reload (class insn_chain *, struct reload *, int);
392 static void do_output_reload (class insn_chain *, struct reload *, int);
393 static void emit_reload_insns (class insn_chain *);
394 static void delete_output_reload (rtx_insn *, int, int, rtx);
395 static void delete_address_reloads (rtx_insn *, rtx_insn *);
396 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
397 static void inc_for_reload (rtx, rtx, rtx, poly_int64);
398 static void substitute (rtx *, const_rtx, rtx);
399 static bool gen_reload_chain_without_interm_reg_p (int, int);
400 static int reloads_conflict (int, int);
401 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
402 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
403 
404 /* Initialize the reload pass.  This is called at the beginning of compilation
405    and may be called again if the target is reinitialized.  */
406 
407 void
init_reload(void)408 init_reload (void)
409 {
410   int i;
411 
412   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
413      Set spill_indirect_levels to the number of levels such addressing is
414      permitted, zero if it is not permitted at all.  */
415 
416   rtx tem
417     = gen_rtx_MEM (Pmode,
418 		   gen_rtx_PLUS (Pmode,
419 				 gen_rtx_REG (Pmode,
420 					      LAST_VIRTUAL_REGISTER + 1),
421 				 gen_int_mode (4, Pmode)));
422   spill_indirect_levels = 0;
423 
424   while (memory_address_p (QImode, tem))
425     {
426       spill_indirect_levels++;
427       tem = gen_rtx_MEM (Pmode, tem);
428     }
429 
430   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
431 
432   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
433   indirect_symref_ok = memory_address_p (QImode, tem);
434 
435   /* See if reg+reg is a valid (and offsettable) address.  */
436 
437   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
438     {
439       tem = gen_rtx_PLUS (Pmode,
440 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
441 			  gen_rtx_REG (Pmode, i));
442 
443       /* This way, we make sure that reg+reg is an offsettable address.  */
444       tem = plus_constant (Pmode, tem, 4);
445 
446       for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
447 	if (!double_reg_address_ok[mode]
448 	    && memory_address_p ((enum machine_mode)mode, tem))
449 	  double_reg_address_ok[mode] = 1;
450     }
451 
452   /* Initialize obstack for our rtl allocation.  */
453   if (reload_startobj == NULL)
454     {
455       gcc_obstack_init (&reload_obstack);
456       reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
457     }
458 
459   INIT_REG_SET (&spilled_pseudos);
460   INIT_REG_SET (&changed_allocation_pseudos);
461   INIT_REG_SET (&pseudos_counted);
462 }
463 
464 /* List of insn chains that are currently unused.  */
465 static class insn_chain *unused_insn_chains = 0;
466 
467 /* Allocate an empty insn_chain structure.  */
468 class insn_chain *
new_insn_chain(void)469 new_insn_chain (void)
470 {
471   class insn_chain *c;
472 
473   if (unused_insn_chains == 0)
474     {
475       c = XOBNEW (&reload_obstack, class insn_chain);
476       INIT_REG_SET (&c->live_throughout);
477       INIT_REG_SET (&c->dead_or_set);
478     }
479   else
480     {
481       c = unused_insn_chains;
482       unused_insn_chains = c->next;
483     }
484   c->is_caller_save_insn = 0;
485   c->need_operand_change = 0;
486   c->need_reload = 0;
487   c->need_elim = 0;
488   return c;
489 }
490 
491 /* Small utility function to set all regs in hard reg set TO which are
492    allocated to pseudos in regset FROM.  */
493 
494 void
compute_use_by_pseudos(HARD_REG_SET * to,regset from)495 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
496 {
497   unsigned int regno;
498   reg_set_iterator rsi;
499 
500   EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
501     {
502       int r = reg_renumber[regno];
503 
504       if (r < 0)
505 	{
506 	  /* reload_combine uses the information from DF_LIVE_IN,
507 	     which might still contain registers that have not
508 	     actually been allocated since they have an
509 	     equivalence.  */
510 	  gcc_assert (ira_conflicts_p || reload_completed);
511 	}
512       else
513 	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
514     }
515 }
516 
517 /* Replace all pseudos found in LOC with their corresponding
518    equivalences.  */
519 
520 static void
replace_pseudos_in(rtx * loc,machine_mode mem_mode,rtx usage)521 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
522 {
523   rtx x = *loc;
524   enum rtx_code code;
525   const char *fmt;
526   int i, j;
527 
528   if (! x)
529     return;
530 
531   code = GET_CODE (x);
532   if (code == REG)
533     {
534       unsigned int regno = REGNO (x);
535 
536       if (regno < FIRST_PSEUDO_REGISTER)
537 	return;
538 
539       x = eliminate_regs_1 (x, mem_mode, usage, true, false);
540       if (x != *loc)
541 	{
542 	  *loc = x;
543 	  replace_pseudos_in (loc, mem_mode, usage);
544 	  return;
545 	}
546 
547       if (reg_equiv_constant (regno))
548 	*loc = reg_equiv_constant (regno);
549       else if (reg_equiv_invariant (regno))
550 	*loc = reg_equiv_invariant (regno);
551       else if (reg_equiv_mem (regno))
552 	*loc = reg_equiv_mem (regno);
553       else if (reg_equiv_address (regno))
554 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
555       else
556 	{
557 	  gcc_assert (!REG_P (regno_reg_rtx[regno])
558 		      || REGNO (regno_reg_rtx[regno]) != regno);
559 	  *loc = regno_reg_rtx[regno];
560 	}
561 
562       return;
563     }
564   else if (code == MEM)
565     {
566       replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
567       return;
568     }
569 
570   /* Process each of our operands recursively.  */
571   fmt = GET_RTX_FORMAT (code);
572   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
573     if (*fmt == 'e')
574       replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
575     else if (*fmt == 'E')
576       for (j = 0; j < XVECLEN (x, i); j++)
577 	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
578 }
579 
580 /* Determine if the current function has an exception receiver block
581    that reaches the exit block via non-exceptional edges  */
582 
583 static bool
has_nonexceptional_receiver(void)584 has_nonexceptional_receiver (void)
585 {
586   edge e;
587   edge_iterator ei;
588   basic_block *tos, *worklist, bb;
589 
590   /* If we're not optimizing, then just err on the safe side.  */
591   if (!optimize)
592     return true;
593 
594   /* First determine which blocks can reach exit via normal paths.  */
595   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
596 
597   FOR_EACH_BB_FN (bb, cfun)
598     bb->flags &= ~BB_REACHABLE;
599 
600   /* Place the exit block on our worklist.  */
601   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
602   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
603 
604   /* Iterate: find everything reachable from what we've already seen.  */
605   while (tos != worklist)
606     {
607       bb = *--tos;
608 
609       FOR_EACH_EDGE (e, ei, bb->preds)
610 	if (!(e->flags & EDGE_ABNORMAL))
611 	  {
612 	    basic_block src = e->src;
613 
614 	    if (!(src->flags & BB_REACHABLE))
615 	      {
616 		src->flags |= BB_REACHABLE;
617 		*tos++ = src;
618 	      }
619 	  }
620     }
621   free (worklist);
622 
623   /* Now see if there's a reachable block with an exceptional incoming
624      edge.  */
625   FOR_EACH_BB_FN (bb, cfun)
626     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
627       return true;
628 
629   /* No exceptional block reached exit unexceptionally.  */
630   return false;
631 }
632 
633 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
634    zero elements) to MAX_REG_NUM elements.
635 
636    Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
637 void
grow_reg_equivs(void)638 grow_reg_equivs (void)
639 {
640   int old_size = vec_safe_length (reg_equivs);
641   int max_regno = max_reg_num ();
642   int i;
643   reg_equivs_t ze;
644 
645   memset (&ze, 0, sizeof (reg_equivs_t));
646   vec_safe_reserve (reg_equivs, max_regno);
647   for (i = old_size; i < max_regno; i++)
648     reg_equivs->quick_insert (i, ze);
649 }
650 
651 
652 /* Global variables used by reload and its subroutines.  */
653 
654 /* The current basic block while in calculate_elim_costs_all_insns.  */
655 static basic_block elim_bb;
656 
657 /* Set during calculate_needs if an insn needs register elimination.  */
658 static int something_needs_elimination;
659 /* Set during calculate_needs if an insn needs an operand changed.  */
660 static int something_needs_operands_changed;
661 /* Set by alter_regs if we spilled a register to the stack.  */
662 static bool something_was_spilled;
663 
664 /* Nonzero means we couldn't get enough spill regs.  */
665 static int failure;
666 
667 /* Temporary array of pseudo-register number.  */
668 static int *temp_pseudo_reg_arr;
669 
670 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
671    If that insn didn't set the register (i.e., it copied the register to
672    memory), just delete that insn instead of the equivalencing insn plus
673    anything now dead.  If we call delete_dead_insn on that insn, we may
674    delete the insn that actually sets the register if the register dies
675    there and that is incorrect.  */
676 static void
remove_init_insns()677 remove_init_insns ()
678 {
679   for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
680     {
681       if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
682 	{
683 	  rtx list;
684 	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
685 	    {
686 	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
687 
688 	      /* If we already deleted the insn or if it may trap, we can't
689 		 delete it.  The latter case shouldn't happen, but can
690 		 if an insn has a variable address, gets a REG_EH_REGION
691 		 note added to it, and then gets converted into a load
692 		 from a constant address.  */
693 	      if (NOTE_P (equiv_insn)
694 		  || can_throw_internal (equiv_insn))
695 		;
696 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
697 		delete_dead_insn (equiv_insn);
698 	      else
699 		SET_INSN_DELETED (equiv_insn);
700 	    }
701 	}
702     }
703 }
704 
705 /* Return true if remove_init_insns will delete INSN.  */
706 static bool
will_delete_init_insn_p(rtx_insn * insn)707 will_delete_init_insn_p (rtx_insn *insn)
708 {
709   rtx set = single_set (insn);
710   if (!set || !REG_P (SET_DEST (set)))
711     return false;
712   unsigned regno = REGNO (SET_DEST (set));
713 
714   if (can_throw_internal (insn))
715     return false;
716 
717   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
718     return false;
719 
720   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
721     {
722       rtx equiv_insn = XEXP (list, 0);
723       if (equiv_insn == insn)
724 	return true;
725     }
726   return false;
727 }
728 
729 /* Main entry point for the reload pass.
730 
731    FIRST is the first insn of the function being compiled.
732 
733    GLOBAL nonzero means we were called from global_alloc
734    and should attempt to reallocate any pseudoregs that we
735    displace from hard regs we will use for reloads.
736    If GLOBAL is zero, we do not have enough information to do that,
737    so any pseudo reg that is spilled must go to the stack.
738 
739    Return value is TRUE if reload likely left dead insns in the
740    stream and a DCE pass should be run to elimiante them.  Else the
741    return value is FALSE.  */
742 
743 bool
reload(rtx_insn * first,int global)744 reload (rtx_insn *first, int global)
745 {
746   int i, n;
747   rtx_insn *insn;
748   struct elim_table *ep;
749   basic_block bb;
750   bool inserted;
751 
752   /* Make sure even insns with volatile mem refs are recognizable.  */
753   init_recog ();
754 
755   failure = 0;
756 
757   reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
758 
759   /* Make sure that the last insn in the chain
760      is not something that needs reloading.  */
761   emit_note (NOTE_INSN_DELETED);
762 
763   /* Enable find_equiv_reg to distinguish insns made by reload.  */
764   reload_first_uid = get_max_uid ();
765 
766   /* Initialize the secondary memory table.  */
767   clear_secondary_mem ();
768 
769   /* We don't have a stack slot for any spill reg yet.  */
770   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
771   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
772 
773   /* Initialize the save area information for caller-save, in case some
774      are needed.  */
775   init_save_areas ();
776 
777   /* Compute which hard registers are now in use
778      as homes for pseudo registers.
779      This is done here rather than (eg) in global_alloc
780      because this point is reached even if not optimizing.  */
781   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
782     mark_home_live (i);
783 
784   /* A function that has a nonlocal label that can reach the exit
785      block via non-exceptional paths must save all call-saved
786      registers.  */
787   if (cfun->has_nonlocal_label
788       && has_nonexceptional_receiver ())
789     crtl->saves_all_registers = 1;
790 
791   if (crtl->saves_all_registers)
792     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
793       if (! crtl->abi->clobbers_full_reg_p (i)
794 	  && ! fixed_regs[i]
795 	  && ! LOCAL_REGNO (i))
796 	df_set_regs_ever_live (i, true);
797 
798   /* Find all the pseudo registers that didn't get hard regs
799      but do have known equivalent constants or memory slots.
800      These include parameters (known equivalent to parameter slots)
801      and cse'd or loop-moved constant memory addresses.
802 
803      Record constant equivalents in reg_equiv_constant
804      so they will be substituted by find_reloads.
805      Record memory equivalents in reg_mem_equiv so they can
806      be substituted eventually by altering the REG-rtx's.  */
807 
808   grow_reg_equivs ();
809   reg_old_renumber = XCNEWVEC (short, max_regno);
810   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
811   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
812   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
813 
814   CLEAR_HARD_REG_SET (bad_spill_regs_global);
815 
816   init_eliminable_invariants (first, true);
817   init_elim_table ();
818 
819   /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
820      stack slots to the pseudos that lack hard regs or equivalents.
821      Do not touch virtual registers.  */
822 
823   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
824   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
825     temp_pseudo_reg_arr[n++] = i;
826 
827   if (ira_conflicts_p)
828     /* Ask IRA to order pseudo-registers for better stack slot
829        sharing.  */
830     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
831 
832   for (i = 0; i < n; i++)
833     alter_reg (temp_pseudo_reg_arr[i], -1, false);
834 
835   /* If we have some registers we think can be eliminated, scan all insns to
836      see if there is an insn that sets one of these registers to something
837      other than itself plus a constant.  If so, the register cannot be
838      eliminated.  Doing this scan here eliminates an extra pass through the
839      main reload loop in the most common case where register elimination
840      cannot be done.  */
841   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
842     if (INSN_P (insn))
843       note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL);
844 
845   maybe_fix_stack_asms ();
846 
847   insns_need_reload = 0;
848   something_needs_elimination = 0;
849 
850   /* Initialize to -1, which means take the first spill register.  */
851   last_spill_reg = -1;
852 
853   /* Spill any hard regs that we know we can't eliminate.  */
854   CLEAR_HARD_REG_SET (used_spill_regs);
855   /* There can be multiple ways to eliminate a register;
856      they should be listed adjacently.
857      Elimination for any register fails only if all possible ways fail.  */
858   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
859     {
860       int from = ep->from;
861       int can_eliminate = 0;
862       do
863 	{
864           can_eliminate |= ep->can_eliminate;
865           ep++;
866 	}
867       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
868       if (! can_eliminate)
869 	spill_hard_reg (from, 1);
870     }
871 
872   if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
873     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
874 
875   finish_spills (global);
876 
877   /* From now on, we may need to generate moves differently.  We may also
878      allow modifications of insns which cause them to not be recognized.
879      Any such modifications will be cleaned up during reload itself.  */
880   reload_in_progress = 1;
881 
882   /* This loop scans the entire function each go-round
883      and repeats until one repetition spills no additional hard regs.  */
884   for (;;)
885     {
886       int something_changed;
887       poly_int64 starting_frame_size;
888 
889       starting_frame_size = get_frame_size ();
890       something_was_spilled = false;
891 
892       set_initial_elim_offsets ();
893       set_initial_label_offsets ();
894 
895       /* For each pseudo register that has an equivalent location defined,
896 	 try to eliminate any eliminable registers (such as the frame pointer)
897 	 assuming initial offsets for the replacement register, which
898 	 is the normal case.
899 
900 	 If the resulting location is directly addressable, substitute
901 	 the MEM we just got directly for the old REG.
902 
903 	 If it is not addressable but is a constant or the sum of a hard reg
904 	 and constant, it is probably not addressable because the constant is
905 	 out of range, in that case record the address; we will generate
906 	 hairy code to compute the address in a register each time it is
907 	 needed.  Similarly if it is a hard register, but one that is not
908 	 valid as an address register.
909 
910 	 If the location is not addressable, but does not have one of the
911 	 above forms, assign a stack slot.  We have to do this to avoid the
912 	 potential of producing lots of reloads if, e.g., a location involves
913 	 a pseudo that didn't get a hard register and has an equivalent memory
914 	 location that also involves a pseudo that didn't get a hard register.
915 
916 	 Perhaps at some point we will improve reload_when_needed handling
917 	 so this problem goes away.  But that's very hairy.  */
918 
919       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
920 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
921 	  {
922 	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
923 				    NULL_RTX);
924 
925 	    if (strict_memory_address_addr_space_p
926 		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
927 		   MEM_ADDR_SPACE (x)))
928 	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
929 	    else if (CONSTANT_P (XEXP (x, 0))
930 		     || (REG_P (XEXP (x, 0))
931 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
932 		     || (GET_CODE (XEXP (x, 0)) == PLUS
933 			 && REG_P (XEXP (XEXP (x, 0), 0))
934 			 && (REGNO (XEXP (XEXP (x, 0), 0))
935 			     < FIRST_PSEUDO_REGISTER)
936 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
937 	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
938 	    else
939 	      {
940 		/* Make a new stack slot.  Then indicate that something
941 		   changed so we go back and recompute offsets for
942 		   eliminable registers because the allocation of memory
943 		   below might change some offset.  reg_equiv_{mem,address}
944 		   will be set up for this pseudo on the next pass around
945 		   the loop.  */
946 		reg_equiv_memory_loc (i) = 0;
947 		reg_equiv_init (i) = 0;
948 		alter_reg (i, -1, true);
949 	      }
950 	  }
951 
952       if (caller_save_needed)
953 	setup_save_areas ();
954 
955       if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
956 	{
957 	  /* If we have a stack frame, we must align it now.  The
958 	     stack size may be a part of the offset computation for
959 	     register elimination.  So if this changes the stack size,
960 	     then repeat the elimination bookkeeping.  We don't
961 	     realign when there is no stack, as that will cause a
962 	     stack frame when none is needed should
963 	     TARGET_STARTING_FRAME_OFFSET not be already aligned to
964 	     STACK_BOUNDARY.  */
965 	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
966 	}
967       /* If we allocated another stack slot, redo elimination bookkeeping.  */
968       if (something_was_spilled
969 	  || maybe_ne (starting_frame_size, get_frame_size ()))
970 	{
971 	  if (update_eliminables_and_spill ())
972 	    finish_spills (0);
973 	  continue;
974 	}
975 
976       if (caller_save_needed)
977 	{
978 	  save_call_clobbered_regs ();
979 	  /* That might have allocated new insn_chain structures.  */
980 	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
981 	}
982 
983       calculate_needs_all_insns (global);
984 
985       if (! ira_conflicts_p)
986 	/* Don't do it for IRA.  We need this info because we don't
987 	   change live_throughout and dead_or_set for chains when IRA
988 	   is used.  */
989 	CLEAR_REG_SET (&spilled_pseudos);
990 
991       something_changed = 0;
992 
993       /* If we allocated any new memory locations, make another pass
994 	 since it might have changed elimination offsets.  */
995       if (something_was_spilled
996 	  || maybe_ne (starting_frame_size, get_frame_size ()))
997 	something_changed = 1;
998 
999       /* Even if the frame size remained the same, we might still have
1000 	 changed elimination offsets, e.g. if find_reloads called
1001 	 force_const_mem requiring the back end to allocate a constant
1002 	 pool base register that needs to be saved on the stack.  */
1003       else if (!verify_initial_elim_offsets ())
1004 	something_changed = 1;
1005 
1006       if (update_eliminables_and_spill ())
1007 	{
1008 	  finish_spills (0);
1009 	  something_changed = 1;
1010 	}
1011       else
1012 	{
1013 	  select_reload_regs ();
1014 	  if (failure)
1015 	    goto failed;
1016 	  if (insns_need_reload)
1017 	    something_changed |= finish_spills (global);
1018 	}
1019 
1020       if (! something_changed)
1021 	break;
1022 
1023       if (caller_save_needed)
1024 	delete_caller_save_insns ();
1025 
1026       obstack_free (&reload_obstack, reload_firstobj);
1027     }
1028 
1029   /* If global-alloc was run, notify it of any register eliminations we have
1030      done.  */
1031   if (global)
1032     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1033       if (ep->can_eliminate)
1034 	mark_elimination (ep->from, ep->to);
1035 
1036   remove_init_insns ();
1037 
1038   /* Use the reload registers where necessary
1039      by generating move instructions to move the must-be-register
1040      values into or out of the reload registers.  */
1041 
1042   if (insns_need_reload != 0 || something_needs_elimination
1043       || something_needs_operands_changed)
1044     {
1045       poly_int64 old_frame_size = get_frame_size ();
1046 
1047       reload_as_needed (global);
1048 
1049       gcc_assert (known_eq (old_frame_size, get_frame_size ()));
1050 
1051       gcc_assert (verify_initial_elim_offsets ());
1052     }
1053 
1054   /* If we were able to eliminate the frame pointer, show that it is no
1055      longer live at the start of any basic block.  If it ls live by
1056      virtue of being in a pseudo, that pseudo will be marked live
1057      and hence the frame pointer will be known to be live via that
1058      pseudo.  */
1059 
1060   if (! frame_pointer_needed)
1061     FOR_EACH_BB_FN (bb, cfun)
1062       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1063 
1064   /* Come here (with failure set nonzero) if we can't get enough spill
1065      regs.  */
1066  failed:
1067 
1068   CLEAR_REG_SET (&changed_allocation_pseudos);
1069   CLEAR_REG_SET (&spilled_pseudos);
1070   reload_in_progress = 0;
1071 
1072   /* Now eliminate all pseudo regs by modifying them into
1073      their equivalent memory references.
1074      The REG-rtx's for the pseudos are modified in place,
1075      so all insns that used to refer to them now refer to memory.
1076 
1077      For a reg that has a reg_equiv_address, all those insns
1078      were changed by reloading so that no insns refer to it any longer;
1079      but the DECL_RTL of a variable decl may refer to it,
1080      and if so this causes the debugging info to mention the variable.  */
1081 
1082   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1083     {
1084       rtx addr = 0;
1085 
1086       if (reg_equiv_mem (i))
1087 	addr = XEXP (reg_equiv_mem (i), 0);
1088 
1089       if (reg_equiv_address (i))
1090 	addr = reg_equiv_address (i);
1091 
1092       if (addr)
1093 	{
1094 	  if (reg_renumber[i] < 0)
1095 	    {
1096 	      rtx reg = regno_reg_rtx[i];
1097 
1098 	      REG_USERVAR_P (reg) = 0;
1099 	      PUT_CODE (reg, MEM);
1100 	      XEXP (reg, 0) = addr;
1101 	      if (reg_equiv_memory_loc (i))
1102 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1103 	      else
1104 		MEM_ATTRS (reg) = 0;
1105 	      MEM_NOTRAP_P (reg) = 1;
1106 	    }
1107 	  else if (reg_equiv_mem (i))
1108 	    XEXP (reg_equiv_mem (i), 0) = addr;
1109 	}
1110 
1111       /* We don't want complex addressing modes in debug insns
1112 	 if simpler ones will do, so delegitimize equivalences
1113 	 in debug insns.  */
1114       if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1115 	{
1116 	  rtx reg = regno_reg_rtx[i];
1117 	  rtx equiv = 0;
1118 	  df_ref use, next;
1119 
1120 	  if (reg_equiv_constant (i))
1121 	    equiv = reg_equiv_constant (i);
1122 	  else if (reg_equiv_invariant (i))
1123 	    equiv = reg_equiv_invariant (i);
1124 	  else if (reg && MEM_P (reg))
1125 	    equiv = targetm.delegitimize_address (reg);
1126 	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1127 	    equiv = reg;
1128 
1129 	  if (equiv == reg)
1130 	    continue;
1131 
1132 	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1133 	    {
1134 	      insn = DF_REF_INSN (use);
1135 
1136 	      /* Make sure the next ref is for a different instruction,
1137 		 so that we're not affected by the rescan.  */
1138 	      next = DF_REF_NEXT_REG (use);
1139 	      while (next && DF_REF_INSN (next) == insn)
1140 		next = DF_REF_NEXT_REG (next);
1141 
1142 	      if (DEBUG_BIND_INSN_P (insn))
1143 		{
1144 		  if (!equiv)
1145 		    {
1146 		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1147 		      df_insn_rescan_debug_internal (insn);
1148 		    }
1149 		  else
1150 		    INSN_VAR_LOCATION_LOC (insn)
1151 		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1152 					      reg, equiv);
1153 		}
1154 	    }
1155 	}
1156     }
1157 
1158   /* We must set reload_completed now since the cleanup_subreg_operands call
1159      below will re-recognize each insn and reload may have generated insns
1160      which are only valid during and after reload.  */
1161   reload_completed = 1;
1162 
1163   /* Make a pass over all the insns and delete all USEs which we inserted
1164      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1165      notes.  Delete all CLOBBER insns, except those that refer to the return
1166      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1167      from misarranging variable-array code, and simplify (subreg (reg))
1168      operands.  Strip and regenerate REG_INC notes that may have been moved
1169      around.  */
1170 
1171   for (insn = first; insn; insn = NEXT_INSN (insn))
1172     if (INSN_P (insn))
1173       {
1174 	rtx *pnote;
1175 
1176 	if (CALL_P (insn))
1177 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1178 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1179 
1180 	if ((GET_CODE (PATTERN (insn)) == USE
1181 	     /* We mark with QImode USEs introduced by reload itself.  */
1182 	     && (GET_MODE (insn) == QImode
1183 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1184 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1185 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1186 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1187 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1188 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1189 				!= stack_pointer_rtx))
1190 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1191 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1192 	  {
1193 	    delete_insn (insn);
1194 	    continue;
1195 	  }
1196 
1197 	/* Some CLOBBERs may survive until here and still reference unassigned
1198 	   pseudos with const equivalent, which may in turn cause ICE in later
1199 	   passes if the reference remains in place.  */
1200 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1201 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1202 			      VOIDmode, PATTERN (insn));
1203 
1204 	/* Discard obvious no-ops, even without -O.  This optimization
1205 	   is fast and doesn't interfere with debugging.  */
1206 	if (NONJUMP_INSN_P (insn)
1207 	    && GET_CODE (PATTERN (insn)) == SET
1208 	    && REG_P (SET_SRC (PATTERN (insn)))
1209 	    && REG_P (SET_DEST (PATTERN (insn)))
1210 	    && (REGNO (SET_SRC (PATTERN (insn)))
1211 		== REGNO (SET_DEST (PATTERN (insn)))))
1212 	  {
1213 	    delete_insn (insn);
1214 	    continue;
1215 	  }
1216 
1217 	pnote = &REG_NOTES (insn);
1218 	while (*pnote != 0)
1219 	  {
1220 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1221 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1222 		|| REG_NOTE_KIND (*pnote) == REG_INC)
1223 	      *pnote = XEXP (*pnote, 1);
1224 	    else
1225 	      pnote = &XEXP (*pnote, 1);
1226 	  }
1227 
1228 	if (AUTO_INC_DEC)
1229 	  add_auto_inc_notes (insn, PATTERN (insn));
1230 
1231 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1232 	cleanup_subreg_operands (insn);
1233 
1234 	/* Clean up invalid ASMs so that they don't confuse later passes.
1235 	   See PR 21299.  */
1236 	if (asm_noperands (PATTERN (insn)) >= 0)
1237 	  {
1238 	    extract_insn (insn);
1239 	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
1240 	      {
1241 		error_for_asm (insn,
1242 			       "%<asm%> operand has impossible constraints");
1243 		delete_insn (insn);
1244 		continue;
1245 	      }
1246 	  }
1247       }
1248 
1249   free (temp_pseudo_reg_arr);
1250 
1251   /* Indicate that we no longer have known memory locations or constants.  */
1252   free_reg_equiv ();
1253 
1254   free (reg_max_ref_mode);
1255   free (reg_old_renumber);
1256   free (pseudo_previous_regs);
1257   free (pseudo_forbidden_regs);
1258 
1259   CLEAR_HARD_REG_SET (used_spill_regs);
1260   for (i = 0; i < n_spills; i++)
1261     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1262 
1263   /* Free all the insn_chain structures at once.  */
1264   obstack_free (&reload_obstack, reload_startobj);
1265   unused_insn_chains = 0;
1266 
1267   inserted = fixup_abnormal_edges ();
1268 
1269   /* We've possibly turned single trapping insn into multiple ones.  */
1270   if (cfun->can_throw_non_call_exceptions)
1271     {
1272       auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1273       bitmap_ones (blocks);
1274       find_many_sub_basic_blocks (blocks);
1275     }
1276 
1277   if (inserted)
1278     commit_edge_insertions ();
1279 
1280   /* Replacing pseudos with their memory equivalents might have
1281      created shared rtx.  Subsequent passes would get confused
1282      by this, so unshare everything here.  */
1283   unshare_all_rtl_again (first);
1284 
1285 #ifdef STACK_BOUNDARY
1286   /* init_emit has set the alignment of the hard frame pointer
1287      to STACK_BOUNDARY.  It is very likely no longer valid if
1288      the hard frame pointer was used for register allocation.  */
1289   if (!frame_pointer_needed)
1290     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1291 #endif
1292 
1293   substitute_stack.release ();
1294 
1295   gcc_assert (bitmap_empty_p (&spilled_pseudos));
1296 
1297   reload_completed = !failure;
1298 
1299   return need_dce;
1300 }
1301 
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  class insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest, and only those whose clobbers
	 appear as extra elements of a PARALLEL around the asm_operands.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  allowed |= reg_class_contents[cls];
		  cls = NO_REGS;
		  p++;
		  /* After '#', the rest of this alternative is ignored;
		     skip ahead to the ',' or terminating NUL.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraints may be multi-character; advance by the full
		 length of the one we just processed.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      allowed &= clobbered;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1408 
1409 /* Copy the global variables n_reloads and rld into the corresponding elts
1410    of CHAIN.  */
1411 static void
copy_reloads(class insn_chain * chain)1412 copy_reloads (class insn_chain *chain)
1413 {
1414   chain->n_reloads = n_reloads;
1415   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1416   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1417   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1418 }
1419 
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  class insn_chain **pprev_reload = &insns_need_reload;
  class insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the current position on reload_obstack so that per-insn scratch
     allocations made below can be freed en masse.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* Fetch the successor now; the chain element may be unlinked and
	 recycled below when a no-op move is deleted.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Put the chain element on the free list for reuse.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  This pass only
	     records what is needed; the insn is restored to its
	     pre-elimination form.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  /* Link insns that need reloads onto the insns_need_reload list.  */
	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  *pprev_reload = 0;
}
1543 
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  /* Per-pseudo accumulated cost of its equivalence-initializing insn,
     weighted by execution frequency; zero-initialized.  */
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence; for those,
		 record the cost of the eliminated source, weighted by
		 the block's execution frequency.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the collected cost estimates to IRA.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1641 
1642 /* Comparison function for qsort to decide which of two reloads
1643    should be handled first.  *P1 and *P2 are the reload numbers.  */
1644 
1645 static int
reload_reg_class_lower(const void * r1p,const void * r2p)1646 reload_reg_class_lower (const void *r1p, const void *r2p)
1647 {
1648   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1649   int t;
1650 
1651   /* Consider required reloads before optional ones.  */
1652   t = rld[r1].optional - rld[r2].optional;
1653   if (t != 0)
1654     return t;
1655 
1656   /* Count all solitary classes before non-solitary ones.  */
1657   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1658        - (reg_class_size[(int) rld[r1].rclass] == 1));
1659   if (t != 0)
1660     return t;
1661 
1662   /* Aside from solitaires, consider all multi-reg groups first.  */
1663   t = rld[r2].nregs - rld[r1].nregs;
1664   if (t != 0)
1665     return t;
1666 
1667   /* Consider reloads in order of increasing reg-class number.  */
1668   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1669   if (t != 0)
1670     return t;
1671 
1672   /* If reloads are equally urgent, sort by reload number,
1673      so that the results of qsort leave nothing to chance.  */
1674   return r1 - r2;
1675 }
1676 
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   covers only the first hard reg for a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg.  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1688 
1689 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1690 
1691 static void
count_pseudo(int reg)1692 count_pseudo (int reg)
1693 {
1694   int freq = REG_FREQ (reg);
1695   int r = reg_renumber[reg];
1696   int nregs;
1697 
1698   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1699   if (ira_conflicts_p && r < 0)
1700     return;
1701 
1702   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1703       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1704     return;
1705 
1706   SET_REGNO_REG_SET (&pseudos_counted, reg);
1707 
1708   gcc_assert (r >= 0);
1709 
1710   spill_add_cost[r] += freq;
1711   nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1712   while (nregs-- > 0)
1713     {
1714       hard_regno_to_pseudo_regno[r + nregs] = reg;
1715       spill_cost[r + nregs] += freq;
1716     }
1717 }
1718 
1719 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1720    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1721 
1722 static void
order_regs_for_reload(class insn_chain * chain)1723 order_regs_for_reload (class insn_chain *chain)
1724 {
1725   unsigned i;
1726   HARD_REG_SET used_by_pseudos;
1727   HARD_REG_SET used_by_pseudos2;
1728   reg_set_iterator rsi;
1729 
1730   bad_spill_regs = fixed_reg_set;
1731 
1732   memset (spill_cost, 0, sizeof spill_cost);
1733   memset (spill_add_cost, 0, sizeof spill_add_cost);
1734   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1735     hard_regno_to_pseudo_regno[i] = -1;
1736 
1737   /* Count number of uses of each hard reg by pseudo regs allocated to it
1738      and then order them by decreasing use.  First exclude hard registers
1739      that are live in or across this insn.  */
1740 
1741   REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1742   REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1743   bad_spill_regs |= used_by_pseudos;
1744   bad_spill_regs |= used_by_pseudos2;
1745 
1746   /* Now find out which pseudos are allocated to it, and update
1747      hard_reg_n_uses.  */
1748   CLEAR_REG_SET (&pseudos_counted);
1749 
1750   EXECUTE_IF_SET_IN_REG_SET
1751     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1752     {
1753       count_pseudo (i);
1754     }
1755   EXECUTE_IF_SET_IN_REG_SET
1756     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1757     {
1758       count_pseudo (i);
1759     }
1760   CLEAR_REG_SET (&pseudos_counted);
1761 }
1762 
1763 /* Vector of reload-numbers showing the order in which the reloads should
1764    be processed.  */
1765 static short reload_order[MAX_RELOADS];
1766 
1767 /* This is used to keep track of the spill regs used in one insn.  */
1768 static HARD_REG_SET used_spill_regs_local;
1769 
1770 /* We decided to spill hard register SPILLED, which has a size of
1771    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1772    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1773    update SPILL_COST/SPILL_ADD_COST.  */
1774 
1775 static void
count_spilled_pseudo(int spilled,int spilled_nregs,int reg)1776 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1777 {
1778   int freq = REG_FREQ (reg);
1779   int r = reg_renumber[reg];
1780   int nregs;
1781 
1782   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1783   if (ira_conflicts_p && r < 0)
1784     return;
1785 
1786   gcc_assert (r >= 0);
1787 
1788   nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1789 
1790   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1791       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1792     return;
1793 
1794   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1795 
1796   spill_add_cost[r] -= freq;
1797   while (nregs-- > 0)
1798     {
1799       hard_regno_to_pseudo_regno[r + nregs] = -1;
1800       spill_cost[r + nregs] -= freq;
1801     }
1802 }
1803 
1804 /* Find reload register to use for reload number ORDER.  */
1805 
static int
find_reg (class insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  /* Hard regs that may not be used for this reload at all.  */
  HARD_REG_SET not_usable;
  /* Hard regs already claimed by conflicting reloads of this insn.  */
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is bad for spilling (locally or
     globally) or does not belong to the reload's required class.  */
  not_usable = (bad_spill_regs
		| bad_spill_regs_global
		| ~reg_class_contents[rl->rclass]);

  /* Collect the hard regs already handed to earlier-processed reloads
     of this insn that conflict with reload RNUM.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan all hard registers (in allocation order when the target
     defines one) for the cheapest usable candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && targetm.hard_regno_mode_ok (regno, rl->mode))
	{
	  /* Base cost: evicting whatever pseudos occupy REGNO.  */
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);

	  /* For a multi-register reload, every following hard reg must
	     also be usable; add its incremental cost as we check.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  Build the -1-terminated list of pseudos
		 currently occupying this candidate's hard regs...  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      regno_pseudo_regs[n++] = -1;
	      /* ...and let IRA compare it against the current best.  */
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember the occupant list of the new best choice.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Prefer a register that already holds the value being
	     reloaded in or out.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && crtl->abi->clobbers_full_reg_p (regno)
		  && !crtl->abi->clobbers_full_reg_p (best_reg)
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  /* No usable register was found.  */
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs (best_reg, rl->mode);
  rl->regno = best_reg;

  /* Mark the pseudos living in the chosen regs as spilled, updating the
     cost arrays accordingly.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  /* After the eviction above, no cost should remain on the chosen regs;
     record them as spill regs used by this insn.  */
  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1944 
1945 /* Find more reload regs to satisfy the remaining need of an insn, which
1946    is given by CHAIN.
1947    Do it by ascending class number, since otherwise a reg
1948    might be spilled for a big class and might fail to count
1949    for a smaller class even though it belongs to that class.  */
1950 
1951 static void
find_reload_regs(class insn_chain * chain)1952 find_reload_regs (class insn_chain *chain)
1953 {
1954   int i;
1955 
1956   /* In order to be certain of getting the registers we need,
1957      we must sort the reloads into order of increasing register class.
1958      Then our grabbing of reload registers will parallel the process
1959      that provided the reload registers.  */
1960   for (i = 0; i < chain->n_reloads; i++)
1961     {
1962       /* Show whether this reload already has a hard reg.  */
1963       if (chain->rld[i].reg_rtx)
1964 	{
1965 	  chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
1966 	  chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
1967 	}
1968       else
1969 	chain->rld[i].regno = -1;
1970       reload_order[i] = i;
1971     }
1972 
1973   n_reloads = chain->n_reloads;
1974   memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1975 
1976   CLEAR_HARD_REG_SET (used_spill_regs_local);
1977 
1978   if (dump_file)
1979     fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1980 
1981   qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1982 
1983   /* Compute the order of preference for hard registers to spill.  */
1984 
1985   order_regs_for_reload (chain);
1986 
1987   for (i = 0; i < n_reloads; i++)
1988     {
1989       int r = reload_order[i];
1990 
1991       /* Ignore reloads that got marked inoperative.  */
1992       if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1993 	  && ! rld[r].optional
1994 	  && rld[r].regno == -1)
1995 	if (! find_reg (chain, i))
1996 	  {
1997 	    if (dump_file)
1998 	      fprintf (dump_file, "reload failure for reload %d\n", r);
1999 	    spill_failure (chain->insn, rld[r].rclass);
2000 	    failure = 1;
2001 	    return;
2002 	  }
2003     }
2004 
2005   chain->used_spill_regs = used_spill_regs_local;
2006   used_spill_regs |= used_spill_regs_local;
2007 
2008   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2009 }
2010 
2011 static void
select_reload_regs(void)2012 select_reload_regs (void)
2013 {
2014   class insn_chain *chain;
2015 
2016   /* Try to satisfy the needs for each insn.  */
2017   for (chain = insns_need_reload; chain != 0;
2018        chain = chain->next_need_reload)
2019     find_reload_regs (chain);
2020 }
2021 
2022 /* Delete all insns that were inserted by emit_caller_save_insns during
2023    this iteration.  */
static void
delete_caller_save_insns (void)
{
  class insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Remove each maximal run of caller-save chain nodes, deleting
	 the insns and recycling the nodes.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  class insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the list head valid when unlinking the first node.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* Put the node on the free list for later reuse.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2052 
2053 /* Handle the failure to find a register to spill.
2054    INSN should be one of the insns which needed this particular spill reg.  */
2055 
2056 static void
spill_failure(rtx_insn * insn,enum reg_class rclass)2057 spill_failure (rtx_insn *insn, enum reg_class rclass)
2058 {
2059   if (asm_noperands (PATTERN (insn)) >= 0)
2060     error_for_asm (insn, "cannot find a register in class %qs while "
2061 		   "reloading %<asm%>",
2062 		   reg_class_names[rclass]);
2063   else
2064     {
2065       error ("unable to find a register to spill in class %qs",
2066 	     reg_class_names[rclass]);
2067 
2068       if (dump_file)
2069 	{
2070 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2071 	  debug_reload_to_stream (dump_file);
2072 	}
2073       fatal_insn ("this is the insn:", insn);
2074     }
2075 }
2076 
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */
2079 
2080 static void
delete_dead_insn(rtx_insn * insn)2081 delete_dead_insn (rtx_insn *insn)
2082 {
2083   rtx_insn *prev = prev_active_insn (insn);
2084   rtx prev_dest;
2085 
2086   /* If the previous insn sets a register that dies in our insn make
2087      a note that we want to run DCE immediately after reload.
2088 
2089      We used to delete the previous insn & recurse, but that's wrong for
2090      block local equivalences.  Instead of trying to figure out the exact
2091      circumstances where we can delete the potentially dead insns, just
2092      let DCE do the job.  */
2093   if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2094       && GET_CODE (PATTERN (prev)) == SET
2095       && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2096       && reg_mentioned_p (prev_dest, PATTERN (insn))
2097       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2098       && ! side_effects_p (SET_SRC (PATTERN (prev))))
2099     need_dce = 1;
2100 
2101   SET_INSN_DELETED (insn);
2102 }
2103 
2104 /* Modify the home of pseudo-reg I.
2105    The new home is present in reg_renumber[I].
2106 
2107    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2108    or it may be -1, meaning there is none or it is not relevant.
2109    This is used so that all pseudos spilled from a given hard reg
2110    can share one stack slot.  */
2111 
static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      /* INHERENT_SIZE/ALIGN come from the pseudo's own mode;
	 TOTAL_SIZE additionally covers the widest mode in which the
	 pseudo was ever referenced (paradoxical subregs).  */
      poly_uint64 inherent_size = GET_MODE_SIZE (mode);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
      poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
      /* ??? Seems strange to derive the minimum alignment from the size,
	 but that's the traditional behavior.  For polynomial-size modes,
	 the natural extension is to use the minimum possible size.  */
      unsigned int min_align
	= constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
      poly_int64 adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA supplied a reusable slot, X is set and the else-if
	 chain below is skipped entirely.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && known_ge (spill_stack_slot_width[from_reg], total_size)
	       && known_ge (GET_MODE_SIZE
			    (GET_MODE (spill_stack_slot[from_reg])),
			    inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requested mode/size/alignment to cover the slot
	     previously shared by pseudos spilled from FROM_REG.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (partial_subreg_p (mode,
				    GET_MODE (spill_stack_slot[from_reg])))
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      total_size = ordered_max (total_size,
					spill_stack_slot_width[from_reg]);
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  /* Record the slot so later spills from FROM_REG can share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      adjust += subreg_size_lowpart_offset (inherent_size, total_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2282 
2283 /* Mark the slots in regs_ever_live for the hard regs used by
2284    pseudo-reg number REGNO, accessed in MODE.  */
2285 
2286 static void
mark_home_live_1(int regno,machine_mode mode)2287 mark_home_live_1 (int regno, machine_mode mode)
2288 {
2289   int i, lim;
2290 
2291   i = reg_renumber[regno];
2292   if (i < 0)
2293     return;
2294   lim = end_hard_regno (mode, i);
2295   while (i < lim)
2296     df_set_regs_ever_live (i++, true);
2297 }
2298 
2299 /* Mark the slots in regs_ever_live for the hard regs
2300    used by pseudo-reg number REGNO.  */
2301 
2302 void
mark_home_live(int regno)2303 mark_home_live (int regno)
2304 {
2305   if (reg_renumber[regno] >= 0)
2306     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2307 }
2308 
2309 /* This function handles the tracking of elimination offsets around branches.
2310 
2311    X is a piece of RTL being scanned.
2312 
2313    INSN is the insn that it came from, if any.
2314 
2315    INITIAL_P is nonzero if we are to set the offset to be the initial
2316    offset and zero if we are setting the offset of the label to be the
2317    current offset.  */
2318 
static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = label_ref_label (x);

      /* fall through */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      /* OFFSETS_KNOWN_AT / OFFSETS_AT are indexed by label number
	 relative to FIRST_LABEL_NUM.  */
      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
			(initial_p ? reg_eliminate[i].initial_offset
			 : reg_eliminate[i].offset)))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      /* Process the jump's pattern (a SET of PC, PARALLEL, etc.).  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* fall through */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each arm; a non-label, non-PC/RETURN arm falls out
	     to the "disable eliminations" code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (maybe_ne (p->offset, p->initial_offset))
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2461 
2462 /* This function examines every reg that occurs in X and adjusts the
2463    costs for its elimination which are gathered by IRA.  INSN is the
2464    insn in which X occurs.  We do not recurse into MEM expressions.  */
2465 
2466 static void
note_reg_elim_costly(const_rtx x,rtx insn)2467 note_reg_elim_costly (const_rtx x, rtx insn)
2468 {
2469   subrtx_iterator::array_type array;
2470   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2471     {
2472       const_rtx x = *iter;
2473       if (MEM_P (x))
2474 	iter.skip_subrtxes ();
2475       else if (REG_P (x)
2476 	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
2477 	       && reg_equiv_init (REGNO (x))
2478 	       && reg_equiv_invariant (REGNO (x)))
2479 	{
2480 	  rtx t = reg_equiv_invariant (REGNO (x));
2481 	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2482 	  int cost = set_src_cost (new_rtx, Pmode,
2483 				   optimize_bb_for_speed_p (elim_bb));
2484 	  int freq = REG_FREQ_FROM_BB (elim_bb);
2485 
2486 	  if (cost != 0)
2487 	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2488 	}
2489     }
2490 }
2491 
2492 /* Scan X and replace any eliminable registers (such as fp) with a
2493    replacement (such as sp), plus an offset.
2494 
2495    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2496    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2497    MEM, we are allowed to replace a sum of a register and the constant zero
2498    with the register, which we cannot do outside a MEM.  In addition, we need
2499    to record the fact that a register is referenced outside a MEM.
2500 
2501    If INSN is an insn, it is the insn containing X.  If we replace a REG
2502    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2503    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2504    the REG is being modified.
2505 
2506    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2507    That's used when we eliminate in expressions stored in notes.
2508    This means, do not set ref_outside_mem even if the reference
2509    is outside of MEMs.
2510 
2511    If FOR_COSTS is true, we are being called before reload in order to
2512    estimate the costs of keeping registers with an equivalence unallocated.
2513 
2514    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2515    replacements done assuming all offsets are at their initial values.  If
2516    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2517    encounter, return the actual location so that find_reloads will do
2518    the proper thing.  */
2519 
static rtx
eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
		  bool may_use_invariant, bool for_costs)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new_rtx;
  int i, j;
  const char *fmt;
  /* Nonzero once X has been shallow-copied by the generic operand walk
     at the bottom, so that subsequent operands may be stored in place.  */
  int copied = 0;

  /* Outside of a function body there is nothing to eliminate; return X
     unchanged.  */
  if (! current_function_decl)
    return x;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Leaf rtxes that can contain no eliminable register.  */
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);

	}
      else if (reg_renumber && reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_invariant (regno))
	{
	  /* A pseudo without a hard reg but with an invariant equivalence:
	     substitute the (eliminated) invariant where that is allowed.
	     DEBUG_INSNs may always take the invariant form.  */
	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
			             mem_mode, insn, true, for_costs);
	  /* There exists at least one use of REGNO that cannot be
	     eliminated.  Prevent the defining insn from being deleted.  */
	  reg_equiv_init (regno) = NULL;
	  if (!for_costs)
	    alter_reg (regno, -1, true);
	}
      return x;

    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to have been reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle code a reloaded PLUS operand.

       Also consider backends where the flags register is clobbered by a
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
       lea instruction comes to mind).  If we try to reload a MINUS, we
       may kill the flags register that was holding a useful value.

       So, please before trying to handle MINUS, consider reload as a
       whole instead of this little section as well as the backend issues.  */
    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  In DEBUG_INSNs, it is
		   always ok to replace a PLUS with just a REG.  */
		if ((mem_mode != 0 || (insn && DEBUG_INSN_P (insn)))
		    && CONST_INT_P (XEXP (x, 1))
		    && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
		  return ep->to_rtx;
		else
		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
				       plus_constant (Pmode, XEXP (x, 1),
						      ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 Note that there is no risk of modifying the structure of the insn,
	 since we only get called for its operands, thus we are either
	 modifying the address inside a MEM, or something like an address
	 operand of a load-address insn.  */

      {
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				     for_costs);
	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				     for_costs);

	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && REG_P (new1)
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equivs
		&& reg_equiv_constant (REGNO (new1)) != 0)
	      new1 = reg_equiv_constant (REGNO (new1));
	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant (REGNO (new0)) != 0)
	      new0 = reg_equiv_constant (REGNO (new0));

	    new_rtx = form_sum (GET_MODE (x), new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
	    else
	      return new_rtx;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.   This case is pathological.
	 We ignore the possibility of overflow here.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (x, 1)))
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes or in DEBUG_INSNs don't count for
		     this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST
				      || DEBUG_INSN_P (insn))))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (Pmode,
			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* fall through */

    case CALL:
    case COMPARE:
    /* See comments before PLUS about handling MINUS.  */
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
	/* Binary operators: eliminate in both operands and rebuild only
	   if something changed.  XEXP (x, 1) may be absent for CALL.  */
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				     for_costs);
	rtx new1 = XEXP (x, 1)
	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
			      for_costs) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 0))
	    {
	      /* If this is a REG_DEAD note, it is not valid anymore.
		 Using the eliminated version could result in creating a
		 REG_DEAD note for the stack or frame pointer.  */
	      if (REG_NOTE_KIND (x) == REG_DEAD)
		return (XEXP (x, 1)
			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
					    for_costs)
			: NULL_RTX);

	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
	    }
	}

      /* fall through */

    case INSN_LIST:
    case INT_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 1))
	    return
	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
	}
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  */
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  The only remaining case we need to consider here is
	 that the increment value may be an eliminable register.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS
	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
	{
	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
					  insn, true, for_costs);

	  if (new_rtx != XEXP (XEXP (x, 1), 1))
	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
				   gen_rtx_PLUS (GET_MODE (x),
						 XEXP (x, 0), new_rtx));
	}
      return x;

    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Unary operators: eliminate in the sole operand, rebuild on change.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_BYTE.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reload
	 may do the replacement in certain circumstances.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	{
	  new_rtx = SUBREG_REG (x);
	}
      else
	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);

      if (new_rtx != SUBREG_REG (x))
	{
	  poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
	  poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));

	  if (MEM_P (new_rtx)
	      && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
		   /* On RISC machines, combine can create rtl of the form
		      (set (subreg:m1 (reg:m2 R) 0) ...)
		      where m1 < m2, and expects something interesting to
		      happen to the entire word.  Moreover, it will use the
		      (reg:m2 R) later, expecting all bits to be preserved.
		      So if the number of words is the same, preserve the
		      subreg so that push_reload can see it.  */
		   && !(WORD_REGISTER_OPERATIONS
			&& known_equal_after_align_down (x_size - 1,
							 new_size - 1,
							 UNITS_PER_WORD)))
		  || known_eq (x_size, new_size))
	      )
	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
	  else if (insn && GET_CODE (insn) == DEBUG_INSN)
	    return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	  else
	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	}

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */

      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
				  for_costs);
      if (for_costs
	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
	  && !memory_address_p (GET_MODE (x), new_rtx))
	note_reg_elim_costly (XEXP (x, 0), insn);

      return replace_equiv_address_nv (x, new_rtx);

    case USE:
      /* Handle insn_list USE that a call to a pure function may generate.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_USE (GET_MODE (x), new_rtx);
      return x;

    case CLOBBER:
    case ASM_OPERANDS:
      /* These are only expected here when processing a DEBUG_INSN; fall
	 through to the generic operand walk below.  */
      gcc_assert (insn && DEBUG_INSN_P (insn));
      break;

    case SET:
      /* A whole SET must never reach this function.  */
      gcc_unreachable ();

    default:
      break;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
				      for_costs);
	  if (new_rtx != XEXP (x, i) && ! copied)
	    {
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_rtx;
	}
      else if (*fmt == 'E')
	{
	  /* Lazily copy the operand vector as well, the first time one of
	     its elements changes.  */
	  int copied_vec = 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
					  for_costs);
	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
		{
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     XVEC (x, i)->elem);
		  if (! copied)
		    {
		      x = shallow_copy_rtx (x);
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new_rtx;
	    }
	}
    }

  return x;
}
2933 
2934 rtx
eliminate_regs(rtx x,machine_mode mem_mode,rtx insn)2935 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2936 {
2937   if (reg_eliminate == NULL)
2938     {
2939       gcc_assert (targetm.no_register_allocation);
2940       return x;
2941     }
2942   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2943 }
2944 
2945 /* Scan rtx X for modifications of elimination target registers.  Update
2946    the table of eliminables to reflect the changed state.  MEM_MODE is
2947    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2948 
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Leaf rtxes: nothing here can affect an elimination.  */
      return;

    case REG:
      regno = REGNO (x);

      /* A bare eliminable hard register: if it is referenced outside of
	 a MEM, record that fact in its elimination entry.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* NOTE(review): unlike eliminate_regs_1, reg_renumber is
	 dereferenced here without a null check — presumably this is only
	 reached after renumbering is set up; confirm against callers.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    poly_int64 size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (pre|post)_modify of the form (plus TO_RTX const)
		   keeps the elimination valid; adjust the offset then.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
      gcc_fallthrough ();
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory equivalence
	 is left alone, mirroring the SUBREG handling in
	 eliminate_regs_1.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Neither side of a SET is inside a MEM at top level.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic traversal: recurse into all rtx operands and vectors.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3147 
3148 /* Descend through rtx X and verify that no references to eliminable registers
3149    remain.  If any do remain, mark the involved register as not
3150    eliminable.  */
3151 
3152 static void
check_eliminable_occurrences(rtx x)3153 check_eliminable_occurrences (rtx x)
3154 {
3155   const char *fmt;
3156   int i;
3157   enum rtx_code code;
3158 
3159   if (x == 0)
3160     return;
3161 
3162   code = GET_CODE (x);
3163 
3164   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3165     {
3166       struct elim_table *ep;
3167 
3168       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3169 	if (ep->from_rtx == x)
3170 	  ep->can_eliminate = 0;
3171       return;
3172     }
3173 
3174   fmt = GET_RTX_FORMAT (code);
3175   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3176     {
3177       if (*fmt == 'e')
3178 	check_eliminable_occurrences (XEXP (x, i));
3179       else if (*fmt == 'E')
3180 	{
3181 	  int j;
3182 	  for (j = 0; j < XVECLEN (x, i); j++)
3183 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3184 	}
3185     }
3186 }
3187 
3188 /* Scan INSN and eliminate all eliminable registers in it.
3189 
3190    If REPLACE is nonzero, do the replacement destructively.  Also
3191    delete the insn as dead it if it is setting an eliminable register.
3192 
3193    If REPLACE is zero, do all our allocations in reload_obstack.
3194 
3195    If no eliminations were done and this insn doesn't require any elimination
3196    processing (these are not identical conditions: it might be updating sp,
3197    but not referencing fp; this needs to be seen during reload_as_needed so
3198    that the offset between fp and sp can be taken into consideration), zero
3199    is returned.  Otherwise, 1 is returned.  */
3200 
3201 static int
eliminate_regs_in_insn(rtx_insn * insn,int replace)3202 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3203 {
3204   int icode = recog_memoized (insn);
3205   rtx old_body = PATTERN (insn);
3206   int insn_is_asm = asm_noperands (old_body) >= 0;
3207   rtx old_set = single_set (insn);
3208   rtx new_body;
3209   int val = 0;
3210   int i;
3211   rtx substed_operand[MAX_RECOG_OPERANDS];
3212   rtx orig_operand[MAX_RECOG_OPERANDS];
3213   struct elim_table *ep;
3214   rtx plus_src, plus_cst_src;
3215 
3216   if (! insn_is_asm && icode < 0)
3217     {
3218       gcc_assert (DEBUG_INSN_P (insn)
3219 		  || GET_CODE (PATTERN (insn)) == USE
3220 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3221 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3222       if (DEBUG_BIND_INSN_P (insn))
3223 	INSN_VAR_LOCATION_LOC (insn)
3224 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3225       return 0;
3226     }
3227 
3228   /* We allow one special case which happens to work on all machines we
3229      currently support: a single set with the source or a REG_EQUAL
3230      note being a PLUS of an eliminable register and a constant.  */
3231   plus_src = plus_cst_src = 0;
3232   if (old_set && REG_P (SET_DEST (old_set)))
3233     {
3234       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3235 	plus_src = SET_SRC (old_set);
3236       /* First see if the source is of the form (plus (...) CST).  */
3237       if (plus_src
3238 	  && CONST_INT_P (XEXP (plus_src, 1)))
3239 	plus_cst_src = plus_src;
3240       else if (REG_P (SET_SRC (old_set))
3241 	       || plus_src)
3242 	{
3243 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3244 	     (plus (...) CST).  */
3245 	  rtx links;
3246 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3247 	    {
3248 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3249 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3250 		  && GET_CODE (XEXP (links, 0)) == PLUS
3251 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3252 		{
3253 		  plus_cst_src = XEXP (links, 0);
3254 		  break;
3255 		}
3256 	    }
3257 	}
3258 
3259       /* Check that the first operand of the PLUS is a hard reg or
3260 	 the lowpart subreg of one.  */
3261       if (plus_cst_src)
3262 	{
3263 	  rtx reg = XEXP (plus_cst_src, 0);
3264 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3265 	    reg = SUBREG_REG (reg);
3266 
3267 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3268 	    plus_cst_src = 0;
3269 	}
3270     }
3271   if (plus_cst_src)
3272     {
3273       rtx reg = XEXP (plus_cst_src, 0);
3274       poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));
3275 
3276       if (GET_CODE (reg) == SUBREG)
3277 	reg = SUBREG_REG (reg);
3278 
3279       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3280 	if (ep->from_rtx == reg && ep->can_eliminate)
3281 	  {
3282 	    rtx to_rtx = ep->to_rtx;
3283 	    offset += ep->offset;
3284 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3285 
3286 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3287 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3288 				    to_rtx);
3289 	    /* If we have a nonzero offset, and the source is already
3290 	       a simple REG, the following transformation would
3291 	       increase the cost of the insn by replacing a simple REG
3292 	       with (plus (reg sp) CST).  So try only when we already
3293 	       had a PLUS before.  */
3294 	    if (known_eq (offset, 0) || plus_src)
3295 	      {
3296 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3297 					     to_rtx, offset);
3298 
3299 		new_body = old_body;
3300 		if (! replace)
3301 		  {
3302 		    new_body = copy_insn (old_body);
3303 		    if (REG_NOTES (insn))
3304 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3305 		  }
3306 		PATTERN (insn) = new_body;
3307 		old_set = single_set (insn);
3308 
3309 		/* First see if this insn remains valid when we make the
3310 		   change.  If not, try to replace the whole pattern with
3311 		   a simple set (this may help if the original insn was a
3312 		   PARALLEL that was only recognized as single_set due to
3313 		   REG_UNUSED notes).  If this isn't valid either, keep
3314 		   the INSN_CODE the same and let reload fix it up.  */
3315 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3316 		  {
3317 		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3318 
3319 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3320 		      SET_SRC (old_set) = new_src;
3321 		  }
3322 	      }
3323 	    else
3324 	      break;
3325 
3326 	    val = 1;
3327 	    /* This can't have an effect on elimination offsets, so skip right
3328 	       to the end.  */
3329 	    goto done;
3330 	  }
3331     }
3332 
3333   /* Determine the effects of this insn on elimination offsets.  */
3334   elimination_effects (old_body, VOIDmode);
3335 
3336   /* Eliminate all eliminable registers occurring in operands that
3337      can be handled by reload.  */
3338   extract_insn (insn);
3339   for (i = 0; i < recog_data.n_operands; i++)
3340     {
3341       orig_operand[i] = recog_data.operand[i];
3342       substed_operand[i] = recog_data.operand[i];
3343 
3344       /* For an asm statement, every operand is eliminable.  */
3345       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3346 	{
3347 	  bool is_set_src, in_plus;
3348 
3349 	  /* Check for setting a register that we know about.  */
3350 	  if (recog_data.operand_type[i] != OP_IN
3351 	      && REG_P (orig_operand[i]))
3352 	    {
3353 	      /* If we are assigning to a register that can be eliminated, it
3354 		 must be as part of a PARALLEL, since the code above handles
3355 		 single SETs.  We must indicate that we can no longer
3356 		 eliminate this reg.  */
3357 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3358 		   ep++)
3359 		if (ep->from_rtx == orig_operand[i])
3360 		  ep->can_eliminate = 0;
3361 	    }
3362 
3363 	  /* Companion to the above plus substitution, we can allow
3364 	     invariants as the source of a plain move.  */
3365 	  is_set_src = false;
3366 	  if (old_set
3367 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3368 	    is_set_src = true;
3369 	  in_plus = false;
3370 	  if (plus_src
3371 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3372 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3373 	    in_plus = true;
3374 
3375 	  substed_operand[i]
3376 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3377 			        replace ? insn : NULL_RTX,
3378 				is_set_src || in_plus, false);
3379 	  if (substed_operand[i] != orig_operand[i])
3380 	    val = 1;
3381 	  /* Terminate the search in check_eliminable_occurrences at
3382 	     this point.  */
3383 	  *recog_data.operand_loc[i] = 0;
3384 
3385 	  /* If an output operand changed from a REG to a MEM and INSN is an
3386 	     insn, write a CLOBBER insn.  */
3387 	  if (recog_data.operand_type[i] != OP_IN
3388 	      && REG_P (orig_operand[i])
3389 	      && MEM_P (substed_operand[i])
3390 	      && replace)
3391 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3392 	}
3393     }
3394 
3395   for (i = 0; i < recog_data.n_dups; i++)
3396     *recog_data.dup_loc[i]
3397       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3398 
3399   /* If any eliminable remain, they aren't eliminable anymore.  */
3400   check_eliminable_occurrences (old_body);
3401 
3402   /* Substitute the operands; the new values are in the substed_operand
3403      array.  */
3404   for (i = 0; i < recog_data.n_operands; i++)
3405     *recog_data.operand_loc[i] = substed_operand[i];
3406   for (i = 0; i < recog_data.n_dups; i++)
3407     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3408 
3409   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3410      re-recognize the insn.  We do this in case we had a simple addition
3411      but now can do this as a load-address.  This saves an insn in this
3412      common case.
3413      If re-recognition fails, the old insn code number will still be used,
3414      and some register operands may have changed into PLUS expressions.
3415      These will be handled by find_reloads by loading them into a register
3416      again.  */
3417 
3418   if (val)
3419     {
3420       /* If we aren't replacing things permanently and we changed something,
3421 	 make another copy to ensure that all the RTL is new.  Otherwise
3422 	 things can go wrong if find_reload swaps commutative operands
3423 	 and one is inside RTL that has been copied while the other is not.  */
3424       new_body = old_body;
3425       if (! replace)
3426 	{
3427 	  new_body = copy_insn (old_body);
3428 	  if (REG_NOTES (insn))
3429 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3430 	}
3431       PATTERN (insn) = new_body;
3432 
3433       /* If we had a move insn but now we don't, rerecognize it.  This will
3434 	 cause spurious re-recognition if the old move had a PARALLEL since
3435 	 the new one still will, but we can't call single_set without
3436 	 having put NEW_BODY into the insn and the re-recognition won't
3437 	 hurt in this rare case.  */
3438       /* ??? Why this huge if statement - why don't we just rerecognize the
3439 	 thing always?  */
3440       if (! insn_is_asm
3441 	  && old_set != 0
3442 	  && ((REG_P (SET_SRC (old_set))
3443 	       && (GET_CODE (new_body) != SET
3444 		   || !REG_P (SET_SRC (new_body))))
3445 	      /* If this was a load from or store to memory, compare
3446 		 the MEM in recog_data.operand to the one in the insn.
3447 		 If they are not equal, then rerecognize the insn.  */
3448 	      || (old_set != 0
3449 		  && ((MEM_P (SET_SRC (old_set))
3450 		       && SET_SRC (old_set) != recog_data.operand[1])
3451 		      || (MEM_P (SET_DEST (old_set))
3452 			  && SET_DEST (old_set) != recog_data.operand[0])))
3453 	      /* If this was an add insn before, rerecognize.  */
3454 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3455 	{
3456 	  int new_icode = recog (PATTERN (insn), insn, 0);
3457 	  if (new_icode >= 0)
3458 	    INSN_CODE (insn) = new_icode;
3459 	}
3460     }
3461 
3462   /* Restore the old body.  If there were any changes to it, we made a copy
3463      of it while the changes were still in place, so we'll correctly return
3464      a modified insn below.  */
3465   if (! replace)
3466     {
3467       /* Restore the old body.  */
3468       for (i = 0; i < recog_data.n_operands; i++)
3469 	/* Restoring a top-level match_parallel would clobber the new_body
3470 	   we installed in the insn.  */
3471 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3472 	  *recog_data.operand_loc[i] = orig_operand[i];
3473       for (i = 0; i < recog_data.n_dups; i++)
3474 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3475     }
3476 
3477   /* Update all elimination pairs to reflect the status after the current
3478      insn.  The changes we make were determined by the earlier call to
3479      elimination_effects.
3480 
3481      We also detect cases where register elimination cannot be done,
3482      namely, if a register would be both changed and referenced outside a MEM
3483      in the resulting insn since such an insn is often undefined and, even if
3484      not, we cannot know what meaning will be given to it.  Note that it is
3485      valid to have a register used in an address in an insn that changes it
3486      (presumably with a pre- or post-increment or decrement).
3487 
3488      If anything changes, return nonzero.  */
3489 
3490   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3491     {
3492       if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3493 	ep->can_eliminate = 0;
3494 
3495       ep->ref_outside_mem = 0;
3496 
3497       if (maybe_ne (ep->previous_offset, ep->offset))
3498 	val = 1;
3499     }
3500 
3501  done:
3502   /* If we changed something, perform elimination in REG_NOTES.  This is
3503      needed even when REPLACE is zero because a REG_DEAD note might refer
3504      to a register that we eliminate and could cause a different number
3505      of spill registers to be needed in the final reload pass than in
3506      the pre-passes.  */
3507   if (val && REG_NOTES (insn) != 0)
3508     REG_NOTES (insn)
3509       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3510 			  false);
3511 
3512   return val;
3513 }
3514 
3515 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3516    register allocator.  INSN is the instruction we need to examine, we perform
3517    eliminations in its operands and record cases where eliminating a reg with
3518    an invariant equivalence would add extra cost.  */
3519 
/* Locals here are deliberately left conditionally initialized; keep
   -Wmaybe-uninitialized at warning severity for this definition.  */
#pragma GCC diagnostic push
#pragma GCC diagnostic warning "-Wmaybe-uninitialized"
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* An insn that is neither asm nor recognizable must be one of the
     trivial administrative patterns asserted below; it has no operands
     worth costing.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  Such a set is handled
	 specially elsewhere, so there is nothing to cost here.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  /* Save the dup and operand locations so the pattern can be restored
     to its original form after the cost analysis below clobbers it.  */
  int n_dups = recog_data.n_dups;
  for (i = 0; i < n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  int n_operands = recog_data.n_operands;
  for (i = 0; i < n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Final argument TRUE means we only estimate costs; no
	     replacement is actually recorded.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* A register both changed and referenced outside a MEM cannot be
	 eliminated; see eliminate_regs_in_insn for the rationale.  */
      if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
#pragma GCC diagnostic pop
3672 
3673 /* Loop through all elimination pairs.
3674    Recalculate the number not at initial offset.
3675 
3676    Compute the maximum offset (minimum offset if the stack does not
3677    grow downward) for each elimination pair.  */
3678 
3679 static void
update_eliminable_offsets(void)3680 update_eliminable_offsets (void)
3681 {
3682   struct elim_table *ep;
3683 
3684   num_not_at_initial_offset = 0;
3685   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3686     {
3687       ep->previous_offset = ep->offset;
3688       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3689 	num_not_at_initial_offset++;
3690     }
3691 }
3692 
3693 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3694    replacement we currently believe is valid, mark it as not eliminable if X
3695    modifies DEST in any way other than by adding a constant integer to it.
3696 
3697    If DEST is the frame pointer, we do nothing because we assume that
3698    all assignments to the hard frame pointer are nonlocal gotos and are being
3699    done at a time when they are valid and do not disturb anything else.
3700    Some machines want to eliminate a fake argument pointer with either the
3701    frame or stack pointer.  Assignments to the hard frame pointer must not
3702    prevent this elimination.
3703 
3704    Called via note_stores from reload before starting its passes to scan
3705    the insns of the function.  */
3706 
3707 static void
mark_not_eliminable(rtx dest,const_rtx x,void * data ATTRIBUTE_UNUSED)3708 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3709 {
3710   unsigned int i;
3711 
3712   /* A SUBREG of a hard register here is just changing its mode.  We should
3713      not see a SUBREG of an eliminable hard register, but check just in
3714      case.  */
3715   if (GET_CODE (dest) == SUBREG)
3716     dest = SUBREG_REG (dest);
3717 
3718   if (dest == hard_frame_pointer_rtx)
3719     return;
3720 
3721   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3722     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3723 	&& (GET_CODE (x) != SET
3724 	    || GET_CODE (SET_SRC (x)) != PLUS
3725 	    || XEXP (SET_SRC (x), 0) != dest
3726 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3727       {
3728 	reg_eliminate[i].can_eliminate_previous
3729 	  = reg_eliminate[i].can_eliminate = 0;
3730 	num_eliminable--;
3731       }
3732 }
3733 
3734 /* Verify that the initial elimination offsets did not change since the
3735    last call to set_initial_elim_offsets.  This is used to catch cases
3736    where something illegal happened during reload_as_needed that could
3737    cause incorrect code to be generated if we did not check for it.  */
3738 
3739 static bool
verify_initial_elim_offsets(void)3740 verify_initial_elim_offsets (void)
3741 {
3742   poly_int64 t;
3743   struct elim_table *ep;
3744 
3745   if (!num_eliminable)
3746     return true;
3747 
3748   targetm.compute_frame_layout ();
3749   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3750     {
3751       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3752       if (maybe_ne (t, ep->initial_offset))
3753 	return false;
3754     }
3755 
3756   return true;
3757 }
3758 
3759 /* Reset all offsets on eliminable registers to their initial values.  */
3760 
3761 static void
set_initial_elim_offsets(void)3762 set_initial_elim_offsets (void)
3763 {
3764   struct elim_table *ep = reg_eliminate;
3765 
3766   targetm.compute_frame_layout ();
3767   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3768     {
3769       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3770       ep->previous_offset = ep->offset = ep->initial_offset;
3771     }
3772 
3773   num_not_at_initial_offset = 0;
3774 }
3775 
3776 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3777 
static void
set_initial_eh_label_offset (rtx label)
{
  /* Record LABEL at the initial elimination offsets; the callback
     signature required by for_each_eh_label forces this wrapper.  */
  set_label_offsets (label, NULL, 1);
}
3783 
3784 /* Initialize the known label offsets.
3785    Set a known offset for each forced label to be at the initial offset
3786    of each elimination.  We do this because we assume that all
3787    computed jumps occur from a location where each elimination is
3788    at its initial offset.
3789    For all other labels, show that we don't know the offsets.  */
3790 
static void
set_initial_label_offsets (void)
{
  /* Start with no label having a known offset.  */
  memset (offsets_known_at, 0, num_labels);

  /* Targets of computed jumps are assumed to be reached with every
     elimination at its initial offset.  */
  unsigned int i;
  rtx_insn *insn;
  FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
    set_label_offsets (insn, NULL, 1);

  /* Likewise for nonlocal goto handler labels.  */
  for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
    if (x->insn ())
      set_label_offsets (x->insn (), NULL, 1);

  /* And for exception handler labels.  */
  for_each_eh_label (set_initial_eh_label_offset);
}
3807 
3808 /* Set all elimination offsets to the known values for the code label given
3809    by INSN.  */
3810 
3811 static void
set_offsets_for_label(rtx_insn * insn)3812 set_offsets_for_label (rtx_insn *insn)
3813 {
3814   unsigned int i;
3815   int label_nr = CODE_LABEL_NUMBER (insn);
3816   struct elim_table *ep;
3817 
3818   num_not_at_initial_offset = 0;
3819   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3820     {
3821       ep->offset = ep->previous_offset
3822 		 = offsets_at[label_nr - first_label_num][i];
3823       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3824 	num_not_at_initial_offset++;
3825     }
3826 }
3827 
3828 /* See if anything that happened changes which eliminations are valid.
3829    For example, on the SPARC, whether or not the frame pointer can
3830    be eliminated can depend on what registers have been used.  We need
3831    not check some conditions again (such as flag_omit_frame_pointer)
3832    since they can't have changed.  */
3833 
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First, re-ask the target about each pair: a pair becomes invalid
     when the target now rejects it, or when it would eliminate the hard
     frame pointer while one is still required.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
	|| ! targetm.can_eliminate (ep->from, ep->to)
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Any surviving elimination of the frame pointer (other than into
	 the hard frame pointer) means we do not need a frame pointer,
	 unless stack realignment forces one.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* A pair that just became invalid forces its FROM register to be
	 spilled; report it to the caller via PSET.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
3909 
/* Call update_eliminables and spill any registers we can't eliminate anymore.
   Return true iff a register was spilled.  */
3912 
3913 static bool
update_eliminables_and_spill(void)3914 update_eliminables_and_spill (void)
3915 {
3916   int i;
3917   bool did_spill = false;
3918   HARD_REG_SET to_spill;
3919   CLEAR_HARD_REG_SET (to_spill);
3920   update_eliminables (&to_spill);
3921   used_spill_regs &= ~to_spill;
3922 
3923   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3924     if (TEST_HARD_REG_BIT (to_spill, i))
3925       {
3926 	spill_hard_reg (i, 1);
3927 	did_spill = true;
3928 
3929 	/* Regardless of the state of spills, if we previously had
3930 	   a register that we thought we could eliminate, but now
3931 	   cannot eliminate, we must run another pass.
3932 
3933 	   Consider pseudos which have an entry in reg_equiv_* which
3934 	   reference an eliminable register.  We must make another pass
3935 	   to update reg_equiv_* so that we do not substitute in the
3936 	   old value from when we thought the elimination could be
3937 	   performed.  */
3938       }
3939   return did_spill;
3940 }
3941 
3942 /* Return true if X is used as the target register of an elimination.  */
3943 
3944 bool
elimination_target_reg_p(rtx x)3945 elimination_target_reg_p (rtx x)
3946 {
3947   struct elim_table *ep;
3948 
3949   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3950     if (ep->to_rtx == x && ep->can_eliminate)
3951       return true;
3952 
3953   return false;
3954 }
3955 
3956 /* Initialize the table of registers to eliminate.
3957    Pre-condition: global flag frame_pointer_needed has been set before
3958    calling this function.  */
3959 
static void
init_elim_table (void)
{
  struct elim_table *ep;
  const struct elim_table_1 *ep1;

  /* Allocate the working table lazily, once per compilation.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

  /* Seed each working entry from the target's static elimination table,
     asking the target whether the pair is currently valid.  A pair that
     eliminates into the stack pointer is rejected when a frame pointer
     is needed (unless stack realignment rules allow it).  */
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      ep->can_eliminate = ep->can_eliminate_previous
	= (targetm.can_eliminate (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM
		 && frame_pointer_needed
		 && (! SUPPORTS_STACK_ALIGNMENT
		     || ! stack_realign_fp)));
    }

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
3995 
3996 /* Find all the pseudo registers that didn't get hard regs
3997    but do have known equivalent constants or memory slots.
3998    These include parameters (known equivalent to parameter slots)
3999    and cse'd or loop-moved constant memory addresses.
4000 
4001    Record constant equivalents in reg_equiv_constant
4002    so they will be substituted by find_reloads.
4003    Record memory equivalents in reg_mem_equiv so they can
4004    be substituted eventually by altering the REG-rtx's.  */
4005 
static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  /* reg_max_ref_mode is only tracked when the caller also wants
     paradoxical-subreg information.  */
  if (do_subregs)
    reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
  else
    reg_max_ref_mode = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS])
    xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64));

/* Look for REG_EQUIV notes; record what each pseudo is equivalent
   to.  If DO_SUBREGS is true, also find all paradoxical subregs and
   find largest such for each pseudo.  FIRST is the head of the insn
   list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Hard and virtual registers never get equivalences here.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		       equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* An invariant the target cannot accept directly is
			 forced into the constant pool and referenced as
			 memory instead.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  /* Dump the surviving init insns for each pseudo that has any.  */
  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4123 
4124 /* Indicate that we no longer have known memory locations or constants.
4125    Free all data involved in tracking these.  */
4126 
4127 static void
free_reg_equiv(void)4128 free_reg_equiv (void)
4129 {
4130   int i;
4131 
4132   free (offsets_known_at);
4133   free (offsets_at);
4134   offsets_at = 0;
4135   offsets_known_at = 0;
4136 
4137   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4138     if (reg_equiv_alt_mem_list (i))
4139       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4140   vec_free (reg_equivs);
4141 }
4142 
/* Kick all pseudos out of hard register REGNO.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we
   are spilling this hard reg to produce another spill register.

   The displaced pseudos are recorded in spilled_pseudos.  */
4152 
4153 static void
spill_hard_reg(unsigned int regno,int cant_eliminate)4154 spill_hard_reg (unsigned int regno, int cant_eliminate)
4155 {
4156   int i;
4157 
4158   if (cant_eliminate)
4159     {
4160       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4161       df_set_regs_ever_live (regno, true);
4162     }
4163 
4164   /* Spill every pseudo reg that was allocated to this reg
4165      or to something that overlaps this reg.  */
4166 
4167   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4168     if (reg_renumber[i] >= 0
4169 	&& (unsigned int) reg_renumber[i] <= regno
4170 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4171       SET_REGNO_REG_SET (&spilled_pseudos, i);
4172 }
4173 
4174 /* After spill_hard_reg was called and/or find_reload_regs was run for all
4175    insns that need reloads, this function is used to actually spill pseudo
4176    registers and try to reallocate them.  It also sets up the spill_regs
4177    array for use by choose_reload_regs.
4178 
4179    GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4180    that we displace from hard registers.  */
4181 
4182 static int
finish_spills(int global)4183 finish_spills (int global)
4184 {
4185   class insn_chain *chain;
4186   int something_changed = 0;
4187   unsigned i;
4188   reg_set_iterator rsi;
4189 
4190   /* Build the spill_regs array for the function.  */
4191   /* If there are some registers still to eliminate and one of the spill regs
4192      wasn't ever used before, additional stack space may have to be
4193      allocated to store this register.  Thus, we may have changed the offset
4194      between the stack and frame pointers, so mark that something has changed.
4195 
4196      One might think that we need only set VAL to 1 if this is a call-used
4197      register.  However, the set of registers that must be saved by the
4198      prologue is not identical to the call-used set.  For example, the
4199      register used by the call insn for the return PC is a call-used register,
4200      but must be saved by the prologue.  */
4201 
4202   n_spills = 0;
4203   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4204     if (TEST_HARD_REG_BIT (used_spill_regs, i))
4205       {
4206 	spill_reg_order[i] = n_spills;
4207 	spill_regs[n_spills++] = i;
4208 	if (num_eliminable && ! df_regs_ever_live_p (i))
4209 	  something_changed = 1;
4210 	df_set_regs_ever_live (i, true);
4211       }
4212     else
4213       spill_reg_order[i] = -1;
4214 
4215   EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4216     if (reg_renumber[i] >= 0)
4217       {
4218 	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4219 	/* Mark it as no longer having a hard register home.  */
4220 	reg_renumber[i] = -1;
4221 	if (ira_conflicts_p)
4222 	  /* Inform IRA about the change.  */
4223 	  ira_mark_allocation_change (i);
4224 	/* We will need to scan everything again.  */
4225 	something_changed = 1;
4226       }
4227 
4228   /* Retry global register allocation if possible.  */
4229   if (global && ira_conflicts_p)
4230     {
4231       unsigned int n;
4232 
4233       memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4234       /* For every insn that needs reloads, set the registers used as spill
4235 	 regs in pseudo_forbidden_regs for every pseudo live across the
4236 	 insn.  */
4237       for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4238 	{
4239 	  EXECUTE_IF_SET_IN_REG_SET
4240 	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4241 	    {
4242 	      pseudo_forbidden_regs[i] |= chain->used_spill_regs;
4243 	    }
4244 	  EXECUTE_IF_SET_IN_REG_SET
4245 	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4246 	    {
4247 	      pseudo_forbidden_regs[i] |= chain->used_spill_regs;
4248 	    }
4249 	}
4250 
4251       /* Retry allocating the pseudos spilled in IRA and the
4252 	 reload.  For each reg, merge the various reg sets that
4253 	 indicate which hard regs can't be used, and call
4254 	 ira_reassign_pseudos.  */
4255       for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4256 	if (reg_old_renumber[i] != reg_renumber[i])
4257 	  {
4258 	    if (reg_renumber[i] < 0)
4259 	      temp_pseudo_reg_arr[n++] = i;
4260 	    else
4261 	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4262 	  }
4263       if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4264 				bad_spill_regs_global,
4265 				pseudo_forbidden_regs, pseudo_previous_regs,
4266 				&spilled_pseudos))
4267 	something_changed = 1;
4268     }
4269   /* Fix up the register information in the insn chain.
4270      This involves deleting those of the spilled pseudos which did not get
4271      a new hard register home from the live_{before,after} sets.  */
4272   for (chain = reload_insn_chain; chain; chain = chain->next)
4273     {
4274       HARD_REG_SET used_by_pseudos;
4275       HARD_REG_SET used_by_pseudos2;
4276 
4277       if (! ira_conflicts_p)
4278 	{
4279 	  /* Don't do it for IRA because IRA and the reload still can
4280 	     assign hard registers to the spilled pseudos on next
4281 	     reload iterations.  */
4282 	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4283 	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4284 	}
4285       /* Mark any unallocated hard regs as available for spills.  That
4286 	 makes inheritance work somewhat better.  */
4287       if (chain->need_reload)
4288 	{
4289 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4290 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4291 	  used_by_pseudos |= used_by_pseudos2;
4292 
4293 	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4294 	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4295 	  /* Value of chain->used_spill_regs from previous iteration
4296 	     may be not included in the value calculated here because
4297 	     of possible removing caller-saves insns (see function
4298 	     delete_caller_save_insns.  */
4299 	  chain->used_spill_regs = ~used_by_pseudos & used_spill_regs;
4300 	}
4301     }
4302 
4303   CLEAR_REG_SET (&changed_allocation_pseudos);
4304   /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
4305   for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4306     {
4307       int regno = reg_renumber[i];
4308       if (reg_old_renumber[i] == regno)
4309 	continue;
4310 
4311       SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4312 
4313       alter_reg (i, reg_old_renumber[i], false);
4314       reg_old_renumber[i] = regno;
4315       if (dump_file)
4316 	{
4317 	  if (regno == -1)
4318 	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
4319 	  else
4320 	    fprintf (dump_file, " Register %d now in %d.\n\n",
4321 		     i, reg_renumber[i]);
4322 	}
4323     }
4324 
4325   return something_changed;
4326 }
4327 
4328 /* Find all paradoxical subregs within X and update reg_max_ref_mode.  */
4329 
4330 static void
scan_paradoxical_subregs(rtx x)4331 scan_paradoxical_subregs (rtx x)
4332 {
4333   int i;
4334   const char *fmt;
4335   enum rtx_code code = GET_CODE (x);
4336 
4337   switch (code)
4338     {
4339     case REG:
4340     case CONST:
4341     case SYMBOL_REF:
4342     case LABEL_REF:
4343     CASE_CONST_ANY:
4344     case CC0:
4345     case PC:
4346     case USE:
4347     case CLOBBER:
4348       return;
4349 
4350     case SUBREG:
4351       if (REG_P (SUBREG_REG (x)))
4352 	{
4353 	  unsigned int regno = REGNO (SUBREG_REG (x));
4354 	  if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4355 	    {
4356 	      reg_max_ref_mode[regno] = GET_MODE (x);
4357 	      mark_home_live_1 (regno, GET_MODE (x));
4358 	    }
4359 	}
4360       return;
4361 
4362     default:
4363       break;
4364     }
4365 
4366   fmt = GET_RTX_FORMAT (code);
4367   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4368     {
4369       if (fmt[i] == 'e')
4370 	scan_paradoxical_subregs (XEXP (x, i));
4371       else if (fmt[i] == 'E')
4372 	{
4373 	  int j;
4374 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4375 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4376 	}
4377     }
4378 }
4379 
4380 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4381    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4382    and apply the corresponding narrowing subreg to *OTHER_PTR.
4383    Return true if the operands were changed, false otherwise.  */
4384 
4385 static bool
strip_paradoxical_subreg(rtx * op_ptr,rtx * other_ptr)4386 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4387 {
4388   rtx op, inner, other, tem;
4389 
4390   op = *op_ptr;
4391   if (!paradoxical_subreg_p (op))
4392     return false;
4393   inner = SUBREG_REG (op);
4394 
4395   other = *other_ptr;
4396   tem = gen_lowpart_common (GET_MODE (inner), other);
4397   if (!tem)
4398     return false;
4399 
4400   /* If the lowpart operation turned a hard register into a subreg,
4401      rather than simplifying it to another hard register, then the
4402      mode change cannot be properly represented.  For example, OTHER
4403      might be valid in its current mode, but not in the new one.  */
4404   if (GET_CODE (tem) == SUBREG
4405       && REG_P (other)
4406       && HARD_REGISTER_P (other))
4407     return false;
4408 
4409   *op_ptr = inner;
4410   *other_ptr = tem;
4411   return true;
4412 }
4413 
4414 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4415    examine all of the reload insns between PREV and NEXT exclusive, and
4416    annotate all that may trap.  */
4417 
4418 static void
fixup_eh_region_note(rtx_insn * insn,rtx_insn * prev,rtx_insn * next)4419 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4420 {
4421   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4422   if (note == NULL)
4423     return;
4424   if (!insn_could_throw_p (insn))
4425     remove_note (insn, note);
4426   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4427 }
4428 
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.

   LIVE_KNOWN is nonzero when valid life information is available for
   the pseudos; it is passed straight through to find_reloads.  */

static void
reload_as_needed (int live_known)
{
  class insn_chain *chain;
#if AUTO_INC_DEC
  int i;
#endif
  rtx_note *marker;

  /* Start with no reload register containing a known value.  */
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);

  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#if AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  /* Collect (via note_stores) the registers set by this insn;
	     they are forgotten after the insn has been processed.  */
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (insn, forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* Recover the insn before the reload insns and retire
		 the marker.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 get_args_size (p));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (x, forget_old_reloads_1, NULL);

#if AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      /* Look for the single use of the reload register
			 between the reload insns and INSN, and try to
			 convert it to a true POST_INC / POST_DEC.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      /* PRE_INC / PRE_DEC: the reload register holds the
			 post-modification value, so it can be inherited.  */
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  reg_reloaded_valid
	    &= ~insn_callee_abi (insn).full_and_partial_reg_clobbers ();

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4789 
4790 /* Discard all record of any value reloaded from X,
4791    or reloaded in X from someplace else;
4792    unless X is an output reload reg of the current insn.
4793 
4794    X may be a hard reg (the reload reg)
4795    or it may be a pseudo reg that was reloaded from.
4796 
4797    When DATA is non-NULL just mark the registers in regset
4798    to be forgotten later.  */
4799 
4800 static void
forget_old_reloads_1(rtx x,const_rtx,void * data)4801 forget_old_reloads_1 (rtx x, const_rtx, void *data)
4802 {
4803   unsigned int regno;
4804   unsigned int nr;
4805   regset regs = (regset) data;
4806 
4807   /* note_stores does give us subregs of hard regs,
4808      subreg_regno_offset requires a hard reg.  */
4809   while (GET_CODE (x) == SUBREG)
4810     {
4811       /* We ignore the subreg offset when calculating the regno,
4812 	 because we are using the entire underlying hard register
4813 	 below.  */
4814       x = SUBREG_REG (x);
4815     }
4816 
4817   if (!REG_P (x))
4818     return;
4819 
4820   regno = REGNO (x);
4821 
4822   if (regno >= FIRST_PSEUDO_REGISTER)
4823     nr = 1;
4824   else
4825     {
4826       unsigned int i;
4827 
4828       nr = REG_NREGS (x);
4829       /* Storing into a spilled-reg invalidates its contents.
4830 	 This can happen if a block-local pseudo is allocated to that reg
4831 	 and it wasn't spilled because this block's total need is 0.
4832 	 Then some insn might have an optional reload and use this reg.  */
4833       if (!regs)
4834 	for (i = 0; i < nr; i++)
4835 	  /* But don't do this if the reg actually serves as an output
4836 	     reload reg in the current instruction.  */
4837 	  if (n_reloads == 0
4838 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4839 	    {
4840 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4841 	      spill_reg_store[regno + i] = 0;
4842 	    }
4843     }
4844 
4845   if (regs)
4846     while (nr-- > 0)
4847       SET_REGNO_REG_SET (regs, regno + nr);
4848   else
4849     {
4850       /* Since value of X has changed,
4851 	 forget any value previously copied from it.  */
4852 
4853       while (nr-- > 0)
4854 	/* But don't forget a copy if this is the output reload
4855 	   that establishes the copy's validity.  */
4856 	if (n_reloads == 0
4857 	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4858 	  reg_last_reload_reg[regno + nr] = 0;
4859      }
4860 }
4861 
4862 /* Forget the reloads marked in regset by previous function.  */
4863 static void
forget_marked_reloads(regset regs)4864 forget_marked_reloads (regset regs)
4865 {
4866   unsigned int reg;
4867   reg_set_iterator rsi;
4868   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4869     {
4870       if (reg < FIRST_PSEUDO_REGISTER
4871 	  /* But don't do this if the reg actually serves as an output
4872 	     reload reg in the current instruction.  */
4873 	  && (n_reloads == 0
4874 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4875 	  {
4876 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4877 	    spill_reg_store[reg] = 0;
4878 	  }
4879       if (n_reloads == 0
4880 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4881 	reg_last_reload_reg[reg] = 0;
4882     }
4883 }
4884 
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
4923 
4924 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4925    TYPE. MODE is used to indicate how many consecutive regs are
4926    actually used.  */
4927 
4928 static void
mark_reload_reg_in_use(unsigned int regno,int opnum,enum reload_type type,machine_mode mode)4929 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4930 			machine_mode mode)
4931 {
4932   switch (type)
4933     {
4934     case RELOAD_OTHER:
4935       add_to_hard_reg_set (&reload_reg_used, mode, regno);
4936       break;
4937 
4938     case RELOAD_FOR_INPUT_ADDRESS:
4939       add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
4940       break;
4941 
4942     case RELOAD_FOR_INPADDR_ADDRESS:
4943       add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
4944       break;
4945 
4946     case RELOAD_FOR_OUTPUT_ADDRESS:
4947       add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
4948       break;
4949 
4950     case RELOAD_FOR_OUTADDR_ADDRESS:
4951       add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
4952       break;
4953 
4954     case RELOAD_FOR_OPERAND_ADDRESS:
4955       add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
4956       break;
4957 
4958     case RELOAD_FOR_OPADDR_ADDR:
4959       add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
4960       break;
4961 
4962     case RELOAD_FOR_OTHER_ADDRESS:
4963       add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
4964       break;
4965 
4966     case RELOAD_FOR_INPUT:
4967       add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
4968       break;
4969 
4970     case RELOAD_FOR_OUTPUT:
4971       add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
4972       break;
4973 
4974     case RELOAD_FOR_INSN:
4975       add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
4976       break;
4977     }
4978 
4979   add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
4980 }
4981 
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs (regno, mode);
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the set that mark_reload_reg_in_use updated for this TYPE,
     and note whether other live reloads of the same type may share
     registers with the one being cleared.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free whatever remains of the interval [START_REGNO, END_REGNO)
     after excluding registers still held by conflicting reloads.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5088 
/* Return 1 if hard reg REGNO is free to serve as a reload register for a
   reload of the sort specified by OPNUM and TYPE, judging by the
   reload_reg_used* sets recorded so far for the current insn.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* Can't use a register that overlaps the insn itself or an
	 operand address reload, both of which are live while the insn
	 executes.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      /* Likewise a register already committed to an operand address
	 reload's own address reload.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Can't share with any input reload, with the insn itself, or
	 with another operand address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Can't share with any input reload or with another
	 RELOAD_FOR_OPADDR_ADDR reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output reload, with the insn
	 itself, and with operand address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Only conflicts with other RELOAD_FOR_OTHER_ADDRESS reloads.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5249 
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      /* A later same-type, same-operand reload claims REGNO, so our
	 value is overwritten before the end of the insn.  */
      if (regno >= REGNO (reg) && regno < END_REGNO (reg))
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Like the case above, but also fails if the register is claimed
	 by an operand address reload or by the insn itself.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* fall through */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5414 
5415 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5416    every register in REG.  */
5417 
5418 static bool
reload_reg_rtx_reaches_end_p(rtx reg,int reloadnum)5419 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5420 {
5421   unsigned int i;
5422 
5423   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5424     if (!reload_reg_reaches_end_p (i, reloadnum))
5425       return false;
5426   return true;
5427 }
5428 
5429 
5430 /*  Returns whether R1 and R2 are uniquely chained: the value of one
5431     is used by the other, and that value is not used by any other
5432     reload for this insn.  This is used to partially undo the decision
5433     made in find_reloads when in the case of multiple
5434     RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5435     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5436     reloads.  This code tries to avoid the conflict created by that
5437     change.  It might be cleaner to explicitly keep track of which
5438     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5439     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5440     this after the fact. */
5441 static bool
reloads_unique_chain_p(int r1,int r2)5442 reloads_unique_chain_p (int r1, int r2)
5443 {
5444   int i;
5445 
5446   /* We only check input reloads.  */
5447   if (! rld[r1].in || ! rld[r2].in)
5448     return false;
5449 
5450   /* Avoid anything with output reloads.  */
5451   if (rld[r1].out || rld[r2].out)
5452     return false;
5453 
5454   /* "chained" means one reload is a component of the other reload,
5455      not the same as the other reload.  */
5456   if (rld[r1].opnum != rld[r2].opnum
5457       || rtx_equal_p (rld[r1].in, rld[r2].in)
5458       || rld[r1].optional || rld[r2].optional
5459       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5460 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5461     return false;
5462 
5463   /* The following loop assumes that r1 is the reload that feeds r2.  */
5464   if (r1 > r2)
5465     std::swap (r1, r2);
5466 
5467   for (i = 0; i < n_reloads; i ++)
5468     /* Look for input reloads that aren't our two */
5469     if (i != r1 && i != r2 && rld[i].in)
5470       {
5471 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5472 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5473 	  return false;
5474       }
5475   return true;
5476 }
5477 
5478 /* The recursive function change all occurrences of WHAT in *WHERE
5479    to REPL.  */
5480 static void
substitute(rtx * where,const_rtx what,rtx repl)5481 substitute (rtx *where, const_rtx what, rtx repl)
5482 {
5483   const char *fmt;
5484   int i;
5485   enum rtx_code code;
5486 
5487   if (*where == 0)
5488     return;
5489 
5490   if (*where == what || rtx_equal_p (*where, what))
5491     {
5492       /* Record the location of the changed rtx.  */
5493       substitute_stack.safe_push (where);
5494       *where = repl;
5495       return;
5496     }
5497 
5498   code = GET_CODE (*where);
5499   fmt = GET_RTX_FORMAT (code);
5500   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5501     {
5502       if (fmt[i] == 'E')
5503 	{
5504 	  int j;
5505 
5506 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5507 	    substitute (&XVECEXP (*where, i, j), what, repl);
5508 	}
5509       else if (fmt[i] == 'e')
5510 	substitute (&XEXP (*where, i), what, repl);
5511     }
5512 }
5513 
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads already has a reload register.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace the inner reload's value by its reload register, giving the
     combined insn we want to test; substitute records each change so it
     can be undone below.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit the candidate insn and ask recog whether it is valid;
	 the emitted insns are deleted again below regardless.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5598 
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      /* An input conflicts with the insn, operand address reloads,
	 other inputs, and the addresses of later inputs.  */
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Conflicts with the same operand's input address and with the
	 inputs of earlier operands.  */
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Likewise, for the address of an input address.  */
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Conflicts with the same operand's output address and with the
	 outputs of this or earlier operands.  */
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Likewise, for the address of an output address.  */
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with inputs, the insn, and other operand address
	 reloads unless the two form a unique chain that gen_reload can
	 emit without an intermediate register.  */
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
		  && (!reloads_unique_chain_p (r1, r2)
		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with inputs and with other RELOAD_FOR_OPADDR_ADDR
	 reloads.  */
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      /* Conflicts with the insn, other outputs, and the output
	 addresses of this or later operands.  */
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      /* Conflicts with inputs, outputs, other insn reloads and operand
	 address reloads.  */
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Conflicts only with its own kind.  */
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      /* RELOAD_OTHER conflicts with everything.  */
      return 1;

    default:
      gcc_unreachable ();
    }
}
5676 
/* Per-insn reload bookkeeping, filled in while choosing reload registers.  */

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5698 
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering;
   REGNO is the individual hard register being tested.  OPNUM, TYPE,
   VALUE, OUT, RELOADNUM and IGNORE_ADDRESS_RELOADS have the same
   meanings as for free_for_value_p below.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a special marker: we are testing a copy from
     register REGNO into the reload register, not a use of REGNO as the
     reload register itself (see free_for_value_p's comment).  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Compare against every other reload that was given a register
     overlapping REGNO.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && (unsigned) regno - true_regnum (reg) < REG_NREGS (reg)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      /* The lifetimes overlap: conflict unless both reloads are
		 read-only uses of the same value.  */
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
5917 
5918 /* Return 1 if the value in reload reg REGNO, as used by a reload
5919    needed for the part of the insn specified by OPNUM and TYPE,
5920    may be used to load VALUE into it.
5921 
5922    MODE is the mode in which the register is used, this is needed to
5923    determine how many hard regs to test.
5924 
5925    Other read-only reloads with the same value do not conflict
5926    unless OUT is nonzero and these other reloads have to live while
5927    output reloads live.
5928    If OUT is CONST0_RTX, this is a special case: it means that the
5929    test should not be for using register REGNO as reload register, but
5930    for copying from register REGNO into the reload register.
5931 
5932    RELOADNUM is the number of the reload we want to load this value for;
5933    a reload does not conflict with itself.
5934 
5935    When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5936    reloads that load an address for the very reload we are considering.
5937 
5938    The caller has to make sure that there is no conflict with the return
5939    register.  */
5940 
5941 static int
free_for_value_p(int regno,machine_mode mode,int opnum,enum reload_type type,rtx value,rtx out,int reloadnum,int ignore_address_reloads)5942 free_for_value_p (int regno, machine_mode mode, int opnum,
5943 		  enum reload_type type, rtx value, rtx out, int reloadnum,
5944 		  int ignore_address_reloads)
5945 {
5946   int nregs = hard_regno_nregs (regno, mode);
5947   while (nregs-- > 0)
5948     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5949 				       value, out, reloadnum,
5950 				       ignore_address_reloads))
5951       return 0;
5952   return 1;
5953 }
5954 
5955 /* Return nonzero if the rtx X is invariant over the current function.  */
5956 /* ??? Actually, the places where we use this expect exactly what is
5957    tested here, and not everything that is function invariant.  In
5958    particular, the frame pointer and arg pointer are special cased;
5959    pic_offset_table_rtx is not, and we must not spill these things to
5960    memory.  */
5961 
5962 int
function_invariant_p(const_rtx x)5963 function_invariant_p (const_rtx x)
5964 {
5965   if (CONSTANT_P (x))
5966     return 1;
5967   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5968     return 1;
5969   if (GET_CODE (x) == PLUS
5970       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5971       && GET_CODE (XEXP (x, 1)) == CONST_INT)
5972     return 1;
5973   return 0;
5974 }
5975 
5976 /* Determine whether the reload reg X overlaps any rtx'es used for
5977    overriding inheritance.  Return nonzero if so.  */
5978 
5979 static int
conflicts_with_override(rtx x)5980 conflicts_with_override (rtx x)
5981 {
5982   int i;
5983   for (i = 0; i < n_reloads; i++)
5984     if (reload_override_in[i]
5985 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
5986       return 1;
5987   return 0;
5988 }
5989 
5990 /* Give an error message saying we failed to find a reload for INSN,
5991    and clear out reload R.  */
5992 static void
failed_reload(rtx_insn * insn,int r)5993 failed_reload (rtx_insn *insn, int r)
5994 {
5995   if (asm_noperands (PATTERN (insn)) < 0)
5996     /* It's the compiler's fault.  */
5997     fatal_insn ("could not find a spill register", insn);
5998 
5999   /* It's the user's fault; the operand's mode and constraint
6000      don't match.  Disable this reload so we don't crash in final.  */
6001   error_for_asm (insn,
6002 		 "%<asm%> operand constraint incompatible with operand size");
6003   rld[r].in = 0;
6004   rld[r].out = 0;
6005   rld[r].reg_rtx = 0;
6006   rld[r].optional = 1;
6007   rld[r].secondary_p = 1;
6008 }
6009 
6010 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6011    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6012    successful.  */
6013 static int
set_reload_reg(int i,int r)6014 set_reload_reg (int i, int r)
6015 {
6016   int regno;
6017   rtx reg = spill_reg_rtx[i];
6018 
6019   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6020     spill_reg_rtx[i] = reg
6021       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6022 
6023   regno = true_regnum (reg);
6024 
6025   /* Detect when the reload reg can't hold the reload mode.
6026      This used to be one `if', but Sequent compiler can't handle that.  */
6027   if (targetm.hard_regno_mode_ok (regno, rld[r].mode))
6028     {
6029       machine_mode test_mode = VOIDmode;
6030       if (rld[r].in)
6031 	test_mode = GET_MODE (rld[r].in);
6032       /* If rld[r].in has VOIDmode, it means we will load it
6033 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6034 	 We have already tested that for validity.  */
6035       /* Aside from that, we need to test that the expressions
6036 	 to reload from or into have modes which are valid for this
6037 	 reload register.  Otherwise the reload insns would be invalid.  */
6038       if (! (rld[r].in != 0 && test_mode != VOIDmode
6039 	     && !targetm.hard_regno_mode_ok (regno, test_mode)))
6040 	if (! (rld[r].out != 0
6041 	       && !targetm.hard_regno_mode_ok (regno, GET_MODE (rld[r].out))))
6042 	  {
6043 	    /* The reg is OK.  */
6044 	    last_spill_reg = i;
6045 
6046 	    /* Mark as in use for this insn the reload regs we use
6047 	       for this.  */
6048 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6049 				    rld[r].when_needed, rld[r].mode);
6050 
6051 	    rld[r].reg_rtx = reg;
6052 	    reload_spill_index[r] = spill_regs[i];
6053 	    return 1;
6054 	  }
6055     }
6056   return 0;
6057 }
6058 
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (class insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  /* Pass 0 only accepts regs already used for other reloads of this
     insn (but not ones preserved for inheritance); pass 1 also accepts
     fresh regs unless ira_bad_reload_regno objects; pass 2 accepts any
     suitable reg.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance I with wrap-around so the scan starts just past
	     the most recently chosen spill reg.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && targetm.hard_regno_mode_ok (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs (regnum, rld[r].mode);

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      /* Walk NR down while regs regnum+1 .. regnum+nr-1 are in
		 the class, are spill regs, and are free; NR reaches 1
		 only if the whole group is usable.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6194 
/* Initialize all the tables needed to allocate reload registers.
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
   is the array we use to restore the reg_rtx field for every reload.  */

static void
choose_reload_regs_init (class insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  /* Restore each reload's reg_rtx to its saved value, undoing any
     choices made on a previous allocation attempt.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Forget all per-insn inheritance state.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  /* No reload registers are in use yet for any part of this insn.  */
  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Build the set of hard regs referenced around this insn: those in
     the chain's live_throughout and dead_or_set reg sets, plus (via
     compute_use_by_pseudos) the hard regs backing pseudos in those
     sets.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    reg_used_in_insn |= tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    reg_used_in_insn |= tmp;
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  /* Clear the per-operand usage sets for every operand slot.  */
  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Only the spill regs assigned to this chain are available.  */
  reload_reg_unavailable = ~chain->used_spill_regs;

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6250 
6251 /* If X is not a subreg, return it unmodified.  If it is a subreg,
6252    look up whether we made a replacement for the SUBREG_REG.  Return
6253    either the replacement or the SUBREG_REG.  */
6254 
6255 static rtx
replaced_subreg(rtx x)6256 replaced_subreg (rtx x)
6257 {
6258   if (GET_CODE (x) == SUBREG)
6259     return find_replacement (&SUBREG_REG (x));
6260   return x;
6261 }
6262 
6263 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6264    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6265    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6266    otherwise it is NULL.  */
6267 
6268 static poly_int64
compute_reload_subreg_offset(machine_mode outermode,rtx subreg,machine_mode innermode)6269 compute_reload_subreg_offset (machine_mode outermode,
6270 			      rtx subreg,
6271 			      machine_mode innermode)
6272 {
6273   poly_int64 outer_offset;
6274   machine_mode middlemode;
6275 
6276   if (!subreg)
6277     return subreg_lowpart_offset (outermode, innermode);
6278 
6279   outer_offset = SUBREG_BYTE (subreg);
6280   middlemode = GET_MODE (SUBREG_REG (subreg));
6281 
6282   /* If SUBREG is paradoxical then return the normal lowpart offset
6283      for OUTERMODE and INNERMODE.  Our caller has already checked
6284      that OUTERMODE fits in INNERMODE.  */
6285   if (paradoxical_subreg_p (outermode, middlemode))
6286     return subreg_lowpart_offset (outermode, innermode);
6287 
6288   /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6289      plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
6290   return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6291 }
6292 
6293 /* Assign hard reg targets for the pseudo-registers we must reload
6294    into hard regs for this insn.
6295    Also output the instructions to copy them in and out of the hard regs.
6296 
6297    For machines with register classes, we are responsible for
6298    finding a reload reg in the proper class.  */
6299 
6300 static void
choose_reload_regs(class insn_chain * chain)6301 choose_reload_regs (class insn_chain *chain)
6302 {
6303   rtx_insn *insn = chain->insn;
6304   int i, j;
6305   unsigned int max_group_size = 1;
6306   enum reg_class group_class = NO_REGS;
6307   int pass, win, inheritance;
6308 
6309   rtx save_reload_reg_rtx[MAX_RELOADS];
6310 
6311   /* In order to be certain of getting the registers we need,
6312      we must sort the reloads into order of increasing register class.
6313      Then our grabbing of reload registers will parallel the process
6314      that provided the reload registers.
6315 
6316      Also note whether any of the reloads wants a consecutive group of regs.
6317      If so, record the maximum size of the group desired and what
6318      register class contains all the groups needed by this insn.  */
6319 
6320   for (j = 0; j < n_reloads; j++)
6321     {
6322       reload_order[j] = j;
6323       if (rld[j].reg_rtx != NULL_RTX)
6324 	{
6325 	  gcc_assert (REG_P (rld[j].reg_rtx)
6326 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6327 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6328 	}
6329       else
6330 	reload_spill_index[j] = -1;
6331 
6332       if (rld[j].nregs > 1)
6333 	{
6334 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6335 	  group_class
6336 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6337 	}
6338 
6339       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6340     }
6341 
6342   if (n_reloads > 1)
6343     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6344 
6345   /* If -O, try first with inheritance, then turning it off.
6346      If not -O, don't do inheritance.
6347      Using inheritance when not optimizing leads to paradoxes
6348      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6349      because one side of the comparison might be inherited.  */
6350   win = 0;
6351   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6352     {
6353       choose_reload_regs_init (chain, save_reload_reg_rtx);
6354 
6355       /* Process the reloads in order of preference just found.
6356 	 Beyond this point, subregs can be found in reload_reg_rtx.
6357 
6358 	 This used to look for an existing reloaded home for all of the
6359 	 reloads, and only then perform any new reloads.  But that could lose
6360 	 if the reloads were done out of reg-class order because a later
6361 	 reload with a looser constraint might have an old home in a register
6362 	 needed by an earlier reload with a tighter constraint.
6363 
6364 	 To solve this, we make two passes over the reloads, in the order
6365 	 described above.  In the first pass we try to inherit a reload
6366 	 from a previous insn.  If there is a later reload that needs a
6367 	 class that is a proper subset of the class being processed, we must
6368 	 also allocate a spill register during the first pass.
6369 
6370 	 Then make a second pass over the reloads to allocate any reloads
6371 	 that haven't been given registers yet.  */
6372 
6373       for (j = 0; j < n_reloads; j++)
6374 	{
6375 	  int r = reload_order[j];
6376 	  rtx search_equiv = NULL_RTX;
6377 
6378 	  /* Ignore reloads that got marked inoperative.  */
6379 	  if (rld[r].out == 0 && rld[r].in == 0
6380 	      && ! rld[r].secondary_p)
6381 	    continue;
6382 
6383 	  /* If find_reloads chose to use reload_in or reload_out as a reload
6384 	     register, we don't need to chose one.  Otherwise, try even if it
6385 	     found one since we might save an insn if we find the value lying
6386 	     around.
6387 	     Try also when reload_in is a pseudo without a hard reg.  */
6388 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6389 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6390 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6391 		      && !MEM_P (rld[r].in)
6392 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6393 	    continue;
6394 
6395 #if 0 /* No longer needed for correct operation.
6396 	 It might give better code, or might not; worth an experiment?  */
6397 	  /* If this is an optional reload, we can't inherit from earlier insns
6398 	     until we are sure that any non-optional reloads have been allocated.
6399 	     The following code takes advantage of the fact that optional reloads
6400 	     are at the end of reload_order.  */
6401 	  if (rld[r].optional != 0)
6402 	    for (i = 0; i < j; i++)
6403 	      if ((rld[reload_order[i]].out != 0
6404 		   || rld[reload_order[i]].in != 0
6405 		   || rld[reload_order[i]].secondary_p)
6406 		  && ! rld[reload_order[i]].optional
6407 		  && rld[reload_order[i]].reg_rtx == 0)
6408 		allocate_reload_reg (chain, reload_order[i], 0);
6409 #endif
6410 
6411 	  /* First see if this pseudo is already available as reloaded
6412 	     for a previous insn.  We cannot try to inherit for reloads
6413 	     that are smaller than the maximum number of registers needed
6414 	     for groups unless the register we would allocate cannot be used
6415 	     for the groups.
6416 
6417 	     We could check here to see if this is a secondary reload for
6418 	     an object that is already in a register of the desired class.
6419 	     This would avoid the need for the secondary reload register.
6420 	     But this is complex because we can't easily determine what
6421 	     objects might want to be loaded via this reload.  So let a
6422 	     register be allocated here.  In `emit_reload_insns' we suppress
6423 	     one of the loads in the case described above.  */
6424 
6425 	  if (inheritance)
6426 	    {
6427 	      poly_int64 byte = 0;
6428 	      int regno = -1;
6429 	      machine_mode mode = VOIDmode;
6430 	      rtx subreg = NULL_RTX;
6431 
6432 	      if (rld[r].in == 0)
6433 		;
6434 	      else if (REG_P (rld[r].in))
6435 		{
6436 		  regno = REGNO (rld[r].in);
6437 		  mode = GET_MODE (rld[r].in);
6438 		}
6439 	      else if (REG_P (rld[r].in_reg))
6440 		{
6441 		  regno = REGNO (rld[r].in_reg);
6442 		  mode = GET_MODE (rld[r].in_reg);
6443 		}
6444 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6445 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6446 		{
6447 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6448 		  if (regno < FIRST_PSEUDO_REGISTER)
6449 		    regno = subreg_regno (rld[r].in_reg);
6450 		  else
6451 		    {
6452 		      subreg = rld[r].in_reg;
6453 		      byte = SUBREG_BYTE (subreg);
6454 		    }
6455 		  mode = GET_MODE (rld[r].in_reg);
6456 		}
6457 #if AUTO_INC_DEC
6458 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6459 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6460 		{
6461 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6462 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6463 		  rld[r].out = rld[r].in;
6464 		}
6465 #endif
6466 #if 0
6467 	      /* This won't work, since REGNO can be a pseudo reg number.
6468 		 Also, it takes much more hair to keep track of all the things
6469 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6470 	      else if (GET_CODE (rld[r].in) == SUBREG
6471 		       && REG_P (SUBREG_REG (rld[r].in)))
6472 		regno = subreg_regno (rld[r].in);
6473 #endif
6474 
6475 	      if (regno >= 0
6476 		  && reg_last_reload_reg[regno] != 0
6477 		  && (known_ge
6478 		      (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])),
6479 		       GET_MODE_SIZE (mode) + byte))
6480 		  /* Verify that the register it's in can be used in
6481 		     mode MODE.  */
6482 		  && (REG_CAN_CHANGE_MODE_P
6483 		      (REGNO (reg_last_reload_reg[regno]),
6484 		       GET_MODE (reg_last_reload_reg[regno]),
6485 		       mode)))
6486 		{
6487 		  enum reg_class rclass = rld[r].rclass, last_class;
6488 		  rtx last_reg = reg_last_reload_reg[regno];
6489 
6490 		  i = REGNO (last_reg);
6491 		  byte = compute_reload_subreg_offset (mode,
6492 						       subreg,
6493 						       GET_MODE (last_reg));
6494 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6495 		  last_class = REGNO_REG_CLASS (i);
6496 
6497 		  if (reg_reloaded_contents[i] == regno
6498 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6499 		      && targetm.hard_regno_mode_ok (i, rld[r].mode)
6500 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6501 			  /* Even if we can't use this register as a reload
6502 			     register, we might use it for reload_override_in,
6503 			     if copying it to the desired class is cheap
6504 			     enough.  */
6505 			  || ((register_move_cost (mode, last_class, rclass)
6506 			       < memory_move_cost (mode, rclass, true))
6507 			      && (secondary_reload_class (1, rclass, mode,
6508 							  last_reg)
6509 				  == NO_REGS)
6510 			      && !(targetm.secondary_memory_needed
6511 				   (mode, last_class, rclass))))
6512 		      && (rld[r].nregs == max_group_size
6513 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6514 						  i))
6515 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6516 					   rld[r].when_needed, rld[r].in,
6517 					   const0_rtx, r, 1))
6518 		    {
6519 		      /* If a group is needed, verify that all the subsequent
6520 			 registers still have their values intact.  */
6521 		      int nr = hard_regno_nregs (i, rld[r].mode);
6522 		      int k;
6523 
6524 		      for (k = 1; k < nr; k++)
6525 			if (reg_reloaded_contents[i + k] != regno
6526 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6527 			  break;
6528 
6529 		      if (k == nr)
6530 			{
6531 			  int i1;
6532 			  int bad_for_class;
6533 
6534 			  last_reg = (GET_MODE (last_reg) == mode
6535 				      ? last_reg : gen_rtx_REG (mode, i));
6536 
6537 			  bad_for_class = 0;
6538 			  for (k = 0; k < nr; k++)
6539 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6540 								  i+k);
6541 
6542 			  /* We found a register that contains the
6543 			     value we need.  If this register is the
6544 			     same as an `earlyclobber' operand of the
6545 			     current insn, just mark it as a place to
6546 			     reload from since we can't use it as the
6547 			     reload register itself.  */
6548 
6549 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6550 			    if (reg_overlap_mentioned_for_reload_p
6551 				(reg_last_reload_reg[regno],
6552 				 reload_earlyclobbers[i1]))
6553 			      break;
6554 
6555 			  if (i1 != n_earlyclobbers
6556 			      || ! (free_for_value_p (i, rld[r].mode,
6557 						      rld[r].opnum,
6558 						      rld[r].when_needed, rld[r].in,
6559 						      rld[r].out, r, 1))
6560 			      /* Don't use it if we'd clobber a pseudo reg.  */
6561 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6562 				  && rld[r].out
6563 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6564 			      /* Don't clobber the frame pointer.  */
6565 			      || (i == HARD_FRAME_POINTER_REGNUM
6566 				  && frame_pointer_needed
6567 				  && rld[r].out)
6568 			      /* Don't really use the inherited spill reg
6569 				 if we need it wider than we've got it.  */
6570 			      || paradoxical_subreg_p (rld[r].mode, mode)
6571 			      || bad_for_class
6572 
6573 			      /* If find_reloads chose reload_out as reload
6574 				 register, stay with it - that leaves the
6575 				 inherited register for subsequent reloads.  */
6576 			      || (rld[r].out && rld[r].reg_rtx
6577 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6578 			    {
6579 			      if (! rld[r].optional)
6580 				{
6581 				  reload_override_in[r] = last_reg;
6582 				  reload_inheritance_insn[r]
6583 				    = reg_reloaded_insn[i];
6584 				}
6585 			    }
6586 			  else
6587 			    {
6588 			      int k;
6589 			      /* We can use this as a reload reg.  */
6590 			      /* Mark the register as in use for this part of
6591 				 the insn.  */
6592 			      mark_reload_reg_in_use (i,
6593 						      rld[r].opnum,
6594 						      rld[r].when_needed,
6595 						      rld[r].mode);
6596 			      rld[r].reg_rtx = last_reg;
6597 			      reload_inherited[r] = 1;
6598 			      reload_inheritance_insn[r]
6599 				= reg_reloaded_insn[i];
6600 			      reload_spill_index[r] = i;
6601 			      for (k = 0; k < nr; k++)
6602 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6603 						  i + k);
6604 			    }
6605 			}
6606 		    }
6607 		}
6608 	    }
6609 
6610 	  /* Here's another way to see if the value is already lying around.  */
6611 	  if (inheritance
6612 	      && rld[r].in != 0
6613 	      && ! reload_inherited[r]
6614 	      && rld[r].out == 0
6615 	      && (CONSTANT_P (rld[r].in)
6616 		  || GET_CODE (rld[r].in) == PLUS
6617 		  || REG_P (rld[r].in)
6618 		  || MEM_P (rld[r].in))
6619 	      && (rld[r].nregs == max_group_size
6620 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6621 	    search_equiv = rld[r].in;
6622 
6623 	  if (search_equiv)
6624 	    {
6625 	      rtx equiv
6626 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6627 				  -1, NULL, 0, rld[r].mode);
6628 	      int regno = 0;
6629 
6630 	      if (equiv != 0)
6631 		{
6632 		  if (REG_P (equiv))
6633 		    regno = REGNO (equiv);
6634 		  else
6635 		    {
6636 		      /* This must be a SUBREG of a hard register.
6637 			 Make a new REG since this might be used in an
6638 			 address and not all machines support SUBREGs
6639 			 there.  */
6640 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6641 		      regno = subreg_regno (equiv);
6642 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6643 		      /* If we choose EQUIV as the reload register, but the
6644 			 loop below decides to cancel the inheritance, we'll
6645 			 end up reloading EQUIV in rld[r].mode, not the mode
6646 			 it had originally.  That isn't safe when EQUIV isn't
6647 			 available as a spill register since its value might
6648 			 still be live at this point.  */
6649 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6650 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6651 			  equiv = 0;
6652 		    }
6653 		}
6654 
6655 	      /* If we found a spill reg, reject it unless it is free
6656 		 and of the desired class.  */
6657 	      if (equiv != 0)
6658 		{
6659 		  int regs_used = 0;
6660 		  int bad_for_class = 0;
6661 		  int max_regno = regno + rld[r].nregs;
6662 
6663 		  for (i = regno; i < max_regno; i++)
6664 		    {
6665 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6666 						      i);
6667 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6668 							   i);
6669 		    }
6670 
6671 		  if ((regs_used
6672 		       && ! free_for_value_p (regno, rld[r].mode,
6673 					      rld[r].opnum, rld[r].when_needed,
6674 					      rld[r].in, rld[r].out, r, 1))
6675 		      || bad_for_class)
6676 		    equiv = 0;
6677 		}
6678 
6679 	      if (equiv != 0
6680 		  && !targetm.hard_regno_mode_ok (regno, rld[r].mode))
6681 		equiv = 0;
6682 
6683 	      /* We found a register that contains the value we need.
6684 		 If this register is the same as an `earlyclobber' operand
6685 		 of the current insn, just mark it as a place to reload from
6686 		 since we can't use it as the reload register itself.  */
6687 
6688 	      if (equiv != 0)
6689 		for (i = 0; i < n_earlyclobbers; i++)
6690 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6691 							  reload_earlyclobbers[i]))
6692 		    {
6693 		      if (! rld[r].optional)
6694 			reload_override_in[r] = equiv;
6695 		      equiv = 0;
6696 		      break;
6697 		    }
6698 
6699 	      /* If the equiv register we have found is explicitly clobbered
6700 		 in the current insn, it depends on the reload type if we
6701 		 can use it, use it for reload_override_in, or not at all.
6702 		 In particular, we then can't use EQUIV for a
6703 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6704 
6705 	      if (equiv != 0)
6706 		{
6707 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6708 		    switch (rld[r].when_needed)
6709 		      {
6710 		      case RELOAD_FOR_OTHER_ADDRESS:
6711 		      case RELOAD_FOR_INPADDR_ADDRESS:
6712 		      case RELOAD_FOR_INPUT_ADDRESS:
6713 		      case RELOAD_FOR_OPADDR_ADDR:
6714 			break;
6715 		      case RELOAD_OTHER:
6716 		      case RELOAD_FOR_INPUT:
6717 		      case RELOAD_FOR_OPERAND_ADDRESS:
6718 			if (! rld[r].optional)
6719 			  reload_override_in[r] = equiv;
6720 			/* Fall through.  */
6721 		      default:
6722 			equiv = 0;
6723 			break;
6724 		      }
6725 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6726 		    switch (rld[r].when_needed)
6727 		      {
6728 		      case RELOAD_FOR_OTHER_ADDRESS:
6729 		      case RELOAD_FOR_INPADDR_ADDRESS:
6730 		      case RELOAD_FOR_INPUT_ADDRESS:
6731 		      case RELOAD_FOR_OPADDR_ADDR:
6732 		      case RELOAD_FOR_OPERAND_ADDRESS:
6733 		      case RELOAD_FOR_INPUT:
6734 			break;
6735 		      case RELOAD_OTHER:
6736 			if (! rld[r].optional)
6737 			  reload_override_in[r] = equiv;
6738 			/* Fall through.  */
6739 		      default:
6740 			equiv = 0;
6741 			break;
6742 		      }
6743 		}
6744 
6745 	      /* If we found an equivalent reg, say no code need be generated
6746 		 to load it, and use it as our reload reg.  */
6747 	      if (equiv != 0
6748 		  && (regno != HARD_FRAME_POINTER_REGNUM
6749 		      || !frame_pointer_needed))
6750 		{
6751 		  int nr = hard_regno_nregs (regno, rld[r].mode);
6752 		  int k;
6753 		  rld[r].reg_rtx = equiv;
6754 		  reload_spill_index[r] = regno;
6755 		  reload_inherited[r] = 1;
6756 
6757 		  /* If reg_reloaded_valid is not set for this register,
6758 		     there might be a stale spill_reg_store lying around.
6759 		     We must clear it, since otherwise emit_reload_insns
6760 		     might delete the store.  */
6761 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6762 		    spill_reg_store[regno] = NULL;
6763 		  /* If any of the hard registers in EQUIV are spill
6764 		     registers, mark them as in use for this insn.  */
6765 		  for (k = 0; k < nr; k++)
6766 		    {
6767 		      i = spill_reg_order[regno + k];
6768 		      if (i >= 0)
6769 			{
6770 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6771 						  rld[r].when_needed,
6772 						  rld[r].mode);
6773 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6774 					    regno + k);
6775 			}
6776 		    }
6777 		}
6778 	    }
6779 
6780 	  /* If we found a register to use already, or if this is an optional
6781 	     reload, we are done.  */
6782 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6783 	    continue;
6784 
6785 #if 0
6786 	  /* No longer needed for correct operation.  Might or might
6787 	     not give better code on the average.  Want to experiment?  */
6788 
6789 	  /* See if there is a later reload that has a class different from our
6790 	     class that intersects our class or that requires less register
6791 	     than our reload.  If so, we must allocate a register to this
6792 	     reload now, since that reload might inherit a previous reload
6793 	     and take the only available register in our class.  Don't do this
6794 	     for optional reloads since they will force all previous reloads
6795 	     to be allocated.  Also don't do this for reloads that have been
6796 	     turned off.  */
6797 
6798 	  for (i = j + 1; i < n_reloads; i++)
6799 	    {
6800 	      int s = reload_order[i];
6801 
6802 	      if ((rld[s].in == 0 && rld[s].out == 0
6803 		   && ! rld[s].secondary_p)
6804 		  || rld[s].optional)
6805 		continue;
6806 
6807 	      if ((rld[s].rclass != rld[r].rclass
6808 		   && reg_classes_intersect_p (rld[r].rclass,
6809 					       rld[s].rclass))
6810 		  || rld[s].nregs < rld[r].nregs)
6811 		break;
6812 	    }
6813 
6814 	  if (i == n_reloads)
6815 	    continue;
6816 
6817 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6818 #endif
6819 	}
6820 
6821       /* Now allocate reload registers for anything non-optional that
6822 	 didn't get one yet.  */
6823       for (j = 0; j < n_reloads; j++)
6824 	{
6825 	  int r = reload_order[j];
6826 
6827 	  /* Ignore reloads that got marked inoperative.  */
6828 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6829 	    continue;
6830 
6831 	  /* Skip reloads that already have a register allocated or are
6832 	     optional.  */
6833 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6834 	    continue;
6835 
6836 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6837 	    break;
6838 	}
6839 
6840       /* If that loop got all the way, we have won.  */
6841       if (j == n_reloads)
6842 	{
6843 	  win = 1;
6844 	  break;
6845 	}
6846 
6847       /* Loop around and try without any inheritance.  */
6848     }
6849 
6850   if (! win)
6851     {
6852       /* First undo everything done by the failed attempt
6853 	 to allocate with inheritance.  */
6854       choose_reload_regs_init (chain, save_reload_reg_rtx);
6855 
6856       /* Some sanity tests to verify that the reloads found in the first
6857 	 pass are identical to the ones we have now.  */
6858       gcc_assert (chain->n_reloads == n_reloads);
6859 
6860       for (i = 0; i < n_reloads; i++)
6861 	{
6862 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6863 	    continue;
6864 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6865 	  for (j = 0; j < n_spills; j++)
6866 	    if (spill_regs[j] == chain->rld[i].regno)
6867 	      if (! set_reload_reg (j, i))
6868 		failed_reload (chain->insn, i);
6869 	}
6870     }
6871 
6872   /* If we thought we could inherit a reload, because it seemed that
6873      nothing else wanted the same reload register earlier in the insn,
6874      verify that assumption, now that all reloads have been assigned.
6875      Likewise for reloads where reload_override_in has been set.  */
6876 
6877   /* If doing expensive optimizations, do one preliminary pass that doesn't
6878      cancel any inheritance, but removes reloads that have been needed only
6879      for reloads that we know can be inherited.  */
6880   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6881     {
6882       for (j = 0; j < n_reloads; j++)
6883 	{
6884 	  int r = reload_order[j];
6885 	  rtx check_reg;
6886 	  rtx tem;
6887 	  if (reload_inherited[r] && rld[r].reg_rtx)
6888 	    check_reg = rld[r].reg_rtx;
6889 	  else if (reload_override_in[r]
6890 		   && (REG_P (reload_override_in[r])
6891 		       || GET_CODE (reload_override_in[r]) == SUBREG))
6892 	    check_reg = reload_override_in[r];
6893 	  else
6894 	    continue;
6895 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6896 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
6897 				  (reload_inherited[r]
6898 				   ? rld[r].out : const0_rtx),
6899 				  r, 1))
6900 	    {
6901 	      if (pass)
6902 		continue;
6903 	      reload_inherited[r] = 0;
6904 	      reload_override_in[r] = 0;
6905 	    }
6906 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6907 	     reload_override_in, then we do not need its related
6908 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6909 	     likewise for other reload types.
6910 	     We handle this by removing a reload when its only replacement
6911 	     is mentioned in reload_in of the reload we are going to inherit.
6912 	     A special case are auto_inc expressions; even if the input is
6913 	     inherited, we still need the address for the output.  We can
6914 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
6915 	     If we succeeded removing some reload and we are doing a preliminary
6916 	     pass just to remove such reloads, make another pass, since the
6917 	     removal of one reload might allow us to inherit another one.  */
6918 	  else if (rld[r].in
6919 		   && rld[r].out != rld[r].in
6920 		   && remove_address_replacements (rld[r].in))
6921 	    {
6922 	      if (pass)
6923 	        pass = 2;
6924 	    }
6925 	  /* If we needed a memory location for the reload, we also have to
6926 	     remove its related reloads.  */
6927 	  else if (rld[r].in
6928 		   && rld[r].out != rld[r].in
6929 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
6930 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
6931 		   && (targetm.secondary_memory_needed
6932 		       (rld[r].inmode, REGNO_REG_CLASS (REGNO (tem)),
6933 			rld[r].rclass))
6934 		   && remove_address_replacements
6935 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
6936 					  rld[r].when_needed)))
6937 	    {
6938 	      if (pass)
6939 	        pass = 2;
6940 	    }
6941 	}
6942     }
6943 
6944   /* Now that reload_override_in is known valid,
6945      actually override reload_in.  */
6946   for (j = 0; j < n_reloads; j++)
6947     if (reload_override_in[j])
6948       rld[j].in = reload_override_in[j];
6949 
6950   /* If this reload won't be done because it has been canceled or is
6951      optional and not inherited, clear reload_reg_rtx so other
6952      routines (such as subst_reloads) don't get confused.  */
6953   for (j = 0; j < n_reloads; j++)
6954     if (rld[j].reg_rtx != 0
6955 	&& ((rld[j].optional && ! reload_inherited[j])
6956 	    || (rld[j].in == 0 && rld[j].out == 0
6957 		&& ! rld[j].secondary_p)))
6958       {
6959 	int regno = true_regnum (rld[j].reg_rtx);
6960 
6961 	if (spill_reg_order[regno] >= 0)
6962 	  clear_reload_reg_in_use (regno, rld[j].opnum,
6963 				   rld[j].when_needed, rld[j].mode);
6964 	rld[j].reg_rtx = 0;
6965 	reload_spill_index[j] = -1;
6966       }
6967 
6968   /* Record which pseudos and which spill regs have output reloads.  */
6969   for (j = 0; j < n_reloads; j++)
6970     {
6971       int r = reload_order[j];
6972 
6973       i = reload_spill_index[r];
6974 
6975       /* I is nonneg if this reload uses a register.
6976 	 If rld[r].reg_rtx is 0, this is an optional reload
6977 	 that we opted to ignore.  */
6978       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6979 	  && rld[r].reg_rtx != 0)
6980 	{
6981 	  int nregno = REGNO (rld[r].out_reg);
6982 	  int nr = 1;
6983 
6984 	  if (nregno < FIRST_PSEUDO_REGISTER)
6985 	    nr = hard_regno_nregs (nregno, rld[r].mode);
6986 
6987 	  while (--nr >= 0)
6988 	    SET_REGNO_REG_SET (&reg_has_output_reload,
6989 			       nregno + nr);
6990 
6991 	  if (i >= 0)
6992 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
6993 
6994 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
6995 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
6996 		      || rld[r].when_needed == RELOAD_FOR_INSN);
6997 	}
6998     }
6999 }
7000 
7001 /* Deallocate the reload register for reload R.  This is called from
7002    remove_address_replacements.  */
7003 
7004 void
deallocate_reload_reg(int r)7005 deallocate_reload_reg (int r)
7006 {
7007   int regno;
7008 
7009   if (! rld[r].reg_rtx)
7010     return;
7011   regno = true_regnum (rld[r].reg_rtx);
7012   rld[r].reg_rtx = 0;
7013   if (spill_reg_order[regno] >= 0)
7014     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7015 			     rld[r].mode);
7016   reload_spill_index[r] = -1;
7017 }
7018 
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds the head of an insn sequence containing the reload insns
   of one RELOAD_FOR_* type; the per-operand arrays are indexed by
   operand number.  emit_input_reload_insns selects the sequence to
   append to based on rl->when_needed.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *other_input_address_reload_insns = 0;
static rtx_insn *other_input_reload_insns = 0;
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *operand_reload_insns = 0;
static rtx_insn *other_operand_reload_insns = 0;
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard registers whose contents died in the current reload sequence;
   indexed by hard register number.  */
static HARD_REG_SET reg_reloaded_died;
7037 
7038 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7039    of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7040    is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7041    adjusted register, and return true.  Otherwise, return false.  */
7042 static bool
reload_adjust_reg_for_temp(rtx * reload_reg,rtx alt_reload_reg,enum reg_class new_class,machine_mode new_mode)7043 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7044 			    enum reg_class new_class,
7045 			    machine_mode new_mode)
7046 
7047 {
7048   rtx reg;
7049 
7050   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7051     {
7052       unsigned regno = REGNO (reg);
7053 
7054       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7055 	continue;
7056       if (GET_MODE (reg) != new_mode)
7057 	{
7058 	  if (!targetm.hard_regno_mode_ok (regno, new_mode))
7059 	    continue;
7060 	  if (hard_regno_nregs (regno, new_mode) > REG_NREGS (reg))
7061 	    continue;
7062 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7063 	}
7064       *reload_reg = reg;
7065       return true;
7066     }
7067   return false;
7068 }
7069 
7070 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7071    pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7072    nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7073    adjusted register, and return true.  Otherwise, return false.  */
7074 static bool
reload_adjust_reg_for_icode(rtx * reload_reg,rtx alt_reload_reg,enum insn_code icode)7075 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7076 			     enum insn_code icode)
7077 
7078 {
7079   enum reg_class new_class = scratch_reload_class (icode);
7080   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7081 
7082   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7083 				     new_class, new_mode);
7084 }
7085 
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.  The
   generated insns are appended to whichever static reload-insn sequence
   (input_reload_insns, operand_reload_insns, etc.) corresponds to
   RL->when_needed.  */

static void
emit_input_reload_insns (class insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx_insn *insn = chain->insn;
  rtx reloadreg;
  rtx oldequiv_reg = 0;
  rtx oldequiv = 0;
  /* Nonzero once one of the special-case paths below has already emitted
     the reload, suppressing the generic gen_reload at the end.  */
  int special = 0;
  machine_mode mode;
  rtx_insn **where;

  /* delete_output_reload is only invoked properly if old contains
     the original pseudo register.  Since this is replaced with a
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
     find the pseudo in RELOAD_IN_REG.  This is also used to
     determine whether a secondary reload is needed.  */
  if (reload_override_in[j]
      && (REG_P (rl->in_reg)
	  || (GET_CODE (rl->in_reg) == SUBREG
	      && REG_P (SUBREG_REG (rl->in_reg)))))
    {
      oldequiv = old;
      old = rl->in_reg;
    }
  /* From here on, OLDEQUIV is the value we will actually load from and
     OLD is the original (pseudo) input; OLDEQUIV_REG is the REG inside
     OLDEQUIV, if any.  */
  if (oldequiv == 0)
    oldequiv = old;
  else if (REG_P (oldequiv))
    oldequiv_reg = oldequiv;
  else if (GET_CODE (oldequiv) == SUBREG)
    oldequiv_reg = SUBREG_REG (oldequiv);

  reloadreg = reload_reg_rtx_for_input[j];
  mode = GET_MODE (reloadreg);

  /* If we are reloading from a register that was recently stored in
     with an output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize && REG_P (oldequiv)
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
      && spill_reg_store[REGNO (oldequiv)]
      && REG_P (old)
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
     OLDEQUIV.  */

  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
    oldequiv = SUBREG_REG (oldequiv);
  if (GET_MODE (oldequiv) != VOIDmode
      && mode != GET_MODE (oldequiv))
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);

  /* Switch to the right place to emit the reload insns.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    default:
      gcc_unreachable ();
    }

  push_to_sequence (*where);

  /* Auto-increment addresses must be reloaded in a special way.  */
  if (rl->out && ! rl->out_reg)
    {
      /* We are not going to bother supporting the case where a
	 incremented register can't be copied directly from
	 OLDEQUIV since this seems highly unlikely.  */
      gcc_assert (rl->secondary_in_reload < 0);

      /* For an inherited reload the value is presumably already in the
	 reload register, so increment it in place.  */
      if (reload_inherited[j])
	oldequiv = reloadreg;

      old = XEXP (rl->in_reg, 0);

      /* Prevent normal processing of this reload.  */
      special = 1;
      /* Output a special code sequence for this case.  */
      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
    }

  /* If we are reloading a pseudo-register that was set by the previous
     insn, see if we can get rid of that pseudo-register entirely
     by redirecting the previous insn into our reload register.  */

  else if (optimize && REG_P (old)
	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
	   && dead_or_set_p (insn, old)
	   /* This is unsafe if some other reload
	      uses the same reg first.  */
	   && ! conflicts_with_override (reloadreg)
	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
				rl->when_needed, old, rl->out, j, 0))
    {
      rtx_insn *temp = PREV_INSN (insn);
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
	temp = PREV_INSN (temp);
      if (temp
	  && NONJUMP_INSN_P (temp)
	  && GET_CODE (PATTERN (temp)) == SET
	  && SET_DEST (PATTERN (temp)) == old
	  /* Make sure we can access insn_operand_constraint.  */
	  && asm_noperands (PATTERN (temp)) < 0
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences aren't reloaded.  */
	  && count_occurrences (PATTERN (insn), old, 0) == 1)
	{
	  /* NB: this local OLD shadows the function-level OLD; it holds
	     the same pseudo, since we checked SET_DEST == old above.  */
	  rtx old = SET_DEST (PATTERN (temp));
	  /* Store into the reload register instead of the pseudo.  */
	  SET_DEST (PATTERN (temp)) = reloadreg;

	  /* Verify that resulting insn is valid.

	     Note that we have replaced the destination of TEMP with
	     RELOADREG.  If TEMP references RELOADREG within an
	     autoincrement addressing mode, then the resulting insn
	     is ill-formed and we must reject this optimization.  */
	  extract_insn (temp);
	  if (constrain_operands (1, get_enabled_alternatives (temp))
	      && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
	    {
	      /* If the previous insn is an output reload, the source is
		 a reload register, and its spill_reg_store entry will
		 contain the previous destination.  This is now
		 invalid.  */
	      if (REG_P (SET_SRC (PATTERN (temp)))
		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
		{
		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		}

	      /* If these are the only uses of the pseudo reg,
		 pretend for GDB it lives in the reload reg we used.  */
	      if (REG_N_DEATHS (REGNO (old)) == 1
		  && REG_N_SETS (REGNO (old)) == 1)
		{
		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
		  if (ira_conflicts_p)
		    /* Inform IRA about the change.  */
		    ira_mark_allocation_change (REGNO (old));
		  alter_reg (REGNO (old), -1, false);
		}
	      special = 1;

	      /* Adjust any debug insns between temp and insn.  */
	      while ((temp = NEXT_INSN (temp)) != insn)
		if (DEBUG_BIND_INSN_P (temp))
		  INSN_VAR_LOCATION_LOC (temp)
		    = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
					    old, reloadreg);
		else
		  gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp));
	    }
	  else
	    {
	      /* The redirected insn did not satisfy its constraints;
		 undo the destination replacement.  */
	      SET_DEST (PATTERN (temp)) = old;
	    }
	}
    }

  /* We can't do that, so output an insn to load RELOADREG.  */

  /* If we have a secondary reload, pick up the secondary register
     and icode, if any.  If OLDEQUIV and OLD are different or
     if this is an in-out reload, recompute whether or not we
     still need a secondary register and what the icode should
     be.  If we still need a secondary register and the class or
     icode is different, go back to reloading from OLD if using
     OLDEQUIV means that we got the wrong type of register.  We
     cannot have different class or icode due to an in-out reload
     because we don't make such reloads when both the input and
     output need secondary reload registers.  */

  if (! special && rl->secondary_in_reload >= 0)
    {
      rtx second_reload_reg = 0;
      rtx third_reload_reg = 0;
      int secondary_reload = rl->secondary_in_reload;
      rtx real_oldequiv = oldequiv;
      rtx real_old = old;
      rtx tmp;
      enum insn_code icode;
      enum insn_code tertiary_icode = CODE_FOR_nothing;

      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
	 and similarly for OLD.
	 See comments in get_secondary_reload in reload.c.  */
      /* If it is a pseudo that cannot be replaced with its
	 equivalent MEM, we must fall back to reload_in, which
	 will have all the necessary substitutions registered.
	 Likewise for a pseudo that can't be replaced with its
	 equivalent constant.

	 Take extra care for subregs of such pseudos.  Note that
	 we cannot use reg_equiv_mem in this case because it is
	 not in the right mode.  */

      tmp = oldequiv;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (oldequiv) == SUBREG)
	    real_oldequiv = rl->in;
	  else
	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
	}

      tmp = old;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (old) == SUBREG)
	    real_old = rl->in;
	  else
	    real_old = reg_equiv_mem (REGNO (tmp));
	}

      second_reload_reg = rld[secondary_reload].reg_rtx;
      if (rld[secondary_reload].secondary_in_reload >= 0)
	{
	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;

	  third_reload_reg = rld[tertiary_reload].reg_rtx;
	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
	  /* We'd have to add more code for quartary reloads.  */
	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
	}
      icode = rl->secondary_in_icode;

      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
	  || (rl->in != 0 && rl->out != 0))
	{
	  /* Re-query the target: the secondary reload requirements may
	     differ for OLDEQUIV compared to what was computed for OLD.  */
	  secondary_reload_info sri, sri2;
	  enum reg_class new_class, new_t_class;

	  sri.icode = CODE_FOR_nothing;
	  sri.prev_sri = NULL;
	  new_class
	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							 rl->rclass, mode,
							 &sri);

	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	    second_reload_reg = 0;
	  else if (new_class == NO_REGS)
	    {
	      if (reload_adjust_reg_for_icode (&second_reload_reg,
					       third_reload_reg,
					       (enum insn_code) sri.icode))
		{
		  icode = (enum insn_code) sri.icode;
		  third_reload_reg = 0;
		}
	      else
		{
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	  else if (sri.icode != CODE_FOR_nothing)
	    /* We currently lack a way to express this in reloads.  */
	    gcc_unreachable ();
	  else
	    {
	      sri2.icode = CODE_FOR_nothing;
	      sri2.prev_sri = &sri;
	      new_t_class
		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							     new_class, mode,
							     &sri);
	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  if (reload_adjust_reg_for_temp (&second_reload_reg,
						  third_reload_reg,
						  new_class, mode))
		    {
		      third_reload_reg = 0;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
						      ((enum insn_code)
						       sri2.icode)))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
						      new_t_class, mode))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else
		{
		  /* This could be handled more intelligently too.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	}

      /* If we still need a secondary reload register, check
	 to see if it is being used as a scratch or intermediate
	 register and generate code appropriately.  If we need
	 a scratch register, use REAL_OLDEQUIV since the form of
	 the insn may depend on the actual address if it is
	 a MEM.  */

      if (second_reload_reg)
	{
	  if (icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (!third_reload_reg);

	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
					  second_reload_reg));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need a scratch register to load the
		 intermediate register (a tertiary reload).  */
	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (second_reload_reg, real_oldequiv,
			       third_reload_reg)));
		}
	      else if (third_reload_reg)
		{
		  /* Load via the tertiary register into the secondary
		     (intermediate) one, then from there into RELOADREG
		     by the code below.  */
		  gen_reload (third_reload_reg, real_oldequiv,
			      rl->opnum,
			      rl->when_needed);
		  gen_reload (second_reload_reg, third_reload_reg,
			      rl->opnum,
			      rl->when_needed);
		}
	      else
		gen_reload (second_reload_reg, real_oldequiv,
			    rl->opnum,
			    rl->when_needed);

	      oldequiv = second_reload_reg;
	    }
	}
    }

  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
    {
      rtx real_oldequiv = oldequiv;

      /* Fall back to rl->in when OLDEQUIV is a pseudo (or subreg of one)
	 with a memory/constant equivalence, or a constant the target
	 refuses to reload directly into RELOADREG's class.  */
      if ((REG_P (oldequiv)
	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
	  || (GET_CODE (oldequiv) == SUBREG
	      && REG_P (SUBREG_REG (oldequiv))
	      && (REGNO (SUBREG_REG (oldequiv))
		  >= FIRST_PSEUDO_REGISTER)
	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
	  || (CONSTANT_P (oldequiv)
	      && (targetm.preferred_reload_class (oldequiv,
						  REGNO_REG_CLASS (REGNO (reloadreg)))
		  == NO_REGS)))
	real_oldequiv = rl->in;
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
		  rl->when_needed);
    }

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  /* End this sequence.  */
  *where = get_insns ();
  end_sequence ();

  /* Update reload_override_in so that delete_address_reloads_1
     can see the actual register usage.  */
  if (oldequiv_reg)
    reload_override_in[j] = oldequiv;
}
7546 
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The insns are accumulated on the
   per-operand sequences (output_reload_insns / other_output_reload_insns);
   emit_reload_insns later splices them after the reloaded insn.  */
static void
emit_output_reload_insns (class insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* Nonzero once a secondary-reload insn pattern has performed the actual
     store, so the generic gen_reload at the end must be skipped.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  /* RELOAD_OTHER reloads collect into a fresh sequence; all other types
     append to the sequence already gathered for this operand.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* Prefer the spilled pseudo's memory equivalent as the real
	 destination, if it has one.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (p, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    /* Late discovery of the spill index: the reload reg turned
	       out to be the source of the reloaded insn itself.  */
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Hand the accumulated sequence back to the per-operand slot it came
     from (or the RELOAD_OTHER slot, appended after what was there).  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  /* Propagate EH region notes onto the new insns when they can throw.  */
  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7752 
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Records the (possibly mode-adjusted) reload register
   in reload_reg_rtx_for_input[J], emits the input-copy insns when needed,
   and tries to delete a now-dead earlier output reload.  */
static void
do_input_reload (class insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* Prefer in_reg (the original pseudo) over a stack-slot MEM, so the
     mode logic below sees the operand's intrinsic mode.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
7851 
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Records the (possibly mode-adjusted) reload
   register in reload_reg_rtx_for_output[J]; tries to delete a previous
   store made redundant by this reload; updates REG_UNUSED notes for
   outputs that die immediately; otherwise emits the output-copy insns.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (class insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  /* For a user asm, diagnose instead of crashing.  */
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_output[j] = reg_rtx;

  /* If the pseudo was previously stored from a spill reg and this reload
     isn't also loading it, a matching earlier store may be deletable.  */
  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && (note = find_reg_note (insn, REG_UNUSED,
				     SUBREG_REG (old))) != 0)
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
7944 
7945 /* A reload copies values of MODE from register SRC to register DEST.
7946    Return true if it can be treated for inheritance purposes like a
7947    group of reloads, each one reloading a single hard register.  The
7948    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7949    occupy the same number of hard registers.  */
7950 
7951 static bool
inherit_piecemeal_p(int dest ATTRIBUTE_UNUSED,int src ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED)7952 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7953 		     int src ATTRIBUTE_UNUSED,
7954 		     machine_mode mode ATTRIBUTE_UNUSED)
7955 {
7956   return (REG_CAN_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7957 	  && REG_CAN_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7958 }
7959 
/* Output insns to reload values in and out of the chosen reload regs
   for the insn described by CHAIN, then update the inheritance-tracking
   data (spill_reg_store, reg_last_reload_reg, reg_reloaded_*) so that
   later insns may reuse the reloaded values.  */

static void
emit_reload_insns (class insn_chain *chain)
{
  rtx_insn *insn = chain->insn;

  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset all per-insn reload insn sequences.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  /* Clear new_spill_reg_store for every hard reg used by a reload, so
     stale entries from a previous insn cannot be mistaken for stores
     made here.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
      {
	unsigned int i;

	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	  new_spill_reg_store[i] = 0;
      }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs (i, GET_MODE (rld[r].reg_rtx));
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, r))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
		  || (rld[r].out_reg
		      ? REG_P (rld[r].out_reg)
		      /* The reload value is an auto-modification of
			 some kind.  For PRE_INC, POST_INC, PRE_DEC
			 and POST_DEC, we record an equivalence
			 between the reload register and the operand
			 on the optimistic assumption that we can make
			 the equivalence hold.  reload_as_needed must
			 then either make it hold or invalidate the
			 equivalence.

			 PRE_MODIFY and POST_MODIFY addresses are reloaded
			 somewhat differently, and allowing them here leads
			 to problems.  */
		      : (GET_CODE (rld[r].out) != POST_MODIFY
			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_output[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode = GET_MODE (reg);
		  int regno = REGNO (reg);
		  int nregs = REG_NREGS (reg);
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
/* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs (out_regno, mode));
		  bool piecemeal;

		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_input[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode;
		  int regno;
		  int nregs;
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  mode = GET_MODE (reg);
		  regno = REGNO (reg);
		  nregs = REG_NREGS (reg);
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs (in_regno, mode));

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg;
	      rtx_insn *store_insn = NULL;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      if (src_reg)
		{
		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
		    store_insn = new_spill_reg_store[REGNO (src_reg)];
		  else
		    src_reg = NULL_RTX;
		}
	      else
		{
		  /* If this is an optional reload, try to find the
		     source reg from an input reload.  */
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs (src_regno, mode);
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      /* NOTE(review): these two operate on src_regno, not
			 src_regno + k, unlike the rest of this loop; this
			 matches long-standing upstream behavior, but looks
			 suspicious for multi-register values — confirm
			 before changing.  */
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      int k, out_nregs = hard_regno_nregs (out_regno, mode);

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  reg_reloaded_dead |= reg_reloaded_died;
}
8382 
8383 /* Go through the motions to emit INSN and test if it is strictly valid.
8384    Return the emitted insn if valid, else return NULL.  */
8385 
8386 static rtx_insn *
emit_insn_if_valid_for_reload(rtx pat)8387 emit_insn_if_valid_for_reload (rtx pat)
8388 {
8389   rtx_insn *last = get_last_insn ();
8390   int code;
8391 
8392   rtx_insn *insn = emit_insn (pat);
8393   code = recog_memoized (insn);
8394 
8395   if (code >= 0)
8396     {
8397       extract_insn (insn);
8398       /* We want constrain operands to treat this insn strictly in its
8399 	 validity determination, i.e., the way it would after reload has
8400 	 completed.  */
8401       if (constrain_operands (1, get_enabled_alternatives (insn)))
8402 	return insn;
8403     }
8404 
8405   delete_insns_since (last);
8406   return NULL;
8407 }
8408 
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
  rtx tem1, tem2;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Parts of IN may be scheduled for separate reloads; substitute the
	 replacement rtxes before operating on the operands.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First attempt: a single three-operand add insn.  */
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Recurse to move OP0 into the reload register.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      /* Second attempt: OUT = OUT + OP1 as a two-address add.  */
      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      /* OP1 was just reloaded into OUT above, so OUT must not overlap
	 OP0 or the add below would read a clobbered value.  */
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && targetm.secondary_memory_needed (GET_MODE (out),
					       REGNO_REG_CLASS (REGNO (tem1)),
					       REGNO_REG_CLASS (REGNO (tem2))))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Move IN -> memory -> OUT, each leg handled recursively.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
						  out_moded));
      rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* No strategy worked for this unary reload; this is an ICE.  */
      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

  else if (targetm.have_reload_load_address ())
    emit_insn (targetm.gen_reload_load_address (out, in));

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (out, in));

  /* Return the first insn emitted.
     We cannot just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we cannot assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8637 
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  /* Count how many reloads of INSN account for a use of REG (inherited,
     overridden, or reload J itself).  Any other reload mentioning REG
     means the stored value is still needed, so bail out.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;

      if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);

      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count every textual occurrence of REG in INSN (including its
     CALL_INSN_FUNCTION_USAGE and any memory equivalents REG may have
     been substituted with).  Each one must be covered by inheritance.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  nregs = REG_NREGS (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs (last_reload_reg, GET_MODE (reg)); k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* The pseudo still has other uses; only delete the single dead
	 output reload (and any address reloads feeding it).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8816 
8817 /* We are going to delete DEAD_INSN.  Recursively delete loads of
8818    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8819    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8820 static void
delete_address_reloads(rtx_insn * dead_insn,rtx_insn * current_insn)8821 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8822 {
8823   rtx set = single_set (dead_insn);
8824   rtx set2, dst;
8825   rtx_insn *prev, *next;
8826   if (set)
8827     {
8828       rtx dst = SET_DEST (set);
8829       if (MEM_P (dst))
8830 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8831     }
8832   /* If we deleted the store from a reloaded post_{in,de}c expression,
8833      we can delete the matching adds.  */
8834   prev = PREV_INSN (dead_insn);
8835   next = NEXT_INSN (dead_insn);
8836   if (! prev || ! next)
8837     return;
8838   set = single_set (next);
8839   set2 = single_set (prev);
8840   if (! set || ! set2
8841       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8842       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8843       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8844     return;
8845   dst = SET_DEST (set);
8846   if (! rtx_equal_p (dst, SET_DEST (set2))
8847       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8848       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8849       || (INTVAL (XEXP (SET_SRC (set), 1))
8850 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8851     return;
8852   delete_related_insns (prev);
8853   delete_related_insns (next);
8854 }
8855 
/* Subfunction of delete_address_reloads: process registers found in X.  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For anything but a bare REG, recurse into the sub-expressions.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* An intervening use of X means its load is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must be an insn emitted by this reload pass.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* DST's load is dead: delete it, first recursing on its own source
     (which may contain further now-unused reload registers).  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
8964 
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Determine the increment rtx: for {PRE,POST}_MODIFY it is embedded in
     VALUE itself; otherwise build it from INC_AMOUNT, negating for
     decrements.  */
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, Pmode);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* Direct increment was not recognized; undo it and fall through to
	 the increment-in-RELOADREG strategies below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      /* Undo the increment in RELOADREG: add the negated constant, or
	 use a subtract when INC is not a CONST_INT.  */
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9073