1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "optabs.h"
32 #include "regs.h"
33 #include "ira.h"
34 #include "recog.h"
35 
36 #include "rtl-error.h"
37 #include "expr.h"
38 #include "addresses.h"
39 #include "cfgrtl.h"
40 #include "cfgbuild.h"
41 #include "reload.h"
42 #include "except.h"
43 #include "dumpfile.h"
44 #include "rtl-iter.h"
45 
46 /* This file contains the reload pass of the compiler, which is
47    run after register allocation has been done.  It checks that
48    each insn is valid (operands required to be in registers really
49    are in registers of the proper class) and fixes up invalid ones
50    by copying values temporarily into registers for the insns
51    that need them.
52 
53    The results of register allocation are described by the vector
54    reg_renumber; the insns still contain pseudo regs, but reg_renumber
55    can be used to find which hard reg, if any, a pseudo reg is in.
56 
57    The technique we always use is to free up a few hard regs that are
58    called ``reload regs'', and for each place where a pseudo reg
59    must be in a hard reg, copy it temporarily into one of the reload regs.
60 
61    Reload regs are allocated locally for every instruction that needs
62    reloads.  When there are pseudos which are allocated to a register that
63    has been chosen as a reload reg, such pseudos must be ``spilled''.
64    This means that they go to other hard regs, or to stack slots if no other
65    available hard regs can be found.  Spilling can invalidate more
66    insns, requiring additional need for reloads, so we must keep checking
67    until the process stabilizes.
68 
69    For machines with different classes of registers, we must keep track
70    of the register class needed for each reload, and make sure that
71    we allocate enough reload registers of each class.
72 
73    The file reload.c contains the code that checks one insn for
74    validity and reports the reloads that it needs.  This file
75    is in charge of scanning the entire rtl code, accumulating the
76    reload needs, spilling, assigning reload registers to use for
77    fixing up each insn, and generating the new insns to copy values
78    into the reload registers.  */
79 
80 struct target_reload default_target_reload;
81 #if SWITCHABLE_TARGET
82 struct target_reload *this_target_reload = &default_target_reload;
83 #endif
84 
85 #define spill_indirect_levels			\
86   (this_target_reload->x_spill_indirect_levels)
87 
88 /* During reload_as_needed, element N contains a REG rtx for the hard reg
89    into which reg N has been reloaded (perhaps for a previous insn).  */
90 static rtx *reg_last_reload_reg;
91 
92 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
93    for an output reload that stores into reg N.  */
94 static regset_head reg_has_output_reload;
95 
96 /* Indicates which hard regs are reload-registers for an output reload
97    in the current insn.  */
98 static HARD_REG_SET reg_is_output_reload;
99 
100 /* Widest mode in which each pseudo reg is referred to (via subreg).  */
101 static machine_mode *reg_max_ref_mode;
102 
103 /* Vector to remember old contents of reg_renumber before spilling.  */
104 static short *reg_old_renumber;
105 
106 /* During reload_as_needed, element N contains the last pseudo regno reloaded
107    into hard register N.  If that pseudo reg occupied more than one register,
108    reg_reloaded_contents points to that pseudo for each spill register in
109    use; all of these must remain set for an inheritance to occur.  */
110 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
111 
112 /* During reload_as_needed, element N contains the insn for which
113    hard register N was last used.   Its contents are significant only
114    when reg_reloaded_valid is set for this register.  */
115 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
116 
117 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
118 static HARD_REG_SET reg_reloaded_valid;
119 /* Indicate if the register was dead at the end of the reload.
120    This is only valid if reg_reloaded_contents is set and valid.  */
121 static HARD_REG_SET reg_reloaded_dead;
122 
123 /* Indicate whether the register's current value is one that is not
124    safe to retain across a call, even for registers that are normally
125    call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
126 static HARD_REG_SET reg_reloaded_call_part_clobbered;
127 
128 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
129 static int n_spills;
130 
131 /* In parallel with spill_regs, contains REG rtx's for those regs.
132    Holds the last rtx used for any given reg, or 0 if it has never
133    been used for spilling yet.  This rtx is reused, provided it has
134    the proper mode.  */
135 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
136 
137 /* In parallel with spill_regs, contains nonzero for a spill reg
138    that was stored after the last time it was used.
139    The precise value is the insn generated to do the store.  */
140 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
141 
142 /* This is the register that was stored with spill_reg_store.  This is a
143    copy of reload_out / reload_out_reg when the value was stored; if
144    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
145 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
146 
147 /* This table is the inverse mapping of spill_regs:
148    indexed by hard reg number,
149    it contains the position of that reg in spill_regs,
150    or -1 for something that is not in spill_regs.
151 
152    ?!?  This is no longer accurate.  */
153 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 
155 /* This reg set indicates registers that can't be used as spill registers for
156    the currently processed insn.  These are the hard registers which are live
157    during the insn, but not allocated to pseudos, as well as fixed
158    registers.  */
159 static HARD_REG_SET bad_spill_regs;
160 
161 /* These are the hard registers that can't be used as spill register for any
162    insn.  This includes registers used for user variables and registers that
163    we can't eliminate.  A register that appears in this set also can't be used
164    to retry register allocation.  */
165 static HARD_REG_SET bad_spill_regs_global;
166 
167 /* Describes order of use of registers for reloading
168    of spilled pseudo-registers.  `n_spills' is the number of
169    elements that are actually valid; new ones are added at the end.
170 
171    Both spill_regs and spill_reg_order are used on two occasions:
172    once during find_reload_regs, where they keep track of the spill registers
173    for a single insn, but also during reload_as_needed where they show all
174    the registers ever used by reload.  For the latter case, the information
175    is calculated during finish_spills.  */
176 static short spill_regs[FIRST_PSEUDO_REGISTER];
177 
178 /* This vector of reg sets indicates, for each pseudo, which hard registers
179    may not be used for retrying global allocation because the register was
180    formerly spilled from one of them.  If we allowed reallocating a pseudo to
181    a register that it was already allocated to, reload might not
182    terminate.  */
183 static HARD_REG_SET *pseudo_previous_regs;
184 
185 /* This vector of reg sets indicates, for each pseudo, which hard
186    registers may not be used for retrying global allocation because they
187    are used as spill registers during one of the insns in which the
188    pseudo is live.  */
189 static HARD_REG_SET *pseudo_forbidden_regs;
190 
191 /* All hard regs that have been used as spill registers for any insn are
192    marked in this set.  */
193 static HARD_REG_SET used_spill_regs;
194 
195 /* Index of last register assigned as a spill register.  We allocate in
196    a round-robin fashion.  */
197 static int last_spill_reg;
198 
199 /* Record the stack slot for each spilled hard register.  */
200 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
201 
202 /* Width allocated so far for that stack slot.  */
203 static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
204 
205 /* Record which pseudos needed to be spilled.  */
206 static regset_head spilled_pseudos;
207 
208 /* Record which pseudos changed their allocation in finish_spills.  */
209 static regset_head changed_allocation_pseudos;
210 
211 /* Used for communication between order_regs_for_reload and count_pseudo.
212    Used to avoid counting one pseudo twice.  */
213 static regset_head pseudos_counted;
214 
215 /* First uid used by insns created by reload in this function.
216    Used in find_equiv_reg.  */
217 int reload_first_uid;
218 
219 /* Flag set by local-alloc or global-alloc if anything is live in
220    a call-clobbered reg across calls.  */
221 int caller_save_needed;
222 
223 /* Set to 1 while reload_as_needed is operating.
224    Required by some machines to handle any generated moves differently.  */
225 int reload_in_progress = 0;
226 
227 /* This obstack is used for allocation of rtl during register elimination.
228    The allocated storage can be freed once find_reloads has processed the
229    insn.  */
230 static struct obstack reload_obstack;
231 
232 /* Points to the beginning of the reload_obstack.  All insn_chain structures
233    are allocated first.  */
234 static char *reload_startobj;
235 
236 /* The point after all insn_chain structures.  Used to quickly deallocate
237    memory allocated in copy_reloads during calculate_needs_all_insns.  */
238 static char *reload_firstobj;
239 
240 /* This points before all local rtl generated by register elimination.
241    Used to quickly free all memory after processing one insn.  */
242 static char *reload_insn_firstobj;
243 
244 /* List of insn_chain instructions, one for every insn that reload needs to
245    examine.  */
246 struct insn_chain *reload_insn_chain;
247 
248 /* TRUE if we potentially left dead insns in the insn stream and want to
249    run DCE immediately after reload, FALSE otherwise.  */
250 static bool need_dce;
251 
252 /* List of all insns needing reloads.  */
253 static struct insn_chain *insns_need_reload;
254 
255 /* This structure is used to record information about register eliminations.
256    Each array entry describes one possible way of eliminating a register
257    in favor of another.   If there is more than one way of eliminating a
258    particular register, the most preferred should be specified first.  */
259 
260 struct elim_table
261 {
262   int from;			/* Register number to be eliminated.  */
263   int to;			/* Register number used as replacement.  */
264   poly_int64_pod initial_offset; /* Initial difference between values.  */
265   int can_eliminate;		/* Nonzero if this elimination can be done.  */
266   int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
267 				   target hook in previous scan over insns
268 				   made by reload.  */
269   poly_int64_pod offset;	/* Current offset between the two regs.  */
270   poly_int64_pod previous_offset; /* Offset at end of previous insn.  */
271   int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
272   rtx from_rtx;			/* REG rtx for the register to be eliminated.
273 				   We cannot simply compare the number since
274 				   we might then spuriously replace a hard
275 				   register corresponding to a pseudo
276 				   assigned to the reg to be eliminated.  */
277   rtx to_rtx;			/* REG rtx for the replacement.  */
278 };
279 
280 static struct elim_table *reg_eliminate = 0;
281 
282 /* This is an intermediate structure to initialize the table.  It has
283    exactly the members provided by ELIMINABLE_REGS.  */
284 static const struct elim_table_1
285 {
286   const int from;
287   const int to;
288 } reg_eliminate_1[] =
289 
290   ELIMINABLE_REGS;
291 
292 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
293 
294 /* Record the number of pending eliminations that have an offset not equal
295    to their initial offset.  If nonzero, we use a new copy of each
296    replacement result in any insns encountered.  */
297 int num_not_at_initial_offset;
298 
299 /* Count the number of registers that we may be able to eliminate.  */
300 static int num_eliminable;
301 /* And the number of registers that are equivalent to a constant that
302    can be eliminated to frame_pointer / arg_pointer + constant.  */
303 static int num_eliminable_invariants;
304 
305 /* For each label, we record the offset of each elimination.  If we reach
306    a label by more than one path and an offset differs, we cannot do the
307    elimination.  This information is indexed by the difference of the
308    number of the label and the first label number.  We can't offset the
309    pointer itself as this can cause problems on machines with segmented
310    memory.  The first table is an array of flags that records whether we
311    have yet encountered a label and the second table is an array of arrays,
312    one entry in the latter array for each elimination.  */
313 
314 static int first_label_num;
315 static char *offsets_known_at;
316 static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS];
317 
318 vec<reg_equivs_t, va_gc> *reg_equivs;
319 
320 /* Stack of addresses where an rtx has been changed.  We can undo the
321    changes by popping items off the stack and restoring the original
322    value at each location.
323 
324    We use this simplistic undo capability rather than copy_rtx as copy_rtx
325    will not make a deep copy of a normally sharable rtx, such as
326    (const (plus (symbol_ref) (const_int))).  If such an expression appears
327    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
328    rtx expression would be changed.  See PR 42431.  */
329 
330 typedef rtx *rtx_p;
331 static vec<rtx_p> substitute_stack;
332 
333 /* Number of labels in the current function.  */
334 
335 static int num_labels;
336 
337 static void replace_pseudos_in (rtx *, machine_mode, rtx);
338 static void maybe_fix_stack_asms (void);
339 static void copy_reloads (struct insn_chain *);
340 static void calculate_needs_all_insns (int);
341 static int find_reg (struct insn_chain *, int);
342 static void find_reload_regs (struct insn_chain *);
343 static void select_reload_regs (void);
344 static void delete_caller_save_insns (void);
345 
346 static void spill_failure (rtx_insn *, enum reg_class);
347 static void count_spilled_pseudo (int, int, int);
348 static void delete_dead_insn (rtx_insn *);
349 static void alter_reg (int, int, bool);
350 static void set_label_offsets (rtx, rtx_insn *, int);
351 static void check_eliminable_occurrences (rtx);
352 static void elimination_effects (rtx, machine_mode);
353 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
354 static int eliminate_regs_in_insn (rtx_insn *, int);
355 static void update_eliminable_offsets (void);
356 static void mark_not_eliminable (rtx, const_rtx, void *);
357 static void set_initial_elim_offsets (void);
358 static bool verify_initial_elim_offsets (void);
359 static void set_initial_label_offsets (void);
360 static void set_offsets_for_label (rtx_insn *);
361 static void init_eliminable_invariants (rtx_insn *, bool);
362 static void init_elim_table (void);
363 static void free_reg_equiv (void);
364 static void update_eliminables (HARD_REG_SET *);
365 static bool update_eliminables_and_spill (void);
366 static void elimination_costs_in_insn (rtx_insn *);
367 static void spill_hard_reg (unsigned int, int);
368 static int finish_spills (int);
369 static void scan_paradoxical_subregs (rtx);
370 static void count_pseudo (int);
371 static void order_regs_for_reload (struct insn_chain *);
372 static void reload_as_needed (int);
373 static void forget_old_reloads_1 (rtx, const_rtx, void *);
374 static void forget_marked_reloads (regset);
375 static int reload_reg_class_lower (const void *, const void *);
376 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
377 				    machine_mode);
378 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
379 				     machine_mode);
380 static int reload_reg_free_p (unsigned int, int, enum reload_type);
381 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
382 					rtx, rtx, int, int);
383 static int free_for_value_p (int, machine_mode, int, enum reload_type,
384 			     rtx, rtx, int, int);
385 static int allocate_reload_reg (struct insn_chain *, int, int);
386 static int conflicts_with_override (rtx);
387 static void failed_reload (rtx_insn *, int);
388 static int set_reload_reg (int, int);
389 static void choose_reload_regs_init (struct insn_chain *, rtx *);
390 static void choose_reload_regs (struct insn_chain *);
391 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
392 				     rtx, int);
393 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
394 				      int);
395 static void do_input_reload (struct insn_chain *, struct reload *, int);
396 static void do_output_reload (struct insn_chain *, struct reload *, int);
397 static void emit_reload_insns (struct insn_chain *);
398 static void delete_output_reload (rtx_insn *, int, int, rtx);
399 static void delete_address_reloads (rtx_insn *, rtx_insn *);
400 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
401 static void inc_for_reload (rtx, rtx, rtx, poly_int64);
402 static void add_auto_inc_notes (rtx_insn *, rtx);
403 static void substitute (rtx *, const_rtx, rtx);
404 static bool gen_reload_chain_without_interm_reg_p (int, int);
405 static int reloads_conflict (int, int);
406 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
407 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
408 
409 /* Initialize the reload pass.  This is called at the beginning of compilation
410    and may be called again if the target is reinitialized.  */
411 
412 void
init_reload(void)413 init_reload (void)
414 {
415   int i;
416 
417   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
418      Set spill_indirect_levels to the number of levels such addressing is
419      permitted, zero if it is not permitted at all.  */
420 
421   rtx tem
422     = gen_rtx_MEM (Pmode,
423 		   gen_rtx_PLUS (Pmode,
424 				 gen_rtx_REG (Pmode,
425 					      LAST_VIRTUAL_REGISTER + 1),
426 				 gen_int_mode (4, Pmode)));
427   spill_indirect_levels = 0;
428 
429   while (memory_address_p (QImode, tem))
430     {
431       spill_indirect_levels++;
432       tem = gen_rtx_MEM (Pmode, tem);
433     }
434 
435   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
436 
437   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
438   indirect_symref_ok = memory_address_p (QImode, tem);
439 
440   /* See if reg+reg is a valid (and offsettable) address.  */
441 
442   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
443     {
444       tem = gen_rtx_PLUS (Pmode,
445 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
446 			  gen_rtx_REG (Pmode, i));
447 
448       /* This way, we make sure that reg+reg is an offsettable address.  */
449       tem = plus_constant (Pmode, tem, 4);
450 
451       for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
452 	if (!double_reg_address_ok[mode]
453 	    && memory_address_p ((enum machine_mode)mode, tem))
454 	  double_reg_address_ok[mode] = 1;
455     }
456 
457   /* Initialize obstack for our rtl allocation.  */
458   if (reload_startobj == NULL)
459     {
460       gcc_obstack_init (&reload_obstack);
461       reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
462     }
463 
464   INIT_REG_SET (&spilled_pseudos);
465   INIT_REG_SET (&changed_allocation_pseudos);
466   INIT_REG_SET (&pseudos_counted);
467 }
468 
469 /* List of insn chains that are currently unused.  */
470 static struct insn_chain *unused_insn_chains = 0;
471 
472 /* Allocate an empty insn_chain structure.  */
473 struct insn_chain *
new_insn_chain(void)474 new_insn_chain (void)
475 {
476   struct insn_chain *c;
477 
478   if (unused_insn_chains == 0)
479     {
480       c = XOBNEW (&reload_obstack, struct insn_chain);
481       INIT_REG_SET (&c->live_throughout);
482       INIT_REG_SET (&c->dead_or_set);
483     }
484   else
485     {
486       c = unused_insn_chains;
487       unused_insn_chains = c->next;
488     }
489   c->is_caller_save_insn = 0;
490   c->need_operand_change = 0;
491   c->need_reload = 0;
492   c->need_elim = 0;
493   return c;
494 }
495 
/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */

void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;
  reg_set_iterator rsi;

  /* Iterate over the pseudos in FROM; starting the walk at
     FIRST_PSEUDO_REGISTER skips any hard registers in the set.  */
  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
    {
      int r = reg_renumber[regno];

      if (r < 0)
	{
	  /* reload_combine uses the information from DF_LIVE_IN,
	     which might still contain registers that have not
	     actually been allocated since they have an
	     equivalence.  */
	  gcc_assert (ira_conflicts_p || reload_completed);
	}
      else
	/* Mark every hard register the pseudo occupies; the pseudo's
	   mode determines how many consecutive regs that is.  */
	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
    }
}
521 
/* Replace all pseudos found in LOC with their corresponding
   equivalences.  MEM_MODE is the mode of an enclosing MEM rtx, or
   VOIDmode when not inside a MEM; USAGE is passed through to the
   elimination machinery.  */

static void
replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      /* Hard registers have no equivalences; leave them untouched.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	return;

      /* First apply register eliminations.  If that produced a new rtx,
	 install it and rescan from scratch, since the replacement may
	 itself contain pseudos.  */
      x = eliminate_regs_1 (x, mem_mode, usage, true, false);
      if (x != *loc)
	{
	  *loc = x;
	  replace_pseudos_in (loc, mem_mode, usage);
	  return;
	}

      /* Substitute the recorded equivalence, preferring (in order) a
	 constant, an invariant expression, an equivalent MEM, or an
	 equivalent address wrapped in a new MEM.  */
      if (reg_equiv_constant (regno))
	*loc = reg_equiv_constant (regno);
      else if (reg_equiv_invariant (regno))
	*loc = reg_equiv_invariant (regno);
      else if (reg_equiv_mem (regno))
	*loc = reg_equiv_mem (regno);
      else if (reg_equiv_address (regno))
	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
      else
	{
	  /* No equivalence at all: the pseudo must at least map to a
	     different rtx than the shared regno_reg_rtx entry, or we
	     would loop forever re-installing the same REG.  */
	  gcc_assert (!REG_P (regno_reg_rtx[regno])
		      || REGNO (regno_reg_rtx[regno]) != regno);
	  *loc = regno_reg_rtx[regno];
	}

      return;
    }
  else if (code == MEM)
    {
      /* Recurse into the address, recording this MEM's mode so address
	 replacements know their context.  */
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}
584 
585 /* Determine if the current function has an exception receiver block
586    that reaches the exit block via non-exceptional edges  */
587 
588 static bool
has_nonexceptional_receiver(void)589 has_nonexceptional_receiver (void)
590 {
591   edge e;
592   edge_iterator ei;
593   basic_block *tos, *worklist, bb;
594 
595   /* If we're not optimizing, then just err on the safe side.  */
596   if (!optimize)
597     return true;
598 
599   /* First determine which blocks can reach exit via normal paths.  */
600   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
601 
602   FOR_EACH_BB_FN (bb, cfun)
603     bb->flags &= ~BB_REACHABLE;
604 
605   /* Place the exit block on our worklist.  */
606   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
607   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
608 
609   /* Iterate: find everything reachable from what we've already seen.  */
610   while (tos != worklist)
611     {
612       bb = *--tos;
613 
614       FOR_EACH_EDGE (e, ei, bb->preds)
615 	if (!(e->flags & EDGE_ABNORMAL))
616 	  {
617 	    basic_block src = e->src;
618 
619 	    if (!(src->flags & BB_REACHABLE))
620 	      {
621 		src->flags |= BB_REACHABLE;
622 		*tos++ = src;
623 	      }
624 	  }
625     }
626   free (worklist);
627 
628   /* Now see if there's a reachable block with an exceptional incoming
629      edge.  */
630   FOR_EACH_BB_FN (bb, cfun)
631     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
632       return true;
633 
634   /* No exceptional block reached exit unexceptionally.  */
635   return false;
636 }
637 
638 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
639    zero elements) to MAX_REG_NUM elements.
640 
641    Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
642 void
grow_reg_equivs(void)643 grow_reg_equivs (void)
644 {
645   int old_size = vec_safe_length (reg_equivs);
646   int max_regno = max_reg_num ();
647   int i;
648   reg_equivs_t ze;
649 
650   memset (&ze, 0, sizeof (reg_equivs_t));
651   vec_safe_reserve (reg_equivs, max_regno);
652   for (i = old_size; i < max_regno; i++)
653     reg_equivs->quick_insert (i, ze);
654 }
655 
656 
657 /* Global variables used by reload and its subroutines.  */
658 
659 /* The current basic block while in calculate_elim_costs_all_insns.  */
660 static basic_block elim_bb;
661 
662 /* Set during calculate_needs if an insn needs register elimination.  */
663 static int something_needs_elimination;
664 /* Set during calculate_needs if an insn needs an operand changed.  */
665 static int something_needs_operands_changed;
666 /* Set by alter_regs if we spilled a register to the stack.  */
667 static bool something_was_spilled;
668 
669 /* Nonzero means we couldn't get enough spill regs.  */
670 static int failure;
671 
672 /* Temporary array of pseudo-register number.  */
673 static int *temp_pseudo_reg_arr;
674 
675 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
676    If that insn didn't set the register (i.e., it copied the register to
677    memory), just delete that insn instead of the equivalencing insn plus
678    anything now dead.  If we call delete_dead_insn on that insn, we may
679    delete the insn that actually sets the register if the register dies
680    there and that is incorrect.  */
681 static void
remove_init_insns()682 remove_init_insns ()
683 {
684   for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
685     {
686       if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
687 	{
688 	  rtx list;
689 	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
690 	    {
691 	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
692 
693 	      /* If we already deleted the insn or if it may trap, we can't
694 		 delete it.  The latter case shouldn't happen, but can
695 		 if an insn has a variable address, gets a REG_EH_REGION
696 		 note added to it, and then gets converted into a load
697 		 from a constant address.  */
698 	      if (NOTE_P (equiv_insn)
699 		  || can_throw_internal (equiv_insn))
700 		;
701 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
702 		delete_dead_insn (equiv_insn);
703 	      else
704 		SET_INSN_DELETED (equiv_insn);
705 	    }
706 	}
707     }
708 }
709 
710 /* Return true if remove_init_insns will delete INSN.  */
711 static bool
will_delete_init_insn_p(rtx_insn * insn)712 will_delete_init_insn_p (rtx_insn *insn)
713 {
714   rtx set = single_set (insn);
715   if (!set || !REG_P (SET_DEST (set)))
716     return false;
717   unsigned regno = REGNO (SET_DEST (set));
718 
719   if (can_throw_internal (insn))
720     return false;
721 
722   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
723     return false;
724 
725   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
726     {
727       rtx equiv_insn = XEXP (list, 0);
728       if (equiv_insn == insn)
729 	return true;
730     }
731   return false;
732 }
733 
734 /* Main entry point for the reload pass.
735 
736    FIRST is the first insn of the function being compiled.
737 
738    GLOBAL nonzero means we were called from global_alloc
739    and should attempt to reallocate any pseudoregs that we
740    displace from hard regs we will use for reloads.
741    If GLOBAL is zero, we do not have enough information to do that,
742    so any pseudo reg that is spilled must go to the stack.
743 
744    Return value is TRUE if reload likely left dead insns in the
745    stream and a DCE pass should be run to elimiante them.  Else the
746    return value is FALSE.  */
747 
748 bool
reload(rtx_insn * first,int global)749 reload (rtx_insn *first, int global)
750 {
751   int i, n;
752   rtx_insn *insn;
753   struct elim_table *ep;
754   basic_block bb;
755   bool inserted;
756 
757   /* Make sure even insns with volatile mem refs are recognizable.  */
758   init_recog ();
759 
760   failure = 0;
761 
762   reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
763 
764   /* Make sure that the last insn in the chain
765      is not something that needs reloading.  */
766   emit_note (NOTE_INSN_DELETED);
767 
768   /* Enable find_equiv_reg to distinguish insns made by reload.  */
769   reload_first_uid = get_max_uid ();
770 
771   /* Initialize the secondary memory table.  */
772   clear_secondary_mem ();
773 
774   /* We don't have a stack slot for any spill reg yet.  */
775   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
776   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
777 
778   /* Initialize the save area information for caller-save, in case some
779      are needed.  */
780   init_save_areas ();
781 
782   /* Compute which hard registers are now in use
783      as homes for pseudo registers.
784      This is done here rather than (eg) in global_alloc
785      because this point is reached even if not optimizing.  */
786   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
787     mark_home_live (i);
788 
789   /* A function that has a nonlocal label that can reach the exit
790      block via non-exceptional paths must save all call-saved
791      registers.  */
792   if (cfun->has_nonlocal_label
793       && has_nonexceptional_receiver ())
794     crtl->saves_all_registers = 1;
795 
796   if (crtl->saves_all_registers)
797     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
798       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
799 	df_set_regs_ever_live (i, true);
800 
801   /* Find all the pseudo registers that didn't get hard regs
802      but do have known equivalent constants or memory slots.
803      These include parameters (known equivalent to parameter slots)
804      and cse'd or loop-moved constant memory addresses.
805 
806      Record constant equivalents in reg_equiv_constant
807      so they will be substituted by find_reloads.
808      Record memory equivalents in reg_mem_equiv so they can
809      be substituted eventually by altering the REG-rtx's.  */
810 
811   grow_reg_equivs ();
812   reg_old_renumber = XCNEWVEC (short, max_regno);
813   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
814   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
815   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
816 
817   CLEAR_HARD_REG_SET (bad_spill_regs_global);
818 
819   init_eliminable_invariants (first, true);
820   init_elim_table ();
821 
822   /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
823      stack slots to the pseudos that lack hard regs or equivalents.
824      Do not touch virtual registers.  */
825 
826   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
827   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
828     temp_pseudo_reg_arr[n++] = i;
829 
830   if (ira_conflicts_p)
831     /* Ask IRA to order pseudo-registers for better stack slot
832        sharing.  */
833     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
834 
835   for (i = 0; i < n; i++)
836     alter_reg (temp_pseudo_reg_arr[i], -1, false);
837 
838   /* If we have some registers we think can be eliminated, scan all insns to
839      see if there is an insn that sets one of these registers to something
840      other than itself plus a constant.  If so, the register cannot be
841      eliminated.  Doing this scan here eliminates an extra pass through the
842      main reload loop in the most common case where register elimination
843      cannot be done.  */
844   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
845     if (INSN_P (insn))
846       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
847 
848   maybe_fix_stack_asms ();
849 
850   insns_need_reload = 0;
851   something_needs_elimination = 0;
852 
853   /* Initialize to -1, which means take the first spill register.  */
854   last_spill_reg = -1;
855 
856   /* Spill any hard regs that we know we can't eliminate.  */
857   CLEAR_HARD_REG_SET (used_spill_regs);
858   /* There can be multiple ways to eliminate a register;
859      they should be listed adjacently.
860      Elimination for any register fails only if all possible ways fail.  */
861   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
862     {
863       int from = ep->from;
864       int can_eliminate = 0;
865       do
866 	{
867           can_eliminate |= ep->can_eliminate;
868           ep++;
869 	}
870       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
871       if (! can_eliminate)
872 	spill_hard_reg (from, 1);
873     }
874 
875   if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
876     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
877 
878   finish_spills (global);
879 
880   /* From now on, we may need to generate moves differently.  We may also
881      allow modifications of insns which cause them to not be recognized.
882      Any such modifications will be cleaned up during reload itself.  */
883   reload_in_progress = 1;
884 
885   /* This loop scans the entire function each go-round
886      and repeats until one repetition spills no additional hard regs.  */
887   for (;;)
888     {
889       int something_changed;
890       poly_int64 starting_frame_size;
891 
892       starting_frame_size = get_frame_size ();
893       something_was_spilled = false;
894 
895       set_initial_elim_offsets ();
896       set_initial_label_offsets ();
897 
898       /* For each pseudo register that has an equivalent location defined,
899 	 try to eliminate any eliminable registers (such as the frame pointer)
900 	 assuming initial offsets for the replacement register, which
901 	 is the normal case.
902 
903 	 If the resulting location is directly addressable, substitute
904 	 the MEM we just got directly for the old REG.
905 
906 	 If it is not addressable but is a constant or the sum of a hard reg
907 	 and constant, it is probably not addressable because the constant is
908 	 out of range, in that case record the address; we will generate
909 	 hairy code to compute the address in a register each time it is
910 	 needed.  Similarly if it is a hard register, but one that is not
911 	 valid as an address register.
912 
913 	 If the location is not addressable, but does not have one of the
914 	 above forms, assign a stack slot.  We have to do this to avoid the
915 	 potential of producing lots of reloads if, e.g., a location involves
916 	 a pseudo that didn't get a hard register and has an equivalent memory
917 	 location that also involves a pseudo that didn't get a hard register.
918 
919 	 Perhaps at some point we will improve reload_when_needed handling
920 	 so this problem goes away.  But that's very hairy.  */
921 
922       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
923 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
924 	  {
925 	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
926 				    NULL_RTX);
927 
928 	    if (strict_memory_address_addr_space_p
929 		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
930 		   MEM_ADDR_SPACE (x)))
931 	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
932 	    else if (CONSTANT_P (XEXP (x, 0))
933 		     || (REG_P (XEXP (x, 0))
934 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
935 		     || (GET_CODE (XEXP (x, 0)) == PLUS
936 			 && REG_P (XEXP (XEXP (x, 0), 0))
937 			 && (REGNO (XEXP (XEXP (x, 0), 0))
938 			     < FIRST_PSEUDO_REGISTER)
939 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
940 	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
941 	    else
942 	      {
943 		/* Make a new stack slot.  Then indicate that something
944 		   changed so we go back and recompute offsets for
945 		   eliminable registers because the allocation of memory
946 		   below might change some offset.  reg_equiv_{mem,address}
947 		   will be set up for this pseudo on the next pass around
948 		   the loop.  */
949 		reg_equiv_memory_loc (i) = 0;
950 		reg_equiv_init (i) = 0;
951 		alter_reg (i, -1, true);
952 	      }
953 	  }
954 
955       if (caller_save_needed)
956 	setup_save_areas ();
957 
958       if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
959 	{
960 	  /* If we have a stack frame, we must align it now.  The
961 	     stack size may be a part of the offset computation for
962 	     register elimination.  So if this changes the stack size,
963 	     then repeat the elimination bookkeeping.  We don't
964 	     realign when there is no stack, as that will cause a
965 	     stack frame when none is needed should
966 	     TARGET_STARTING_FRAME_OFFSET not be already aligned to
967 	     STACK_BOUNDARY.  */
968 	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
969 	}
970       /* If we allocated another stack slot, redo elimination bookkeeping.  */
971       if (something_was_spilled
972 	  || maybe_ne (starting_frame_size, get_frame_size ()))
973 	{
974 	  if (update_eliminables_and_spill ())
975 	    finish_spills (0);
976 	  continue;
977 	}
978 
979       if (caller_save_needed)
980 	{
981 	  save_call_clobbered_regs ();
982 	  /* That might have allocated new insn_chain structures.  */
983 	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
984 	}
985 
986       calculate_needs_all_insns (global);
987 
988       if (! ira_conflicts_p)
989 	/* Don't do it for IRA.  We need this info because we don't
990 	   change live_throughout and dead_or_set for chains when IRA
991 	   is used.  */
992 	CLEAR_REG_SET (&spilled_pseudos);
993 
994       something_changed = 0;
995 
996       /* If we allocated any new memory locations, make another pass
997 	 since it might have changed elimination offsets.  */
998       if (something_was_spilled
999 	  || maybe_ne (starting_frame_size, get_frame_size ()))
1000 	something_changed = 1;
1001 
1002       /* Even if the frame size remained the same, we might still have
1003 	 changed elimination offsets, e.g. if find_reloads called
1004 	 force_const_mem requiring the back end to allocate a constant
1005 	 pool base register that needs to be saved on the stack.  */
1006       else if (!verify_initial_elim_offsets ())
1007 	something_changed = 1;
1008 
1009       if (update_eliminables_and_spill ())
1010 	{
1011 	  finish_spills (0);
1012 	  something_changed = 1;
1013 	}
1014       else
1015 	{
1016 	  select_reload_regs ();
1017 	  if (failure)
1018 	    goto failed;
1019 	  if (insns_need_reload)
1020 	    something_changed |= finish_spills (global);
1021 	}
1022 
1023       if (! something_changed)
1024 	break;
1025 
1026       if (caller_save_needed)
1027 	delete_caller_save_insns ();
1028 
1029       obstack_free (&reload_obstack, reload_firstobj);
1030     }
1031 
1032   /* If global-alloc was run, notify it of any register eliminations we have
1033      done.  */
1034   if (global)
1035     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1036       if (ep->can_eliminate)
1037 	mark_elimination (ep->from, ep->to);
1038 
1039   remove_init_insns ();
1040 
1041   /* Use the reload registers where necessary
1042      by generating move instructions to move the must-be-register
1043      values into or out of the reload registers.  */
1044 
1045   if (insns_need_reload != 0 || something_needs_elimination
1046       || something_needs_operands_changed)
1047     {
1048       poly_int64 old_frame_size = get_frame_size ();
1049 
1050       reload_as_needed (global);
1051 
1052       gcc_assert (known_eq (old_frame_size, get_frame_size ()));
1053 
1054       gcc_assert (verify_initial_elim_offsets ());
1055     }
1056 
1057   /* If we were able to eliminate the frame pointer, show that it is no
1058      longer live at the start of any basic block.  If it is live by
1059      virtue of being in a pseudo, that pseudo will be marked live
1060      and hence the frame pointer will be known to be live via that
1061      pseudo.  */
1062 
1063   if (! frame_pointer_needed)
1064     FOR_EACH_BB_FN (bb, cfun)
1065       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1066 
1067   /* Come here (with failure set nonzero) if we can't get enough spill
1068      regs.  */
1069  failed:
1070 
1071   CLEAR_REG_SET (&changed_allocation_pseudos);
1072   CLEAR_REG_SET (&spilled_pseudos);
1073   reload_in_progress = 0;
1074 
1075   /* Now eliminate all pseudo regs by modifying them into
1076      their equivalent memory references.
1077      The REG-rtx's for the pseudos are modified in place,
1078      so all insns that used to refer to them now refer to memory.
1079 
1080      For a reg that has a reg_equiv_address, all those insns
1081      were changed by reloading so that no insns refer to it any longer;
1082      but the DECL_RTL of a variable decl may refer to it,
1083      and if so this causes the debugging info to mention the variable.  */
1084 
1085   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1086     {
1087       rtx addr = 0;
1088 
1089       if (reg_equiv_mem (i))
1090 	addr = XEXP (reg_equiv_mem (i), 0);
1091 
1092       if (reg_equiv_address (i))
1093 	addr = reg_equiv_address (i);
1094 
1095       if (addr)
1096 	{
1097 	  if (reg_renumber[i] < 0)
1098 	    {
1099 	      rtx reg = regno_reg_rtx[i];
1100 
1101 	      REG_USERVAR_P (reg) = 0;
1102 	      PUT_CODE (reg, MEM);
1103 	      XEXP (reg, 0) = addr;
1104 	      if (reg_equiv_memory_loc (i))
1105 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1106 	      else
1107 		MEM_ATTRS (reg) = 0;
1108 	      MEM_NOTRAP_P (reg) = 1;
1109 	    }
1110 	  else if (reg_equiv_mem (i))
1111 	    XEXP (reg_equiv_mem (i), 0) = addr;
1112 	}
1113 
1114       /* We don't want complex addressing modes in debug insns
1115 	 if simpler ones will do, so delegitimize equivalences
1116 	 in debug insns.  */
1117       if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1118 	{
1119 	  rtx reg = regno_reg_rtx[i];
1120 	  rtx equiv = 0;
1121 	  df_ref use, next;
1122 
1123 	  if (reg_equiv_constant (i))
1124 	    equiv = reg_equiv_constant (i);
1125 	  else if (reg_equiv_invariant (i))
1126 	    equiv = reg_equiv_invariant (i);
1127 	  else if (reg && MEM_P (reg))
1128 	    equiv = targetm.delegitimize_address (reg);
1129 	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1130 	    equiv = reg;
1131 
1132 	  if (equiv == reg)
1133 	    continue;
1134 
1135 	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1136 	    {
1137 	      insn = DF_REF_INSN (use);
1138 
1139 	      /* Make sure the next ref is for a different instruction,
1140 		 so that we're not affected by the rescan.  */
1141 	      next = DF_REF_NEXT_REG (use);
1142 	      while (next && DF_REF_INSN (next) == insn)
1143 		next = DF_REF_NEXT_REG (next);
1144 
1145 	      if (DEBUG_BIND_INSN_P (insn))
1146 		{
1147 		  if (!equiv)
1148 		    {
1149 		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1150 		      df_insn_rescan_debug_internal (insn);
1151 		    }
1152 		  else
1153 		    INSN_VAR_LOCATION_LOC (insn)
1154 		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1155 					      reg, equiv);
1156 		}
1157 	    }
1158 	}
1159     }
1160 
1161   /* We must set reload_completed now since the cleanup_subreg_operands call
1162      below will re-recognize each insn and reload may have generated insns
1163      which are only valid during and after reload.  */
1164   reload_completed = 1;
1165 
1166   /* Make a pass over all the insns and delete all USEs which we inserted
1167      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1168      notes.  Delete all CLOBBER insns, except those that refer to the return
1169      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1170      from misarranging variable-array code, and simplify (subreg (reg))
1171      operands.  Strip and regenerate REG_INC notes that may have been moved
1172      around.  */
1173 
1174   for (insn = first; insn; insn = NEXT_INSN (insn))
1175     if (INSN_P (insn))
1176       {
1177 	rtx *pnote;
1178 
1179 	if (CALL_P (insn))
1180 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1181 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1182 
1183 	if ((GET_CODE (PATTERN (insn)) == USE
1184 	     /* We mark with QImode USEs introduced by reload itself.  */
1185 	     && (GET_MODE (insn) == QImode
1186 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1187 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1188 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1189 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1190 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1191 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1192 				!= stack_pointer_rtx))
1193 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1194 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1195 	  {
1196 	    delete_insn (insn);
1197 	    continue;
1198 	  }
1199 
1200 	/* Some CLOBBERs may survive until here and still reference unassigned
1201 	   pseudos with const equivalent, which may in turn cause ICE in later
1202 	   passes if the reference remains in place.  */
1203 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1204 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1205 			      VOIDmode, PATTERN (insn));
1206 
1207 	/* Discard obvious no-ops, even without -O.  This optimization
1208 	   is fast and doesn't interfere with debugging.  */
1209 	if (NONJUMP_INSN_P (insn)
1210 	    && GET_CODE (PATTERN (insn)) == SET
1211 	    && REG_P (SET_SRC (PATTERN (insn)))
1212 	    && REG_P (SET_DEST (PATTERN (insn)))
1213 	    && (REGNO (SET_SRC (PATTERN (insn)))
1214 		== REGNO (SET_DEST (PATTERN (insn)))))
1215 	  {
1216 	    delete_insn (insn);
1217 	    continue;
1218 	  }
1219 
1220 	pnote = &REG_NOTES (insn);
1221 	while (*pnote != 0)
1222 	  {
1223 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1224 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1225 		|| REG_NOTE_KIND (*pnote) == REG_INC)
1226 	      *pnote = XEXP (*pnote, 1);
1227 	    else
1228 	      pnote = &XEXP (*pnote, 1);
1229 	  }
1230 
1231 	if (AUTO_INC_DEC)
1232 	  add_auto_inc_notes (insn, PATTERN (insn));
1233 
1234 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1235 	cleanup_subreg_operands (insn);
1236 
1237 	/* Clean up invalid ASMs so that they don't confuse later passes.
1238 	   See PR 21299.  */
1239 	if (asm_noperands (PATTERN (insn)) >= 0)
1240 	  {
1241 	    extract_insn (insn);
1242 	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
1243 	      {
1244 		error_for_asm (insn,
1245 			       "%<asm%> operand has impossible constraints");
1246 		delete_insn (insn);
1247 		continue;
1248 	      }
1249 	  }
1250       }
1251 
1252   free (temp_pseudo_reg_arr);
1253 
1254   /* Indicate that we no longer have known memory locations or constants.  */
1255   free_reg_equiv ();
1256 
1257   free (reg_max_ref_mode);
1258   free (reg_old_renumber);
1259   free (pseudo_previous_regs);
1260   free (pseudo_forbidden_regs);
1261 
1262   CLEAR_HARD_REG_SET (used_spill_regs);
1263   for (i = 0; i < n_spills; i++)
1264     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1265 
1266   /* Free all the insn_chain structures at once.  */
1267   obstack_free (&reload_obstack, reload_startobj);
1268   unused_insn_chains = 0;
1269 
1270   inserted = fixup_abnormal_edges ();
1271 
1272   /* We've possibly turned single trapping insn into multiple ones.  */
1273   if (cfun->can_throw_non_call_exceptions)
1274     {
1275       auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1276       bitmap_ones (blocks);
1277       find_many_sub_basic_blocks (blocks);
1278     }
1279 
1280   if (inserted)
1281     commit_edge_insertions ();
1282 
1283   /* Replacing pseudos with their memory equivalents might have
1284      created shared rtx.  Subsequent passes would get confused
1285      by this, so unshare everything here.  */
1286   unshare_all_rtl_again (first);
1287 
1288 #ifdef STACK_BOUNDARY
1289   /* init_emit has set the alignment of the hard frame pointer
1290      to STACK_BOUNDARY.  It is very likely no longer valid if
1291      the hard frame pointer was used for register allocation.  */
1292   if (!frame_pointer_needed)
1293     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1294 #endif
1295 
1296   substitute_stack.release ();
1297 
1298   gcc_assert (bitmap_empty_p (&spilled_pseudos));
1299 
1300   reload_completed = !failure;
1301 
1302   return need_dce;
1303 }
1304 
1305 /* Yet another special case.  Unfortunately, reg-stack forces people to
1306    write incorrect clobbers in asm statements.  These clobbers must not
1307    cause the register to appear in bad_spill_regs, otherwise we'll call
1308    fatal_insn later.  We clear the corresponding regnos in the live
1309    register sets to avoid this.
1310    The whole thing is rather sick, I'm afraid.  */
1311 
static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  /* Walk every insn in the reload chain, looking for asm statements
     that clobber stack registers.  */
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest; clobbers can only appear in a
	 PARALLEL pattern, so anything else can be skipped outright.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' introduces a disabled alternative; skip forward to
		     the next ',' or the end of the constraint string.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Address constraints allow any base register; other
		     constraints map to a register class (or NO_REGS).  */
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1411 
1412 /* Copy the global variables n_reloads and rld into the corresponding elts
1413    of CHAIN.  */
1414 static void
copy_reloads(struct insn_chain * chain)1415 copy_reloads (struct insn_chain *chain)
1416 {
1417   chain->n_reloads = n_reloads;
1418   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1419   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1420   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1421 }
1422 
1423 /* Walk the chain of insns, and determine for each whether it needs reloads
1424    and/or eliminations.  Build the corresponding insns_need_reload list, and
1425    set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the obstack position; per-insn register replacements are
     discarded back to this point when an elimination is undone.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* Fetch NEXT up front: the no-op-move deletion below may unlink
	 CHAIN from the list.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the original pattern/code/notes so elimination can be
	     reverted below if it changed the insn.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Recycle the chain structure for later reuse.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      /* Save the reload descriptors and append CHAIN to the
		 insns_need_reload list.  */
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  *pprev_reload = 0;
}
1546 
1547 /* This function is called from the register allocator to set up estimates
1548    for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1549    an invariant.  The structure is similar to calculate_needs_all_insns.  */
1550 
void
calculate_elim_costs_all_insns (void)
{
  /* Per-pseudo cost (scaled by block frequency) of materializing its
     REG_EQUIV value, indexed by register number.  */
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      /* Estimate the cost of computing the equivalent
			 value after elimination, weighted by how often
			 this block executes.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the accumulated costs back to IRA so it can weigh keeping
     the equivalence against allocating a hard register.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1644 
1645 /* Comparison function for qsort to decide which of two reloads
1646    should be handled first.  *P1 and *P2 are the reload numbers.  */
1647 
1648 static int
reload_reg_class_lower(const void * r1p,const void * r2p)1649 reload_reg_class_lower (const void *r1p, const void *r2p)
1650 {
1651   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1652   int t;
1653 
1654   /* Consider required reloads before optional ones.  */
1655   t = rld[r1].optional - rld[r2].optional;
1656   if (t != 0)
1657     return t;
1658 
1659   /* Count all solitary classes before non-solitary ones.  */
1660   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1661        - (reg_class_size[(int) rld[r1].rclass] == 1));
1662   if (t != 0)
1663     return t;
1664 
1665   /* Aside from solitaires, consider all multi-reg groups first.  */
1666   t = rld[r2].nregs - rld[r1].nregs;
1667   if (t != 0)
1668     return t;
1669 
1670   /* Consider reloads in order of increasing reg-class number.  */
1671   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1672   if (t != 0)
1673     return t;
1674 
1675   /* If reloads are equally urgent, sort by reload number,
1676      so that the results of qsort leave nothing to chance.  */
1677   return r1 - r2;
1678 }
1679 
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is set only on the first hard reg for a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg, or -1 if the hard reg is free.  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1691 
/* Update the spill cost arrays, considering that pseudo REG is live.  */

static void
count_pseudo (int reg)
{
  int freq = REG_FREQ (reg);
  int r = reg_renumber[reg];
  int nregs;

  /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
  if (ira_conflicts_p && r < 0)
    return;

  /* Count each live pseudo only once, and skip pseudos that have
     already been chosen for spilling.  */
  if (REGNO_REG_SET_P (&pseudos_counted, reg)
      || REGNO_REG_SET_P (&spilled_pseudos, reg))
    return;

  SET_REGNO_REG_SET (&pseudos_counted, reg);

  gcc_assert (r >= 0);

  /* SPILL_ADD_COST is charged only against the pseudo's first hard reg;
     SPILL_COST is charged against every hard reg it occupies.  */
  spill_add_cost[r] += freq;
  nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
  while (nregs-- > 0)
    {
      hard_regno_to_pseudo_regno[r + nregs] = reg;
      spill_cost[r + nregs] += freq;
    }
}
1721 
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers are never usable as spill registers.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost accounting.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1765 
/* Vector of reload-numbers showing the order in which the reloads should
   be processed (filled in and sorted by find_reload_regs).  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  */
static HARD_REG_SET used_spill_regs_local;
1772 
/* We decided to spill hard register SPILLED, which has a size of
   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
   update SPILL_COST/SPILL_ADD_COST.  */

static void
count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
{
  int freq = REG_FREQ (reg);
  int r = reg_renumber[reg];
  int nregs;

  /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
  if (ira_conflicts_p && r < 0)
    return;

  gcc_assert (r >= 0);

  nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));

  /* Nothing to do if REG is already marked spilled, or if its hard regs
     [R, R + NREGS) do not overlap [SPILLED, SPILLED + SPILLED_NREGS).  */
  if (REGNO_REG_SET_P (&spilled_pseudos, reg)
      || spilled + spilled_nregs <= r || r + nregs <= spilled)
    return;

  SET_REGNO_REG_SET (&spilled_pseudos, reg);

  /* Undo the accounting count_pseudo did for this pseudo.  */
  spill_add_cost[r] -= freq;
  while (nregs-- > 0)
    {
      hard_regno_to_pseudo_regno[r + nregs] = -1;
      spill_cost[r + nregs] -= freq;
    }
}
1806 
/* Find reload register to use for reload number ORDER within CHAIN.
   Return 1 on success (recording the choice in rld[].regno/nregs and
   in used_spill_regs_local), 0 if no suitable hard reg was found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is bad for spilling (locally or
     globally) or outside the reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect hard regs already claimed by earlier conflicting reloads
     of this insn.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && targetm.hard_regno_mode_ok (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);

	  /* A multi-register reload needs every constituent hard reg to
	     be usable; accumulate their additional spill costs.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember the (-1-terminated) list of pseudos that
		     the new best choice would displace.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Reusing the register that already holds the reload's input
	     or output value is slightly cheaper.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs (best_reg, rl->mode);
  rl->regno = best_reg;

  /* Mark pseudos living through or set by this insn that are displaced
     by the chosen hard regs, undoing their spill-cost accounting.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1947 
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global RLD array.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record this insn's spill regs and accumulate them globally.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  /* Copy the (possibly updated) reloads back to the chain.  */
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2013 
2014 static void
select_reload_regs(void)2015 select_reload_regs (void)
2016 {
2017   struct insn_chain *chain;
2018 
2019   /* Try to satisfy the needs for each insn.  */
2020   for (chain = insns_need_reload; chain != 0;
2021        chain = chain->next_need_reload)
2022     find_reload_regs (chain);
2023 }
2024 
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Remove each maximal run of caller-save chain elements,
	 deleting the insn and returning the element to the free list.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain...  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* ...and push it onto the free list.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2055 
/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.  */

static void
spill_failure (rtx_insn *insn, enum reg_class rclass)
{
  /* For an asm statement this is a user error, reported without
     aborting; otherwise it is fatal.  */
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "can%'t find a register in class %qs while "
		   "reloading %<asm%>",
		   reg_class_names[rclass]);
  else
    {
      error ("unable to find a register to spill in class %qs",
	     reg_class_names[rclass]);

      if (dump_file)
	{
	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
	  debug_reload_to_stream (dump_file);
	}
      fatal_insn ("this is the insn:", insn);
    }
}
2079 
/* Delete an unneeded INSN and any previous insns who sole purpose is loading
   data that is dead in INSN.  */

static void
delete_dead_insn (rtx_insn *insn)
{
  rtx_insn *prev = prev_active_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn make
     a note that we want to run DCE immediately after reload.

     We used to delete the previous insn & recurse, but that's wrong for
     block local equivalences.  Instead of trying to figure out the exact
     circumstances where we can delete the potentially dead insns, just
     let DCE do the job.  */
  /* The comma expression below extracts PREV's destination before
     testing that it is a REG.  */
  if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
      && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    need_dce = 1;

  SET_INSN_DELETED (insn);
}
2106 
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      poly_uint64 inherent_size = GET_MODE_SIZE (mode);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* TOTAL_SIZE covers the widest mode the pseudo is ever
	 referenced in (e.g. via paradoxical subregs).  */
      machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
      poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
      /* ??? Seems strange to derive the minimum alignment from the size,
	 but that's the traditional behavior.  For polynomial-size modes,
	 the natural extension is to use the minimum possible size.  */
      unsigned int min_align
	= constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
      poly_int64 adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA supplied a slot to reuse, there is nothing to allocate.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocating a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && known_ge (spill_stack_slot_width[from_reg], total_size)
	       && known_ge (GET_MODE_SIZE
			    (GET_MODE (spill_stack_slot[from_reg])),
			    inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requirements to cover the slot previously shared
	     by pseudos spilled from FROM_REG, so the new slot can
	     replace it.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (partial_subreg_p (mode,
				    GET_MODE (spill_stack_slot[from_reg])))
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      total_size = ordered_max (total_size,
					spill_stack_slot_width[from_reg]);
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      adjust += subreg_size_lowpart_offset (inherent_size, total_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2285 
2286 /* Mark the slots in regs_ever_live for the hard regs used by
2287    pseudo-reg number REGNO, accessed in MODE.  */
2288 
2289 static void
mark_home_live_1(int regno,machine_mode mode)2290 mark_home_live_1 (int regno, machine_mode mode)
2291 {
2292   int i, lim;
2293 
2294   i = reg_renumber[regno];
2295   if (i < 0)
2296     return;
2297   lim = end_hard_regno (mode, i);
2298   while (i < lim)
2299     df_set_regs_ever_live (i++, true);
2300 }
2301 
2302 /* Mark the slots in regs_ever_live for the hard regs
2303    used by pseudo-reg number REGNO.  */
2304 
2305 void
mark_home_live(int regno)2306 mark_home_live (int regno)
2307 {
2308   if (reg_renumber[regno] >= 0)
2309     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2310 }
2311 
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   Offsets that disagree along different paths to a label disable the
   corresponding elimination (reg_eliminate[i].can_eliminate = 0).  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = label_ref_label (x);

      /* fall through */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
			(initial_p ? reg_eliminate[i].initial_offset
			 : reg_eliminate[i].offset)))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* fall through */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a non-label, non-PC/RETURN arm falls
	     through to the conservative handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (maybe_ne (p->offset, p->initial_offset))
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2464 
/* This function examines every reg that occurs in X and adjusts the
   costs for its elimination which are gathered by IRA.  INSN is the
   insn in which X occurs.  We do not recurse into MEM expressions.  */

static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x))
	iter.skip_subrtxes ();
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  /* Cost of materializing the eliminated form of the pseudo's
	     invariant equivalence, scaled by block frequency; IRA
	     subtracts it from the equivalence's benefit.  */
	  rtx t = reg_equiv_invariant (REGNO (x));
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2494 
2495 /* Scan X and replace any eliminable registers (such as fp) with a
2496    replacement (such as sp), plus an offset.
2497 
2498    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2499    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2500    MEM, we are allowed to replace a sum of a register and the constant zero
2501    with the register, which we cannot do outside a MEM.  In addition, we need
2502    to record the fact that a register is referenced outside a MEM.
2503 
2504    If INSN is an insn, it is the insn containing X.  If we replace a REG
2505    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2506    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2507    the REG is being modified.
2508 
2509    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2510    That's used when we eliminate in expressions stored in notes.
2511    This means, do not set ref_outside_mem even if the reference
2512    is outside of MEMs.
2513 
2514    If FOR_COSTS is true, we are being called before reload in order to
2515    estimate the costs of keeping registers with an equivalence unallocated.
2516 
2517    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2518    replacements done assuming all offsets are at their initial values.  If
2519    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2520    encounter, return the actual location so that find_reloads will do
2521    the proper thing.  */
2522 
static rtx
eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
		  bool may_use_invariant, bool for_costs)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new_rtx;
  int i, j;
  const char *fmt;
  /* Nonzero once we have lazily made a shallow copy of X, so that the
     generic operand walk at the bottom copies X at most once.  */
  int copied = 0;

  /* Outside of any function there are no eliminations to perform;
     return X unchanged.  */
  if (! current_function_decl)
    return x;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* These codes contain no registers; nothing to substitute.  */
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);

	}
      else if (reg_renumber && reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_invariant (regno))
	{
	  /* A pseudo with no hard register but with an invariant
	     equivalence: substitute the (eliminated) equivalence when
	     the context allows it.  */
	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
			             mem_mode, insn, true, for_costs);
	  /* There exists at least one use of REGNO that cannot be
	     eliminated.  Prevent the defining insn from being deleted.  */
	  reg_equiv_init (regno) = NULL;
	  if (!for_costs)
	    alter_reg (regno, -1, true);
	}
      return x;

    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to have been reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle code a reloaded PLUS operand.

       Also consider backends where the flags register is clobbered by a
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
       lea instruction comes to mind).  If we try to reload a MINUS, we
       may kill the flags register that was holding a useful value.

       So, please before trying to handle MINUS, consider reload as a
       whole instead of this little section as well as the backend issues.  */
    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0
		    && CONST_INT_P (XEXP (x, 1))
		    && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
		  return ep->to_rtx;
		else
		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
				       plus_constant (Pmode, XEXP (x, 1),
						      ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 Note that there is no risk of modifying the structure of the insn,
	 since we only get called for its operands, thus we are either
	 modifying the address inside a MEM, or something like an address
	 operand of a load-address insn.  */

      {
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				     for_costs);
	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				     for_costs);

	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && REG_P (new1)
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equivs
		&& reg_equiv_constant (REGNO (new1)) != 0)
	      new1 = reg_equiv_constant (REGNO (new1));
	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant (REGNO (new0)) != 0)
	      new0 = reg_equiv_constant (REGNO (new0));

	    new_rtx = form_sum (GET_MODE (x), new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
	    else
	      return new_rtx;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.   This case is pathological.
	 We ignore the possibility of overflow here.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (x, 1)))
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes or in DEBUG_INSNs don't count for
		     this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST
				      || DEBUG_INSN_P (insn))))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (Pmode,
			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* fall through */

    case CALL:
    case COMPARE:
    /* See comments before PLUS about handling MINUS.  */
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
	/* Generic binary operations: eliminate in both operands and
	   rebuild only if something changed.  */
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				     for_costs);
	rtx new1 = XEXP (x, 1)
	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
			      for_costs) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 0))
	    {
	      /* If this is a REG_DEAD note, it is not valid anymore.
		 Using the eliminated version could result in creating a
		 REG_DEAD note for the stack or frame pointer.  */
	      if (REG_NOTE_KIND (x) == REG_DEAD)
		return (XEXP (x, 1)
			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
					    for_costs)
			: NULL_RTX);

	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
	    }
	}

      /* fall through */

    case INSN_LIST:
    case INT_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 1))
	    return
	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
	}
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  */
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  The only remaining case we need to consider here is
	 that the increment value may be an eliminable register.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS
	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
	{
	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
					  insn, true, for_costs);

	  if (new_rtx != XEXP (XEXP (x, 1), 1))
	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
				   gen_rtx_PLUS (GET_MODE (x),
						 XEXP (x, 0), new_rtx));
	}
      return x;

    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Generic unary operations: eliminate in the single operand.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_BYTE.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reload
	 may do the replacement in certain circumstances.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	{
	  new_rtx = SUBREG_REG (x);
	}
      else
	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);

      if (new_rtx != SUBREG_REG (x))
	{
	  poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
	  poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));

	  if (MEM_P (new_rtx)
	      && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
		   /* On RISC machines, combine can create rtl of the form
		      (set (subreg:m1 (reg:m2 R) 0) ...)
		      where m1 < m2, and expects something interesting to
		      happen to the entire word.  Moreover, it will use the
		      (reg:m2 R) later, expecting all bits to be preserved.
		      So if the number of words is the same, preserve the
		      subreg so that push_reload can see it.  */
		   && !(WORD_REGISTER_OPERATIONS
			&& known_equal_after_align_down (x_size - 1,
							 new_size - 1,
							 UNITS_PER_WORD)))
		  || known_eq (x_size, new_size))
	      )
	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
	  else if (insn && GET_CODE (insn) == DEBUG_INSN)
	    return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	  else
	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	}

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */

      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
				  for_costs);
      /* When estimating costs, record addresses that were valid before
	 elimination but are no longer valid afterwards.  */
      if (for_costs
	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
	  && !memory_address_p (GET_MODE (x), new_rtx))
	note_reg_elim_costly (XEXP (x, 0), insn);

      return replace_equiv_address_nv (x, new_rtx);

    case USE:
      /* Handle insn_list USE that a call to a pure function may generate.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_USE (GET_MODE (x), new_rtx);
      return x;

    case CLOBBER:
    case ASM_OPERANDS:
      /* These are only expected here from within a debug insn (asserted
	 below); fall through to the generic operand walk.  */
      gcc_assert (insn && DEBUG_INSN_P (insn));
      break;

    case SET:
      /* A SET must never reach this function.  */
      gcc_unreachable ();

    default:
      break;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
				      for_costs);
	  /* Copy X lazily, only the first time an operand changes.  */
	  if (new_rtx != XEXP (x, i) && ! copied)
	    {
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_rtx;
	}
      else if (*fmt == 'E')
	{
	  /* Likewise for rtx vectors: copy the vector (and X) lazily.  */
	  int copied_vec = 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
					  for_costs);
	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
		{
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     XVEC (x, i)->elem);
		  if (! copied)
		    {
		      x = shallow_copy_rtx (x);
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new_rtx;
	    }
	}
    }

  return x;
}
2935 
2936 rtx
eliminate_regs(rtx x,machine_mode mem_mode,rtx insn)2937 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2938 {
2939   if (reg_eliminate == NULL)
2940     {
2941       gcc_assert (targetm.no_register_allocation);
2942       return x;
2943     }
2944   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2945 }
2946 
/* Scan rtx X for modifications of elimination target registers.  Update
   the table of eliminables to reflect the changed state.  MEM_MODE is
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.
   This only updates reg_eliminate bookkeeping (offsets, can_eliminate,
   ref_outside_mem); it never rewrites X itself.  */

static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* No registers here, hence no effects to record.  */
      return;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	/* A pseudo with no hard reg whose equivalent constant is not
	   function-invariant: scan that equivalence for effects too.  */
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    poly_int64 size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (pre|post)_modify of the form (reg = reg + const)
		   keeps the elimination valid; adjust by the constant.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
      gcc_fallthrough ();
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Generic unary operation: recurse into the single operand.  */
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory equivalence
	 will be replaced wholesale (see the SUBREG case in
	 eliminate_regs_1), so it has no effects to record here.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Neither side of a SET is inside a MEM at this point.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx and rtvec operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3149 
3150 /* Descend through rtx X and verify that no references to eliminable registers
3151    remain.  If any do remain, mark the involved register as not
3152    eliminable.  */
3153 
3154 static void
check_eliminable_occurrences(rtx x)3155 check_eliminable_occurrences (rtx x)
3156 {
3157   const char *fmt;
3158   int i;
3159   enum rtx_code code;
3160 
3161   if (x == 0)
3162     return;
3163 
3164   code = GET_CODE (x);
3165 
3166   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3167     {
3168       struct elim_table *ep;
3169 
3170       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3171 	if (ep->from_rtx == x)
3172 	  ep->can_eliminate = 0;
3173       return;
3174     }
3175 
3176   fmt = GET_RTX_FORMAT (code);
3177   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3178     {
3179       if (*fmt == 'e')
3180 	check_eliminable_occurrences (XEXP (x, i));
3181       else if (*fmt == 'E')
3182 	{
3183 	  int j;
3184 	  for (j = 0; j < XVECLEN (x, i); j++)
3185 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3186 	}
3187     }
3188 }
3189 
3190 /* Scan INSN and eliminate all eliminable registers in it.
3191 
3192    If REPLACE is nonzero, do the replacement destructively.  Also
3193    delete the insn as dead it if it is setting an eliminable register.
3194 
3195    If REPLACE is zero, do all our allocations in reload_obstack.
3196 
3197    If no eliminations were done and this insn doesn't require any elimination
3198    processing (these are not identical conditions: it might be updating sp,
3199    but not referencing fp; this needs to be seen during reload_as_needed so
3200    that the offset between fp and sp can be taken into consideration), zero
3201    is returned.  Otherwise, 1 is returned.  */
3202 
3203 static int
eliminate_regs_in_insn(rtx_insn * insn,int replace)3204 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3205 {
3206   int icode = recog_memoized (insn);
3207   rtx old_body = PATTERN (insn);
3208   int insn_is_asm = asm_noperands (old_body) >= 0;
3209   rtx old_set = single_set (insn);
3210   rtx new_body;
3211   int val = 0;
3212   int i;
3213   rtx substed_operand[MAX_RECOG_OPERANDS];
3214   rtx orig_operand[MAX_RECOG_OPERANDS];
3215   struct elim_table *ep;
3216   rtx plus_src, plus_cst_src;
3217 
3218   if (! insn_is_asm && icode < 0)
3219     {
3220       gcc_assert (DEBUG_INSN_P (insn)
3221 		  || GET_CODE (PATTERN (insn)) == USE
3222 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3223 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3224       if (DEBUG_BIND_INSN_P (insn))
3225 	INSN_VAR_LOCATION_LOC (insn)
3226 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3227       return 0;
3228     }
3229 
3230   if (old_set != 0 && REG_P (SET_DEST (old_set))
3231       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3232     {
3233       /* Check for setting an eliminable register.  */
3234       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3235 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3236 	  {
3237 	    /* If this is setting the frame pointer register to the
3238 	       hardware frame pointer register and this is an elimination
3239 	       that will be done (tested above), this insn is really
3240 	       adjusting the frame pointer downward to compensate for
3241 	       the adjustment done before a nonlocal goto.  */
3242 	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3243 		&& ep->from == FRAME_POINTER_REGNUM
3244 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3245 	      {
3246 		rtx base = SET_SRC (old_set);
3247 		rtx_insn *base_insn = insn;
3248 		HOST_WIDE_INT offset = 0;
3249 
3250 		while (base != ep->to_rtx)
3251 		  {
3252 		    rtx_insn *prev_insn;
3253 		    rtx prev_set;
3254 
3255 		    if (GET_CODE (base) == PLUS
3256 		        && CONST_INT_P (XEXP (base, 1)))
3257 		      {
3258 		        offset += INTVAL (XEXP (base, 1));
3259 		        base = XEXP (base, 0);
3260 		      }
3261 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3262 			     && (prev_set = single_set (prev_insn)) != 0
3263 			     && rtx_equal_p (SET_DEST (prev_set), base))
3264 		      {
3265 		        base = SET_SRC (prev_set);
3266 		        base_insn = prev_insn;
3267 		      }
3268 		    else
3269 		      break;
3270 		  }
3271 
3272 		if (base == ep->to_rtx)
3273 		  {
3274 		    rtx src = plus_constant (Pmode, ep->to_rtx,
3275 					     offset - ep->offset);
3276 
3277 		    new_body = old_body;
3278 		    if (! replace)
3279 		      {
3280 			new_body = copy_insn (old_body);
3281 			if (REG_NOTES (insn))
3282 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3283 		      }
3284 		    PATTERN (insn) = new_body;
3285 		    old_set = single_set (insn);
3286 
3287 		    /* First see if this insn remains valid when we
3288 		       make the change.  If not, keep the INSN_CODE
3289 		       the same and let reload fit it up.  */
3290 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3291 		    validate_change (insn, &SET_DEST (old_set),
3292 				     ep->to_rtx, 1);
3293 		    if (! apply_change_group ())
3294 		      {
3295 			SET_SRC (old_set) = src;
3296 			SET_DEST (old_set) = ep->to_rtx;
3297 		      }
3298 
3299 		    val = 1;
3300 		    goto done;
3301 		  }
3302 	      }
3303 
3304 	    /* In this case this insn isn't serving a useful purpose.  We
3305 	       will delete it in reload_as_needed once we know that this
3306 	       elimination is, in fact, being done.
3307 
3308 	       If REPLACE isn't set, we can't delete this insn, but needn't
3309 	       process it since it won't be used unless something changes.  */
3310 	    if (replace)
3311 	      {
3312 		delete_dead_insn (insn);
3313 		return 1;
3314 	      }
3315 	    val = 1;
3316 	    goto done;
3317 	  }
3318     }
3319 
3320   /* We allow one special case which happens to work on all machines we
3321      currently support: a single set with the source or a REG_EQUAL
3322      note being a PLUS of an eliminable register and a constant.  */
3323   plus_src = plus_cst_src = 0;
3324   if (old_set && REG_P (SET_DEST (old_set)))
3325     {
3326       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3327 	plus_src = SET_SRC (old_set);
3328       /* First see if the source is of the form (plus (...) CST).  */
3329       if (plus_src
3330 	  && CONST_INT_P (XEXP (plus_src, 1)))
3331 	plus_cst_src = plus_src;
3332       else if (REG_P (SET_SRC (old_set))
3333 	       || plus_src)
3334 	{
3335 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3336 	     (plus (...) CST).  */
3337 	  rtx links;
3338 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3339 	    {
3340 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3341 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3342 		  && GET_CODE (XEXP (links, 0)) == PLUS
3343 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3344 		{
3345 		  plus_cst_src = XEXP (links, 0);
3346 		  break;
3347 		}
3348 	    }
3349 	}
3350 
3351       /* Check that the first operand of the PLUS is a hard reg or
3352 	 the lowpart subreg of one.  */
3353       if (plus_cst_src)
3354 	{
3355 	  rtx reg = XEXP (plus_cst_src, 0);
3356 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3357 	    reg = SUBREG_REG (reg);
3358 
3359 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3360 	    plus_cst_src = 0;
3361 	}
3362     }
3363   if (plus_cst_src)
3364     {
3365       rtx reg = XEXP (plus_cst_src, 0);
3366       poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));
3367 
3368       if (GET_CODE (reg) == SUBREG)
3369 	reg = SUBREG_REG (reg);
3370 
3371       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3372 	if (ep->from_rtx == reg && ep->can_eliminate)
3373 	  {
3374 	    rtx to_rtx = ep->to_rtx;
3375 	    offset += ep->offset;
3376 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3377 
3378 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3379 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3380 				    to_rtx);
3381 	    /* If we have a nonzero offset, and the source is already
3382 	       a simple REG, the following transformation would
3383 	       increase the cost of the insn by replacing a simple REG
3384 	       with (plus (reg sp) CST).  So try only when we already
3385 	       had a PLUS before.  */
3386 	    if (known_eq (offset, 0) || plus_src)
3387 	      {
3388 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3389 					     to_rtx, offset);
3390 
3391 		new_body = old_body;
3392 		if (! replace)
3393 		  {
3394 		    new_body = copy_insn (old_body);
3395 		    if (REG_NOTES (insn))
3396 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3397 		  }
3398 		PATTERN (insn) = new_body;
3399 		old_set = single_set (insn);
3400 
3401 		/* First see if this insn remains valid when we make the
3402 		   change.  If not, try to replace the whole pattern with
3403 		   a simple set (this may help if the original insn was a
3404 		   PARALLEL that was only recognized as single_set due to
3405 		   REG_UNUSED notes).  If this isn't valid either, keep
3406 		   the INSN_CODE the same and let reload fix it up.  */
3407 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3408 		  {
3409 		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3410 
3411 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3412 		      SET_SRC (old_set) = new_src;
3413 		  }
3414 	      }
3415 	    else
3416 	      break;
3417 
3418 	    val = 1;
3419 	    /* This can't have an effect on elimination offsets, so skip right
3420 	       to the end.  */
3421 	    goto done;
3422 	  }
3423     }
3424 
3425   /* Determine the effects of this insn on elimination offsets.  */
3426   elimination_effects (old_body, VOIDmode);
3427 
3428   /* Eliminate all eliminable registers occurring in operands that
3429      can be handled by reload.  */
3430   extract_insn (insn);
3431   for (i = 0; i < recog_data.n_operands; i++)
3432     {
3433       orig_operand[i] = recog_data.operand[i];
3434       substed_operand[i] = recog_data.operand[i];
3435 
3436       /* For an asm statement, every operand is eliminable.  */
3437       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3438 	{
3439 	  bool is_set_src, in_plus;
3440 
3441 	  /* Check for setting a register that we know about.  */
3442 	  if (recog_data.operand_type[i] != OP_IN
3443 	      && REG_P (orig_operand[i]))
3444 	    {
3445 	      /* If we are assigning to a register that can be eliminated, it
3446 		 must be as part of a PARALLEL, since the code above handles
3447 		 single SETs.  We must indicate that we can no longer
3448 		 eliminate this reg.  */
3449 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3450 		   ep++)
3451 		if (ep->from_rtx == orig_operand[i])
3452 		  ep->can_eliminate = 0;
3453 	    }
3454 
3455 	  /* Companion to the above plus substitution, we can allow
3456 	     invariants as the source of a plain move.  */
3457 	  is_set_src = false;
3458 	  if (old_set
3459 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3460 	    is_set_src = true;
3461 	  in_plus = false;
3462 	  if (plus_src
3463 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3464 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3465 	    in_plus = true;
3466 
3467 	  substed_operand[i]
3468 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3469 			        replace ? insn : NULL_RTX,
3470 				is_set_src || in_plus, false);
3471 	  if (substed_operand[i] != orig_operand[i])
3472 	    val = 1;
3473 	  /* Terminate the search in check_eliminable_occurrences at
3474 	     this point.  */
3475 	  *recog_data.operand_loc[i] = 0;
3476 
3477 	  /* If an output operand changed from a REG to a MEM and INSN is an
3478 	     insn, write a CLOBBER insn.  */
3479 	  if (recog_data.operand_type[i] != OP_IN
3480 	      && REG_P (orig_operand[i])
3481 	      && MEM_P (substed_operand[i])
3482 	      && replace)
3483 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3484 	}
3485     }
3486 
3487   for (i = 0; i < recog_data.n_dups; i++)
3488     *recog_data.dup_loc[i]
3489       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3490 
3491   /* If any eliminable remain, they aren't eliminable anymore.  */
3492   check_eliminable_occurrences (old_body);
3493 
3494   /* Substitute the operands; the new values are in the substed_operand
3495      array.  */
3496   for (i = 0; i < recog_data.n_operands; i++)
3497     *recog_data.operand_loc[i] = substed_operand[i];
3498   for (i = 0; i < recog_data.n_dups; i++)
3499     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3500 
3501   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3502      re-recognize the insn.  We do this in case we had a simple addition
3503      but now can do this as a load-address.  This saves an insn in this
3504      common case.
3505      If re-recognition fails, the old insn code number will still be used,
3506      and some register operands may have changed into PLUS expressions.
3507      These will be handled by find_reloads by loading them into a register
3508      again.  */
3509 
3510   if (val)
3511     {
3512       /* If we aren't replacing things permanently and we changed something,
3513 	 make another copy to ensure that all the RTL is new.  Otherwise
3514 	 things can go wrong if find_reload swaps commutative operands
3515 	 and one is inside RTL that has been copied while the other is not.  */
3516       new_body = old_body;
3517       if (! replace)
3518 	{
3519 	  new_body = copy_insn (old_body);
3520 	  if (REG_NOTES (insn))
3521 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3522 	}
3523       PATTERN (insn) = new_body;
3524 
3525       /* If we had a move insn but now we don't, rerecognize it.  This will
3526 	 cause spurious re-recognition if the old move had a PARALLEL since
3527 	 the new one still will, but we can't call single_set without
3528 	 having put NEW_BODY into the insn and the re-recognition won't
3529 	 hurt in this rare case.  */
3530       /* ??? Why this huge if statement - why don't we just rerecognize the
3531 	 thing always?  */
3532       if (! insn_is_asm
3533 	  && old_set != 0
3534 	  && ((REG_P (SET_SRC (old_set))
3535 	       && (GET_CODE (new_body) != SET
3536 		   || !REG_P (SET_SRC (new_body))))
3537 	      /* If this was a load from or store to memory, compare
3538 		 the MEM in recog_data.operand to the one in the insn.
3539 		 If they are not equal, then rerecognize the insn.  */
3540 	      || (old_set != 0
3541 		  && ((MEM_P (SET_SRC (old_set))
3542 		       && SET_SRC (old_set) != recog_data.operand[1])
3543 		      || (MEM_P (SET_DEST (old_set))
3544 			  && SET_DEST (old_set) != recog_data.operand[0])))
3545 	      /* If this was an add insn before, rerecognize.  */
3546 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3547 	{
3548 	  int new_icode = recog (PATTERN (insn), insn, 0);
3549 	  if (new_icode >= 0)
3550 	    INSN_CODE (insn) = new_icode;
3551 	}
3552     }
3553 
3554   /* Restore the old body.  If there were any changes to it, we made a copy
3555      of it while the changes were still in place, so we'll correctly return
3556      a modified insn below.  */
3557   if (! replace)
3558     {
3559       /* Restore the old body.  */
3560       for (i = 0; i < recog_data.n_operands; i++)
3561 	/* Restoring a top-level match_parallel would clobber the new_body
3562 	   we installed in the insn.  */
3563 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3564 	  *recog_data.operand_loc[i] = orig_operand[i];
3565       for (i = 0; i < recog_data.n_dups; i++)
3566 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3567     }
3568 
3569   /* Update all elimination pairs to reflect the status after the current
3570      insn.  The changes we make were determined by the earlier call to
3571      elimination_effects.
3572 
3573      We also detect cases where register elimination cannot be done,
3574      namely, if a register would be both changed and referenced outside a MEM
3575      in the resulting insn since such an insn is often undefined and, even if
3576      not, we cannot know what meaning will be given to it.  Note that it is
3577      valid to have a register used in an address in an insn that changes it
3578      (presumably with a pre- or post-increment or decrement).
3579 
3580      If anything changes, return nonzero.  */
3581 
3582   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3583     {
3584       if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3585 	ep->can_eliminate = 0;
3586 
3587       ep->ref_outside_mem = 0;
3588 
3589       if (maybe_ne (ep->previous_offset, ep->offset))
3590 	val = 1;
3591     }
3592 
3593  done:
3594   /* If we changed something, perform elimination in REG_NOTES.  This is
3595      needed even when REPLACE is zero because a REG_DEAD note might refer
3596      to a register that we eliminate and could cause a different number
3597      of spill registers to be needed in the final reload pass than in
3598      the pre-passes.  */
3599   if (val && REG_NOTES (insn) != 0)
3600     REG_NOTES (insn)
3601       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3602 			  false);
3603 
3604   return val;
3605 }
3606 
3607 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3608    register allocator.  INSN is the instruction we need to examine, we perform
3609    eliminations in its operands and record cases where eliminating a reg with
3610    an invariant equivalence would add extra cost.  */
3611 
#pragma GCC diagnostic push
#pragma GCC diagnostic warning "-Wmaybe-uninitialized"
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  /* Negative when the pattern was never recognized.  */
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable non-asm insns must be one of the harmless kinds
     asserted below; they contribute nothing to elimination costs.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  /* A single set whose destination is an eliminable hard register is
     handled by the elimination machinery itself; no cost to record.  */
  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  int n_dups = recog_data.n_dups;
  /* Save the duplicate operand locations so the pattern can be
     restored intact below.  */
  for (i = 0; i < n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  int n_operands = recog_data.n_operands;
  for (i = 0; i < n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* A set source when the destination is not a plain register
	     makes an invariant replacement costly; record that.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Run the substitution only for its cost-recording side
	     effects; the result is discarded.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* A pair whose offset changed while it was referenced outside a
	 MEM can no longer be eliminated.  */
      if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
#pragma GCC diagnostic pop
3764 
3765 /* Loop through all elimination pairs.
3766    Recalculate the number not at initial offset.
3767 
3768    Compute the maximum offset (minimum offset if the stack does not
3769    grow downward) for each elimination pair.  */
3770 
3771 static void
update_eliminable_offsets(void)3772 update_eliminable_offsets (void)
3773 {
3774   struct elim_table *ep;
3775 
3776   num_not_at_initial_offset = 0;
3777   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3778     {
3779       ep->previous_offset = ep->offset;
3780       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3781 	num_not_at_initial_offset++;
3782     }
3783 }
3784 
3785 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3786    replacement we currently believe is valid, mark it as not eliminable if X
3787    modifies DEST in any way other than by adding a constant integer to it.
3788 
3789    If DEST is the frame pointer, we do nothing because we assume that
3790    all assignments to the hard frame pointer are nonlocal gotos and are being
3791    done at a time when they are valid and do not disturb anything else.
3792    Some machines want to eliminate a fake argument pointer with either the
3793    frame or stack pointer.  Assignments to the hard frame pointer must not
3794    prevent this elimination.
3795 
3796    Called via note_stores from reload before starting its passes to scan
3797    the insns of the function.  */
3798 
3799 static void
mark_not_eliminable(rtx dest,const_rtx x,void * data ATTRIBUTE_UNUSED)3800 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3801 {
3802   unsigned int i;
3803 
3804   /* A SUBREG of a hard register here is just changing its mode.  We should
3805      not see a SUBREG of an eliminable hard register, but check just in
3806      case.  */
3807   if (GET_CODE (dest) == SUBREG)
3808     dest = SUBREG_REG (dest);
3809 
3810   if (dest == hard_frame_pointer_rtx)
3811     return;
3812 
3813   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3814     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3815 	&& (GET_CODE (x) != SET
3816 	    || GET_CODE (SET_SRC (x)) != PLUS
3817 	    || XEXP (SET_SRC (x), 0) != dest
3818 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3819       {
3820 	reg_eliminate[i].can_eliminate_previous
3821 	  = reg_eliminate[i].can_eliminate = 0;
3822 	num_eliminable--;
3823       }
3824 }
3825 
3826 /* Verify that the initial elimination offsets did not change since the
3827    last call to set_initial_elim_offsets.  This is used to catch cases
3828    where something illegal happened during reload_as_needed that could
3829    cause incorrect code to be generated if we did not check for it.  */
3830 
3831 static bool
verify_initial_elim_offsets(void)3832 verify_initial_elim_offsets (void)
3833 {
3834   poly_int64 t;
3835   struct elim_table *ep;
3836 
3837   if (!num_eliminable)
3838     return true;
3839 
3840   targetm.compute_frame_layout ();
3841   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3842     {
3843       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3844       if (maybe_ne (t, ep->initial_offset))
3845 	return false;
3846     }
3847 
3848   return true;
3849 }
3850 
3851 /* Reset all offsets on eliminable registers to their initial values.  */
3852 
3853 static void
set_initial_elim_offsets(void)3854 set_initial_elim_offsets (void)
3855 {
3856   struct elim_table *ep = reg_eliminate;
3857 
3858   targetm.compute_frame_layout ();
3859   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3860     {
3861       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3862       ep->previous_offset = ep->offset = ep->initial_offset;
3863     }
3864 
3865   num_not_at_initial_offset = 0;
3866 }
3867 
3868 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3869 
static void
set_initial_eh_label_offset (rtx label)
{
  /* Record that LABEL is reached with every elimination at its initial
     offset (third argument 1 = initial).  */
  set_label_offsets (label, NULL, 1);
}
3875 
3876 /* Initialize the known label offsets.
3877    Set a known offset for each forced label to be at the initial offset
3878    of each elimination.  We do this because we assume that all
3879    computed jumps occur from a location where each elimination is
3880    at its initial offset.
3881    For all other labels, show that we don't know the offsets.  */
3882 
3883 static void
set_initial_label_offsets(void)3884 set_initial_label_offsets (void)
3885 {
3886   memset (offsets_known_at, 0, num_labels);
3887 
3888   unsigned int i;
3889   rtx_insn *insn;
3890   FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3891     set_label_offsets (insn, NULL, 1);
3892 
3893   for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3894     if (x->insn ())
3895       set_label_offsets (x->insn (), NULL, 1);
3896 
3897   for_each_eh_label (set_initial_eh_label_offset);
3898 }
3899 
3900 /* Set all elimination offsets to the known values for the code label given
3901    by INSN.  */
3902 
3903 static void
set_offsets_for_label(rtx_insn * insn)3904 set_offsets_for_label (rtx_insn *insn)
3905 {
3906   unsigned int i;
3907   int label_nr = CODE_LABEL_NUMBER (insn);
3908   struct elim_table *ep;
3909 
3910   num_not_at_initial_offset = 0;
3911   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3912     {
3913       ep->offset = ep->previous_offset
3914 		 = offsets_at[label_nr - first_label_num][i];
3915       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3916 	num_not_at_initial_offset++;
3917     }
3918 }
3919 
3920 /* See if anything that happened changes which eliminations are valid.
3921    For example, on the SPARC, whether or not the frame pointer can
3922    be eliminated can depend on what registers have been used.  We need
3923    not check some conditions again (such as flag_omit_frame_pointer)
3924    since they can't have changed.  */
3925 
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* Disable any elimination the target no longer permits, and any
     elimination from the hard frame pointer when the target now
     requires a frame pointer.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
	|| ! targetm.can_eliminate (ep->from, ep->to)
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* An active elimination of the frame pointer to something other
	 than the hard frame pointer means no frame pointer is needed,
	 unless stack realignment forces one.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* For each newly-disabled elimination, ask the caller (via PSET)
	 to spill the source register.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4001 
/* Call update_eliminables and spill any registers we can't eliminate anymore.
   Return true iff a register was spilled.  */
4004 
4005 static bool
update_eliminables_and_spill(void)4006 update_eliminables_and_spill (void)
4007 {
4008   int i;
4009   bool did_spill = false;
4010   HARD_REG_SET to_spill;
4011   CLEAR_HARD_REG_SET (to_spill);
4012   update_eliminables (&to_spill);
4013   AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4014 
4015   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4016     if (TEST_HARD_REG_BIT (to_spill, i))
4017       {
4018 	spill_hard_reg (i, 1);
4019 	did_spill = true;
4020 
4021 	/* Regardless of the state of spills, if we previously had
4022 	   a register that we thought we could eliminate, but now can
4023 	   not eliminate, we must run another pass.
4024 
4025 	   Consider pseudos which have an entry in reg_equiv_* which
4026 	   reference an eliminable register.  We must make another pass
4027 	   to update reg_equiv_* so that we do not substitute in the
4028 	   old value from when we thought the elimination could be
4029 	   performed.  */
4030       }
4031   return did_spill;
4032 }
4033 
4034 /* Return true if X is used as the target register of an elimination.  */
4035 
4036 bool
elimination_target_reg_p(rtx x)4037 elimination_target_reg_p (rtx x)
4038 {
4039   struct elim_table *ep;
4040 
4041   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4042     if (ep->to_rtx == x && ep->can_eliminate)
4043       return true;
4044 
4045   return false;
4046 }
4047 
4048 /* Initialize the table of registers to eliminate.
4049    Pre-condition: global flag frame_pointer_needed has been set before
4050    calling this function.  */
4051 
4052 static void
init_elim_table(void)4053 init_elim_table (void)
4054 {
4055   struct elim_table *ep;
4056   const struct elim_table_1 *ep1;
4057 
4058   if (!reg_eliminate)
4059     reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4060 
4061   num_eliminable = 0;
4062 
4063   for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4064        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4065     {
4066       ep->from = ep1->from;
4067       ep->to = ep1->to;
4068       ep->can_eliminate = ep->can_eliminate_previous
4069 	= (targetm.can_eliminate (ep->from, ep->to)
4070 	   && ! (ep->to == STACK_POINTER_REGNUM
4071 		 && frame_pointer_needed
4072 		 && (! SUPPORTS_STACK_ALIGNMENT
4073 		     || ! stack_realign_fp)));
4074     }
4075 
4076   /* Count the number of eliminable registers and build the FROM and TO
4077      REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
4078      gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4079      We depend on this.  */
4080   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4081     {
4082       num_eliminable += ep->can_eliminate;
4083       ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4084       ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4085     }
4086 }
4087 
4088 /* Find all the pseudo registers that didn't get hard regs
4089    but do have known equivalent constants or memory slots.
4090    These include parameters (known equivalent to parameter slots)
4091    and cse'd or loop-moved constant memory addresses.
4092 
4093    Record constant equivalents in reg_equiv_constant
4094    so they will be substituted by find_reloads.
4095    Record memory equivalents in reg_mem_equiv so they can
4096    be substituted eventually by altering the REG-rtx's.  */
4097 
static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  /* Only allocate the paradoxical-subreg mode table when asked to
     scan for them.  */
  if (do_subregs)
    reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
  else
    reg_max_ref_mode = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS])
    xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64));

/* Look for REG_EQUIV notes; record what each pseudo is equivalent
   to.  If DO_SUBREGS is true, also find all paradoxical subregs and
   find largest such for each pseudo.  FIRST is the head of the insn
   list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Only true pseudos (regnos above the virtual registers)
	     can carry recorded equivalences.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		       equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* An invariant the target cannot materialize
			 directly is forced into the constant pool;
			 drop the init insn if that fails.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4215 
4216 /* Indicate that we no longer have known memory locations or constants.
4217    Free all data involved in tracking these.  */
4218 
4219 static void
free_reg_equiv(void)4220 free_reg_equiv (void)
4221 {
4222   int i;
4223 
4224   free (offsets_known_at);
4225   free (offsets_at);
4226   offsets_at = 0;
4227   offsets_known_at = 0;
4228 
4229   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4230     if (reg_equiv_alt_mem_list (i))
4231       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4232   vec_free (reg_equivs);
4233 }
4234 
4235 /* Kick all pseudos out of hard register REGNO.
4236 
4237    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4238    because we found we can't eliminate some register.  In the case, no pseudos
4239    are allowed to be in the register, even if they are only in a block that
4240    doesn't require spill registers, unlike the case when we are spilling this
4241    hard reg to produce another spill register.
4242 
   Any pseudos kicked out of REGNO are recorded in spilled_pseudos.  */
4244 
4245 static void
spill_hard_reg(unsigned int regno,int cant_eliminate)4246 spill_hard_reg (unsigned int regno, int cant_eliminate)
4247 {
4248   int i;
4249 
4250   if (cant_eliminate)
4251     {
4252       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4253       df_set_regs_ever_live (regno, true);
4254     }
4255 
4256   /* Spill every pseudo reg that was allocated to this reg
4257      or to something that overlaps this reg.  */
4258 
4259   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4260     if (reg_renumber[i] >= 0
4261 	&& (unsigned int) reg_renumber[i] <= regno
4262 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4263       SET_REGNO_REG_SET (&spilled_pseudos, i);
4264 }
4265 
/* After spill_hard_reg was called and/or find_reload_regs was run for all
   insns that need reloads, this function is used to actually spill pseudo
   registers and try to reallocate them.  It also sets up the spill_regs
   array for use by choose_reload_regs.

   GLOBAL nonzero means we should attempt to reallocate any pseudo registers
   that we displace from hard registers.

   Return nonzero if something changed that requires the caller to iterate
   the reload pass again.  */

static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Evict each spilled pseudo from its hard register, remembering the old
     home so a later allocation pass won't pick the same register again.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns).  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4427 
4428 /* Find all paradoxical subregs within X and update reg_max_ref_mode.  */
4429 
4430 static void
scan_paradoxical_subregs(rtx x)4431 scan_paradoxical_subregs (rtx x)
4432 {
4433   int i;
4434   const char *fmt;
4435   enum rtx_code code = GET_CODE (x);
4436 
4437   switch (code)
4438     {
4439     case REG:
4440     case CONST:
4441     case SYMBOL_REF:
4442     case LABEL_REF:
4443     CASE_CONST_ANY:
4444     case CC0:
4445     case PC:
4446     case USE:
4447     case CLOBBER:
4448       return;
4449 
4450     case SUBREG:
4451       if (REG_P (SUBREG_REG (x)))
4452 	{
4453 	  unsigned int regno = REGNO (SUBREG_REG (x));
4454 	  if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4455 	    {
4456 	      reg_max_ref_mode[regno] = GET_MODE (x);
4457 	      mark_home_live_1 (regno, GET_MODE (x));
4458 	    }
4459 	}
4460       return;
4461 
4462     default:
4463       break;
4464     }
4465 
4466   fmt = GET_RTX_FORMAT (code);
4467   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4468     {
4469       if (fmt[i] == 'e')
4470 	scan_paradoxical_subregs (XEXP (x, i));
4471       else if (fmt[i] == 'E')
4472 	{
4473 	  int j;
4474 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4475 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4476 	}
4477     }
4478 }
4479 
4480 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4481    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4482    and apply the corresponding narrowing subreg to *OTHER_PTR.
4483    Return true if the operands were changed, false otherwise.  */
4484 
4485 static bool
strip_paradoxical_subreg(rtx * op_ptr,rtx * other_ptr)4486 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4487 {
4488   rtx op, inner, other, tem;
4489 
4490   op = *op_ptr;
4491   if (!paradoxical_subreg_p (op))
4492     return false;
4493   inner = SUBREG_REG (op);
4494 
4495   other = *other_ptr;
4496   tem = gen_lowpart_common (GET_MODE (inner), other);
4497   if (!tem)
4498     return false;
4499 
4500   /* If the lowpart operation turned a hard register into a subreg,
4501      rather than simplifying it to another hard register, then the
4502      mode change cannot be properly represented.  For example, OTHER
4503      might be valid in its current mode, but not in the new one.  */
4504   if (GET_CODE (tem) == SUBREG
4505       && REG_P (other)
4506       && HARD_REGISTER_P (other))
4507     return false;
4508 
4509   *op_ptr = inner;
4510   *other_ptr = tem;
4511   return true;
4512 }
4513 
4514 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4515    examine all of the reload insns between PREV and NEXT exclusive, and
4516    annotate all that may trap.  */
4517 
4518 static void
fixup_eh_region_note(rtx_insn * insn,rtx_insn * prev,rtx_insn * next)4519 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4520 {
4521   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4522   if (note == NULL)
4523     return;
4524   if (!insn_could_throw_p (insn))
4525     remove_note (insn, note);
4526   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4527 }
4528 
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   LIVE_KNOWN nonzero means we have accurate life information for the
   pseudos (it is forwarded to find_reloads).

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if AUTO_INC_DEC
  int i;
#endif
  rtx_note *marker;

  /* Reset all per-function reload-tracking state.  */
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#if AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* Re-establish PREV from the marker, then retire the
		 marker until the next insn that needs reloads.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 get_args_size (p));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in it
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#if AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      /* Look for the single insn between PREV and OLD_NEXT
			 that uses RELOAD_REG; if found, rewrite that use
			 into a real POST_INC / POST_DEC.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      /* PRE_INC / PRE_DEC leave the reload reg holding the
			 same value as the stack slot, so it stays valid
			 for inheritance.  */
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4890 
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.

   IGNORED exists only to match the note_stores callback signature.

   When DATA is non-NULL it is a regset: just mark the registers in it
   to be forgotten later (by forget_marked_reloads).  When DATA is NULL,
   forget them immediately.  */

static void
forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
		      void *data)
{
  unsigned int regno;
  unsigned int nr;
  regset regs = (regset) data;

  /* note_stores does give us subregs of hard regs,
     subreg_regno_offset requires a hard reg.  */
  while (GET_CODE (x) == SUBREG)
    {
      /* We ignore the subreg offset when calculating the regno,
	 because we are using the entire underlying hard register
	 below.  */
      x = SUBREG_REG (x);
    }

  if (!REG_P (x))
    return;

  regno = REGNO (x);

  /* A pseudo occupies one entry; a hard reg may span several.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      unsigned int i;

      nr = REG_NREGS (x);
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      if (!regs)
	for (i = 0; i < nr; i++)
	  /* But don't do this if the reg actually serves as an output
	     reload reg in the current instruction.  */
	  if (n_reloads == 0
	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
	    {
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
	      spill_reg_store[regno + i] = 0;
	    }
    }

  /* Either queue the registers for later processing, or forget any
     copies made from them right now.  */
  if (regs)
    while (nr-- > 0)
      SET_REGNO_REG_SET (regs, regno + nr);
  else
    {
      /* Since value of X has changed,
	 forget any value previously copied from it.  */

      while (nr-- > 0)
	/* But don't forget a copy if this is the output reload
	   that establishes the copy's validity.  */
	if (n_reloads == 0
	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
	  reg_last_reload_reg[regno + nr] = 0;
     }
}
4963 
4964 /* Forget the reloads marked in regset by previous function.  */
4965 static void
forget_marked_reloads(regset regs)4966 forget_marked_reloads (regset regs)
4967 {
4968   unsigned int reg;
4969   reg_set_iterator rsi;
4970   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4971     {
4972       if (reg < FIRST_PSEUDO_REGISTER
4973 	  /* But don't do this if the reg actually serves as an output
4974 	     reload reg in the current instruction.  */
4975 	  && (n_reloads == 0
4976 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4977 	  {
4978 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4979 	    spill_reg_store[reg] = 0;
4980 	  }
4981       if (n_reloads == 0
4982 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4983 	reg_last_reload_reg[reg] = 0;
4984     }
4985 }
4986 
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  The arrays
   below are indexed by operand number.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5025 
5026 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5027    TYPE. MODE is used to indicate how many consecutive regs are
5028    actually used.  */
5029 
5030 static void
mark_reload_reg_in_use(unsigned int regno,int opnum,enum reload_type type,machine_mode mode)5031 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5032 			machine_mode mode)
5033 {
5034   switch (type)
5035     {
5036     case RELOAD_OTHER:
5037       add_to_hard_reg_set (&reload_reg_used, mode, regno);
5038       break;
5039 
5040     case RELOAD_FOR_INPUT_ADDRESS:
5041       add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5042       break;
5043 
5044     case RELOAD_FOR_INPADDR_ADDRESS:
5045       add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5046       break;
5047 
5048     case RELOAD_FOR_OUTPUT_ADDRESS:
5049       add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5050       break;
5051 
5052     case RELOAD_FOR_OUTADDR_ADDRESS:
5053       add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5054       break;
5055 
5056     case RELOAD_FOR_OPERAND_ADDRESS:
5057       add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5058       break;
5059 
5060     case RELOAD_FOR_OPADDR_ADDR:
5061       add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5062       break;
5063 
5064     case RELOAD_FOR_OTHER_ADDRESS:
5065       add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5066       break;
5067 
5068     case RELOAD_FOR_INPUT:
5069       add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5070       break;
5071 
5072     case RELOAD_FOR_OUTPUT:
5073       add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5074       break;
5075 
5076     case RELOAD_FOR_INSN:
5077       add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
5078       break;
5079     }
5080 
5081   add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5082 }
5083 
5084 /* Similarly, but show REGNO is no longer in use for a reload.  */
5085 
static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs (regno, mode);
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the in-use set corresponding to TYPE (and OPNUM, for the
     per-operand reload types), mirroring the sets that
     mark_reload_reg_in_use updates.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      /* Shrink [start_regno, end_regno) to exclude registers still
	 claimed by other reloads of the same type (and, unless
	 check_any, the same operand number).  */
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Clear the (possibly narrowed) interval of freed registers.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5190 
5191 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5192    specified by OPNUM and TYPE.  */
5193 
static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* Each reload type conflicts with a specific subset of the other
     reload-usage sets; test exactly those.  */
  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* Can't share with a register used during the insn itself or for
	 an operand address.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      /* Likewise if it is used for the address of an operand address.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Can't use a register needed for any input, during the insn,
	 or for another operand address.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Can't use a register needed for any input or for the address
	 of another operand address.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Can't use a register needed for any input or output, during the
	 insn itself, or for an operand address.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Only conflicts with other RELOAD_FOR_OTHER_ADDRESS uses.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5351 
5352 /* Return 1 if the value in reload reg REGNO, as used by the reload with
5353    the number RELOADNUM, is still available in REGNO at the end of the insn.
5354 
5355    We can assume that the reload reg was already tested for availability
5356    at the time it is needed, and we should not check this again,
5357    in case the reg has already been marked in use.  */
5358 
static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  /* Operand number and lifetime class of the reload under test.  */
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      /* A later reload of the same type/operand claims this register,
	 so our value does not survive.  */
      if (regno >= REGNO (reg) && regno < END_REGNO (reg))
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* fall through */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5516 
5517 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5518    every register in REG.  */
5519 
5520 static bool
reload_reg_rtx_reaches_end_p(rtx reg,int reloadnum)5521 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5522 {
5523   unsigned int i;
5524 
5525   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5526     if (!reload_reg_reaches_end_p (i, reloadnum))
5527       return false;
5528   return true;
5529 }
5530 
5531 
5532 /*  Returns whether R1 and R2 are uniquely chained: the value of one
5533     is used by the other, and that value is not used by any other
5534     reload for this insn.  This is used to partially undo the decision
5535     made in find_reloads when in the case of multiple
5536     RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5537     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5538     reloads.  This code tries to avoid the conflict created by that
5539     change.  It might be cleaner to explicitly keep track of which
5540     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5541     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5542     this after the fact. */
5543 static bool
reloads_unique_chain_p(int r1,int r2)5544 reloads_unique_chain_p (int r1, int r2)
5545 {
5546   int i;
5547 
5548   /* We only check input reloads.  */
5549   if (! rld[r1].in || ! rld[r2].in)
5550     return false;
5551 
5552   /* Avoid anything with output reloads.  */
5553   if (rld[r1].out || rld[r2].out)
5554     return false;
5555 
5556   /* "chained" means one reload is a component of the other reload,
5557      not the same as the other reload.  */
5558   if (rld[r1].opnum != rld[r2].opnum
5559       || rtx_equal_p (rld[r1].in, rld[r2].in)
5560       || rld[r1].optional || rld[r2].optional
5561       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5562 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5563     return false;
5564 
5565   /* The following loop assumes that r1 is the reload that feeds r2.  */
5566   if (r1 > r2)
5567     std::swap (r1, r2);
5568 
5569   for (i = 0; i < n_reloads; i ++)
5570     /* Look for input reloads that aren't our two */
5571     if (i != r1 && i != r2 && rld[i].in)
5572       {
5573 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5574 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5575 	  return false;
5576       }
5577   return true;
5578 }
5579 
/* This recursive function changes all occurrences of WHAT in *WHERE
   to REPL.  */
5582 static void
substitute(rtx * where,const_rtx what,rtx repl)5583 substitute (rtx *where, const_rtx what, rtx repl)
5584 {
5585   const char *fmt;
5586   int i;
5587   enum rtx_code code;
5588 
5589   if (*where == 0)
5590     return;
5591 
5592   if (*where == what || rtx_equal_p (*where, what))
5593     {
5594       /* Record the location of the changed rtx.  */
5595       substitute_stack.safe_push (where);
5596       *where = repl;
5597       return;
5598     }
5599 
5600   code = GET_CODE (*where);
5601   fmt = GET_RTX_FORMAT (code);
5602   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5603     {
5604       if (fmt[i] == 'E')
5605 	{
5606 	  int j;
5607 
5608 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5609 	    substitute (&XVECEXP (*where, i, j), what, repl);
5610 	}
5611       else if (fmt[i] == 'e')
5612 	substitute (&XEXP (*where, i), what, repl);
5613     }
5614 }
5615 
5616 /* The function returns TRUE if chain of reload R1 and R2 (in any
5617    order) can be evaluated without usage of intermediate register for
5618    the reload containing another reload.  It is important to see
5619    gen_reload to understand what the function is trying to do.  As an
5620    example, let us have reload chain
5621 
5622       r2: const
5623       r1: <something> + const
5624 
5625    and reload R2 got reload reg HR.  The function returns true if
5626    there is a correct insn HR = HR + <something>.  Otherwise,
5627    gen_reload will use intermediate register (and this is the reload
5628    reg for R1) to reload <something>.
5629 
5630    We need this function to find a conflict for chain reloads.  In our
5631    example, if HR = HR + <something> is incorrect insn, then we cannot
5632    use HR as a reload register for R2.  If we do use it then we get a
5633    wrong code:
5634 
5635       HR = const
5636       HR = <something>
5637       HR = HR + HR
5638 
5639 */
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard register.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  /* Remember the current last insn so the scratch insn emitted below
     can be deleted again.  */
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads already has a hard register.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace R2's input inside R1's input by the shared reload register,
     modelling the state after R2 has been reloaded into REGNO.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Tentatively emit OUT = IN and ask the backend whether it is a
	 recognizable insn whose constraints are satisfied.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      /* Remove the scratch insn emitted above.  */
      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5700 
5701 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5702    Return 0 otherwise.
5703 
5704    This function uses the same algorithm as reload_reg_free_p above.  */
5705 
5706 static int
reloads_conflict(int r1,int r2)5707 reloads_conflict (int r1, int r2)
5708 {
5709   enum reload_type r1_type = rld[r1].when_needed;
5710   enum reload_type r2_type = rld[r2].when_needed;
5711   int r1_opnum = rld[r1].opnum;
5712   int r2_opnum = rld[r2].opnum;
5713 
5714   /* RELOAD_OTHER conflicts with everything.  */
5715   if (r2_type == RELOAD_OTHER)
5716     return 1;
5717 
5718   /* Otherwise, check conflicts differently for each type.  */
5719 
5720   switch (r1_type)
5721     {
5722     case RELOAD_FOR_INPUT:
5723       return (r2_type == RELOAD_FOR_INSN
5724 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5725 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5726 	      || r2_type == RELOAD_FOR_INPUT
5727 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5728 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5729 		  && r2_opnum > r1_opnum));
5730 
5731     case RELOAD_FOR_INPUT_ADDRESS:
5732       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5733 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5734 
5735     case RELOAD_FOR_INPADDR_ADDRESS:
5736       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5737 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5738 
5739     case RELOAD_FOR_OUTPUT_ADDRESS:
5740       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5741 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5742 
5743     case RELOAD_FOR_OUTADDR_ADDRESS:
5744       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5745 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5746 
5747     case RELOAD_FOR_OPERAND_ADDRESS:
5748       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5749 	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5750 		  && (!reloads_unique_chain_p (r1, r2)
5751 		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5752 
5753     case RELOAD_FOR_OPADDR_ADDR:
5754       return (r2_type == RELOAD_FOR_INPUT
5755 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
5756 
5757     case RELOAD_FOR_OUTPUT:
5758       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5759 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5760 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5761 		  && r2_opnum >= r1_opnum));
5762 
5763     case RELOAD_FOR_INSN:
5764       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5765 	      || r2_type == RELOAD_FOR_INSN
5766 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5767 
5768     case RELOAD_FOR_OTHER_ADDRESS:
5769       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5770 
5771     case RELOAD_OTHER:
5772       return 1;
5773 
5774     default:
5775       gcc_unreachable ();
5776     }
5777 }
5778 
/* Indexed by reload number, 1 if the incoming value was
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5800 
5801 /* Subroutine of free_for_value_p, used to check a single register.
5802    START_REGNO is the starting regno of the full reload register
5803    (possibly comprising multiple hard registers) that we are considering.  */
5804 
5805 static int
reload_reg_free_for_value_p(int start_regno,int regno,int opnum,enum reload_type type,rtx value,rtx out,int reloadnum,int ignore_address_reloads)5806 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5807 			     enum reload_type type, rtx value, rtx out,
5808 			     int reloadnum, int ignore_address_reloads)
5809 {
5810   int time1;
5811   /* Set if we see an input reload that must not share its reload register
5812      with any new earlyclobber, but might otherwise share the reload
5813      register with an output or input-output reload.  */
5814   int check_earlyclobber = 0;
5815   int i;
5816   int copy = 0;
5817 
5818   if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5819     return 0;
5820 
5821   if (out == const0_rtx)
5822     {
5823       copy = 1;
5824       out = NULL_RTX;
5825     }
5826 
5827   /* We use some pseudo 'time' value to check if the lifetimes of the
5828      new register use would overlap with the one of a previous reload
5829      that is not read-only or uses a different value.
5830      The 'time' used doesn't have to be linear in any shape or form, just
5831      monotonic.
5832      Some reload types use different 'buckets' for each operand.
5833      So there are MAX_RECOG_OPERANDS different time values for each
5834      such reload type.
5835      We compute TIME1 as the time when the register for the prospective
5836      new reload ceases to be live, and TIME2 for each existing
5837      reload as the time when that the reload register of that reload
5838      becomes live.
5839      Where there is little to be gained by exact lifetime calculations,
5840      we just make conservative assumptions, i.e. a longer lifetime;
5841      this is done in the 'default:' cases.  */
5842   switch (type)
5843     {
5844     case RELOAD_FOR_OTHER_ADDRESS:
5845       /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5846       time1 = copy ? 0 : 1;
5847       break;
5848     case RELOAD_OTHER:
5849       time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5850       break;
5851       /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5852 	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5853 	 respectively, to the time values for these, we get distinct time
5854 	 values.  To get distinct time values for each operand, we have to
5855 	 multiply opnum by at least three.  We round that up to four because
5856 	 multiply by four is often cheaper.  */
5857     case RELOAD_FOR_INPADDR_ADDRESS:
5858       time1 = opnum * 4 + 2;
5859       break;
5860     case RELOAD_FOR_INPUT_ADDRESS:
5861       time1 = opnum * 4 + 3;
5862       break;
5863     case RELOAD_FOR_INPUT:
5864       /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5865 	 executes (inclusive).  */
5866       time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5867       break;
5868     case RELOAD_FOR_OPADDR_ADDR:
5869       /* opnum * 4 + 4
5870 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5871       time1 = MAX_RECOG_OPERANDS * 4 + 1;
5872       break;
5873     case RELOAD_FOR_OPERAND_ADDRESS:
5874       /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5875 	 is executed.  */
5876       time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5877       break;
5878     case RELOAD_FOR_OUTADDR_ADDRESS:
5879       time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5880       break;
5881     case RELOAD_FOR_OUTPUT_ADDRESS:
5882       time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5883       break;
5884     default:
5885       time1 = MAX_RECOG_OPERANDS * 5 + 5;
5886     }
5887 
5888   for (i = 0; i < n_reloads; i++)
5889     {
5890       rtx reg = rld[i].reg_rtx;
5891       if (reg && REG_P (reg)
5892 	  && (unsigned) regno - true_regnum (reg) < REG_NREGS (reg)
5893 	  && i != reloadnum)
5894 	{
5895 	  rtx other_input = rld[i].in;
5896 
5897 	  /* If the other reload loads the same input value, that
5898 	     will not cause a conflict only if it's loading it into
5899 	     the same register.  */
5900 	  if (true_regnum (reg) != start_regno)
5901 	    other_input = NULL_RTX;
5902 	  if (! other_input || ! rtx_equal_p (other_input, value)
5903 	      || rld[i].out || out)
5904 	    {
5905 	      int time2;
5906 	      switch (rld[i].when_needed)
5907 		{
5908 		case RELOAD_FOR_OTHER_ADDRESS:
5909 		  time2 = 0;
5910 		  break;
5911 		case RELOAD_FOR_INPADDR_ADDRESS:
5912 		  /* find_reloads makes sure that a
5913 		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5914 		     by at most one - the first -
5915 		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
5916 		     address reload is inherited, the address address reload
5917 		     goes away, so we can ignore this conflict.  */
5918 		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5919 		      && ignore_address_reloads
5920 		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5921 			 Then the address address is still needed to store
5922 			 back the new address.  */
5923 		      && ! rld[reloadnum].out)
5924 		    continue;
5925 		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5926 		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5927 		     reloads go away.  */
5928 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5929 		      && ignore_address_reloads
5930 		      /* Unless we are reloading an auto_inc expression.  */
5931 		      && ! rld[reloadnum].out)
5932 		    continue;
5933 		  time2 = rld[i].opnum * 4 + 2;
5934 		  break;
5935 		case RELOAD_FOR_INPUT_ADDRESS:
5936 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5937 		      && ignore_address_reloads
5938 		      && ! rld[reloadnum].out)
5939 		    continue;
5940 		  time2 = rld[i].opnum * 4 + 3;
5941 		  break;
5942 		case RELOAD_FOR_INPUT:
5943 		  time2 = rld[i].opnum * 4 + 4;
5944 		  check_earlyclobber = 1;
5945 		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
		     == MAX_RECOG_OPERANDS * 4  */
5948 		case RELOAD_FOR_OPADDR_ADDR:
5949 		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5950 		      && ignore_address_reloads
5951 		      && ! rld[reloadnum].out)
5952 		    continue;
5953 		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5954 		  break;
5955 		case RELOAD_FOR_OPERAND_ADDRESS:
5956 		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5957 		  check_earlyclobber = 1;
5958 		  break;
5959 		case RELOAD_FOR_INSN:
5960 		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5961 		  break;
5962 		case RELOAD_FOR_OUTPUT:
5963 		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5964 		     instruction is executed.  */
5965 		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5966 		  break;
5967 		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5968 		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5969 		     value.  */
5970 		case RELOAD_FOR_OUTADDR_ADDRESS:
5971 		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5972 		      && ignore_address_reloads
5973 		      && ! rld[reloadnum].out)
5974 		    continue;
5975 		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5976 		  break;
5977 		case RELOAD_FOR_OUTPUT_ADDRESS:
5978 		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5979 		  break;
5980 		case RELOAD_OTHER:
5981 		  /* If there is no conflict in the input part, handle this
5982 		     like an output reload.  */
5983 		  if (! rld[i].in || rtx_equal_p (other_input, value))
5984 		    {
5985 		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5986 		      /* Earlyclobbered outputs must conflict with inputs.  */
5987 		      if (earlyclobber_operand_p (rld[i].out))
5988 			time2 = MAX_RECOG_OPERANDS * 4 + 3;
5989 
5990 		      break;
5991 		    }
5992 		  time2 = 1;
5993 		  /* RELOAD_OTHER might be live beyond instruction execution,
5994 		     but this is not obvious when we set time2 = 1.  So check
5995 		     here if there might be a problem with the new reload
5996 		     clobbering the register used by the RELOAD_OTHER.  */
5997 		  if (out)
5998 		    return 0;
5999 		  break;
6000 		default:
6001 		  return 0;
6002 		}
6003 	      if ((time1 >= time2
6004 		   && (! rld[i].in || rld[i].out
6005 		       || ! rtx_equal_p (other_input, value)))
6006 		  || (out && rld[reloadnum].out_reg
6007 		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
6008 		return 0;
6009 	    }
6010 	}
6011     }
6012 
6013   /* Earlyclobbered outputs must conflict with inputs.  */
6014   if (check_earlyclobber && out && earlyclobber_operand_p (out))
6015     return 0;
6016 
6017   return 1;
6018 }
6019 
6020 /* Return 1 if the value in reload reg REGNO, as used by a reload
6021    needed for the part of the insn specified by OPNUM and TYPE,
6022    may be used to load VALUE into it.
6023 
6024    MODE is the mode in which the register is used, this is needed to
6025    determine how many hard regs to test.
6026 
6027    Other read-only reloads with the same value do not conflict
6028    unless OUT is nonzero and these other reloads have to live while
6029    output reloads live.
6030    If OUT is CONST0_RTX, this is a special case: it means that the
6031    test should not be for using register REGNO as reload register, but
6032    for copying from register REGNO into the reload register.
6033 
6034    RELOADNUM is the number of the reload we want to load this value for;
6035    a reload does not conflict with itself.
6036 
6037    When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6038    reloads that load an address for the very reload we are considering.
6039 
6040    The caller has to make sure that there is no conflict with the return
6041    register.  */
6042 
6043 static int
free_for_value_p(int regno,machine_mode mode,int opnum,enum reload_type type,rtx value,rtx out,int reloadnum,int ignore_address_reloads)6044 free_for_value_p (int regno, machine_mode mode, int opnum,
6045 		  enum reload_type type, rtx value, rtx out, int reloadnum,
6046 		  int ignore_address_reloads)
6047 {
6048   int nregs = hard_regno_nregs (regno, mode);
6049   while (nregs-- > 0)
6050     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6051 				       value, out, reloadnum,
6052 				       ignore_address_reloads))
6053       return 0;
6054   return 1;
6055 }
6056 
6057 /* Return nonzero if the rtx X is invariant over the current function.  */
6058 /* ??? Actually, the places where we use this expect exactly what is
6059    tested here, and not everything that is function invariant.  In
6060    particular, the frame pointer and arg pointer are special cased;
6061    pic_offset_table_rtx is not, and we must not spill these things to
6062    memory.  */
6063 
6064 int
function_invariant_p(const_rtx x)6065 function_invariant_p (const_rtx x)
6066 {
6067   if (CONSTANT_P (x))
6068     return 1;
6069   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6070     return 1;
6071   if (GET_CODE (x) == PLUS
6072       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6073       && GET_CODE (XEXP (x, 1)) == CONST_INT)
6074     return 1;
6075   return 0;
6076 }
6077 
6078 /* Determine whether the reload reg X overlaps any rtx'es used for
6079    overriding inheritance.  Return nonzero if so.  */
6080 
6081 static int
conflicts_with_override(rtx x)6082 conflicts_with_override (rtx x)
6083 {
6084   int i;
6085   for (i = 0; i < n_reloads; i++)
6086     if (reload_override_in[i]
6087 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
6088       return 1;
6089   return 0;
6090 }
6091 
6092 /* Give an error message saying we failed to find a reload for INSN,
6093    and clear out reload R.  */
6094 static void
failed_reload(rtx_insn * insn,int r)6095 failed_reload (rtx_insn *insn, int r)
6096 {
6097   if (asm_noperands (PATTERN (insn)) < 0)
6098     /* It's the compiler's fault.  */
6099     fatal_insn ("could not find a spill register", insn);
6100 
6101   /* It's the user's fault; the operand's mode and constraint
6102      don't match.  Disable this reload so we don't crash in final.  */
6103   error_for_asm (insn,
6104 		 "%<asm%> operand constraint incompatible with operand size");
6105   rld[r].in = 0;
6106   rld[r].out = 0;
6107   rld[r].reg_rtx = 0;
6108   rld[r].optional = 1;
6109   rld[r].secondary_p = 1;
6110 }
6111 
6112 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6113    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6114    successful.  */
6115 static int
set_reload_reg(int i,int r)6116 set_reload_reg (int i, int r)
6117 {
6118   int regno;
6119   rtx reg = spill_reg_rtx[i];
6120 
6121   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6122     spill_reg_rtx[i] = reg
6123       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6124 
6125   regno = true_regnum (reg);
6126 
6127   /* Detect when the reload reg can't hold the reload mode.
6128      This used to be one `if', but Sequent compiler can't handle that.  */
6129   if (targetm.hard_regno_mode_ok (regno, rld[r].mode))
6130     {
6131       machine_mode test_mode = VOIDmode;
6132       if (rld[r].in)
6133 	test_mode = GET_MODE (rld[r].in);
6134       /* If rld[r].in has VOIDmode, it means we will load it
6135 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6136 	 We have already tested that for validity.  */
6137       /* Aside from that, we need to test that the expressions
6138 	 to reload from or into have modes which are valid for this
6139 	 reload register.  Otherwise the reload insns would be invalid.  */
6140       if (! (rld[r].in != 0 && test_mode != VOIDmode
6141 	     && !targetm.hard_regno_mode_ok (regno, test_mode)))
6142 	if (! (rld[r].out != 0
6143 	       && !targetm.hard_regno_mode_ok (regno, GET_MODE (rld[r].out))))
6144 	  {
6145 	    /* The reg is OK.  */
6146 	    last_spill_reg = i;
6147 
6148 	    /* Mark as in use for this insn the reload regs we use
6149 	       for this.  */
6150 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6151 				    rld[r].when_needed, rld[r].mode);
6152 
6153 	    rld[r].reg_rtx = reg;
6154 	    reload_spill_index[r] = spill_regs[i];
6155 	    return 1;
6156 	  }
6157     }
6158   return 0;
6159 }
6160 
6161 /* Find a spill register to use as a reload register for reload R.
6162    LAST_RELOAD is nonzero if this is the last reload for the insn being
6163    processed.
6164 
6165    Set rld[R].reg_rtx to the register allocated.
6166 
6167    We return 1 if successful, or 0 if we couldn't find a spill reg and
6168    we didn't change anything.  */
6169 
static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance I round-robin, wrapping back to 0 past the last
	     spill reg.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* REGNUM is a candidate if it is either free for this part of
	     the insn, or already holds exactly the value being reloaded
	     (free_for_value_p); and it belongs to the required class,
	     can hold the reload mode, and satisfies the sharing policy
	     of the current pass.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && targetm.hard_regno_mode_ok (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs (regnum, rld[r].mode);

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  Count NR down towards 1 while the
		 regs at REGNUM + NR - 1 check out.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      /* NR == 1 here means the whole group checked out.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6296 
6297 /* Initialize all the tables needed to allocate reload registers.
6298    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6299    is the array we use to restore the reg_rtx field for every reload.  */
6300 
static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  /* Restore every reload's reg_rtx to the state the caller saved
     before any allocation attempt was made.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Forget all inheritance and override decisions from any previous
     allocation attempt.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  /* No reload regs are in use yet for any part of this insn.  */
  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Compute the set of hard regs the insn itself uses: regs live
     throughout it, regs dead or set by it, plus the hard regs backing
     the pseudos in those sets.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  /* Likewise clear the per-operand usage sets.  */
  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Regs outside this insn's spill set are unavailable as reload
     regs.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6352 
6353 /* If X is not a subreg, return it unmodified.  If it is a subreg,
6354    look up whether we made a replacement for the SUBREG_REG.  Return
6355    either the replacement or the SUBREG_REG.  */
6356 
6357 static rtx
replaced_subreg(rtx x)6358 replaced_subreg (rtx x)
6359 {
6360   if (GET_CODE (x) == SUBREG)
6361     return find_replacement (&SUBREG_REG (x));
6362   return x;
6363 }
6364 
6365 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6366    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6367    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6368    otherwise it is NULL.  */
6369 
6370 static poly_int64
compute_reload_subreg_offset(machine_mode outermode,rtx subreg,machine_mode innermode)6371 compute_reload_subreg_offset (machine_mode outermode,
6372 			      rtx subreg,
6373 			      machine_mode innermode)
6374 {
6375   poly_int64 outer_offset;
6376   machine_mode middlemode;
6377 
6378   if (!subreg)
6379     return subreg_lowpart_offset (outermode, innermode);
6380 
6381   outer_offset = SUBREG_BYTE (subreg);
6382   middlemode = GET_MODE (SUBREG_REG (subreg));
6383 
6384   /* If SUBREG is paradoxical then return the normal lowpart offset
6385      for OUTERMODE and INNERMODE.  Our caller has already checked
6386      that OUTERMODE fits in INNERMODE.  */
6387   if (paradoxical_subreg_p (outermode, middlemode))
6388     return subreg_lowpart_offset (outermode, innermode);
6389 
6390   /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6391      plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
6392   return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6393 }
6394 
6395 /* Assign hard reg targets for the pseudo-registers we must reload
6396    into hard regs for this insn.
6397    Also output the instructions to copy them in and out of the hard regs.
6398 
6399    For machines with register classes, we are responsible for
6400    finding a reload reg in the proper class.  */
6401 
6402 static void
choose_reload_regs(struct insn_chain * chain)6403 choose_reload_regs (struct insn_chain *chain)
6404 {
6405   rtx_insn *insn = chain->insn;
6406   int i, j;
6407   unsigned int max_group_size = 1;
6408   enum reg_class group_class = NO_REGS;
6409   int pass, win, inheritance;
6410 
6411   rtx save_reload_reg_rtx[MAX_RELOADS];
6412 
6413   /* In order to be certain of getting the registers we need,
6414      we must sort the reloads into order of increasing register class.
6415      Then our grabbing of reload registers will parallel the process
6416      that provided the reload registers.
6417 
6418      Also note whether any of the reloads wants a consecutive group of regs.
6419      If so, record the maximum size of the group desired and what
6420      register class contains all the groups needed by this insn.  */
6421 
6422   for (j = 0; j < n_reloads; j++)
6423     {
6424       reload_order[j] = j;
6425       if (rld[j].reg_rtx != NULL_RTX)
6426 	{
6427 	  gcc_assert (REG_P (rld[j].reg_rtx)
6428 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6429 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6430 	}
6431       else
6432 	reload_spill_index[j] = -1;
6433 
6434       if (rld[j].nregs > 1)
6435 	{
6436 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6437 	  group_class
6438 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6439 	}
6440 
6441       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6442     }
6443 
6444   if (n_reloads > 1)
6445     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6446 
6447   /* If -O, try first with inheritance, then turning it off.
6448      If not -O, don't do inheritance.
6449      Using inheritance when not optimizing leads to paradoxes
6450      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6451      because one side of the comparison might be inherited.  */
6452   win = 0;
6453   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6454     {
6455       choose_reload_regs_init (chain, save_reload_reg_rtx);
6456 
6457       /* Process the reloads in order of preference just found.
6458 	 Beyond this point, subregs can be found in reload_reg_rtx.
6459 
6460 	 This used to look for an existing reloaded home for all of the
6461 	 reloads, and only then perform any new reloads.  But that could lose
6462 	 if the reloads were done out of reg-class order because a later
6463 	 reload with a looser constraint might have an old home in a register
6464 	 needed by an earlier reload with a tighter constraint.
6465 
6466 	 To solve this, we make two passes over the reloads, in the order
6467 	 described above.  In the first pass we try to inherit a reload
6468 	 from a previous insn.  If there is a later reload that needs a
6469 	 class that is a proper subset of the class being processed, we must
6470 	 also allocate a spill register during the first pass.
6471 
6472 	 Then make a second pass over the reloads to allocate any reloads
6473 	 that haven't been given registers yet.  */
6474 
6475       for (j = 0; j < n_reloads; j++)
6476 	{
6477 	  int r = reload_order[j];
6478 	  rtx search_equiv = NULL_RTX;
6479 
6480 	  /* Ignore reloads that got marked inoperative.  */
6481 	  if (rld[r].out == 0 && rld[r].in == 0
6482 	      && ! rld[r].secondary_p)
6483 	    continue;
6484 
6485 	  /* If find_reloads chose to use reload_in or reload_out as a reload
6486 	     register, we don't need to chose one.  Otherwise, try even if it
6487 	     found one since we might save an insn if we find the value lying
6488 	     around.
6489 	     Try also when reload_in is a pseudo without a hard reg.  */
6490 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6491 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6492 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6493 		      && !MEM_P (rld[r].in)
6494 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6495 	    continue;
6496 
6497 #if 0 /* No longer needed for correct operation.
6498 	 It might give better code, or might not; worth an experiment?  */
6499 	  /* If this is an optional reload, we can't inherit from earlier insns
6500 	     until we are sure that any non-optional reloads have been allocated.
6501 	     The following code takes advantage of the fact that optional reloads
6502 	     are at the end of reload_order.  */
6503 	  if (rld[r].optional != 0)
6504 	    for (i = 0; i < j; i++)
6505 	      if ((rld[reload_order[i]].out != 0
6506 		   || rld[reload_order[i]].in != 0
6507 		   || rld[reload_order[i]].secondary_p)
6508 		  && ! rld[reload_order[i]].optional
6509 		  && rld[reload_order[i]].reg_rtx == 0)
6510 		allocate_reload_reg (chain, reload_order[i], 0);
6511 #endif
6512 
6513 	  /* First see if this pseudo is already available as reloaded
6514 	     for a previous insn.  We cannot try to inherit for reloads
6515 	     that are smaller than the maximum number of registers needed
6516 	     for groups unless the register we would allocate cannot be used
6517 	     for the groups.
6518 
6519 	     We could check here to see if this is a secondary reload for
6520 	     an object that is already in a register of the desired class.
6521 	     This would avoid the need for the secondary reload register.
6522 	     But this is complex because we can't easily determine what
6523 	     objects might want to be loaded via this reload.  So let a
6524 	     register be allocated here.  In `emit_reload_insns' we suppress
6525 	     one of the loads in the case described above.  */
6526 
6527 	  if (inheritance)
6528 	    {
6529 	      poly_int64 byte = 0;
6530 	      int regno = -1;
6531 	      machine_mode mode = VOIDmode;
6532 	      rtx subreg = NULL_RTX;
6533 
6534 	      if (rld[r].in == 0)
6535 		;
6536 	      else if (REG_P (rld[r].in))
6537 		{
6538 		  regno = REGNO (rld[r].in);
6539 		  mode = GET_MODE (rld[r].in);
6540 		}
6541 	      else if (REG_P (rld[r].in_reg))
6542 		{
6543 		  regno = REGNO (rld[r].in_reg);
6544 		  mode = GET_MODE (rld[r].in_reg);
6545 		}
6546 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6547 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6548 		{
6549 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6550 		  if (regno < FIRST_PSEUDO_REGISTER)
6551 		    regno = subreg_regno (rld[r].in_reg);
6552 		  else
6553 		    {
6554 		      subreg = rld[r].in_reg;
6555 		      byte = SUBREG_BYTE (subreg);
6556 		    }
6557 		  mode = GET_MODE (rld[r].in_reg);
6558 		}
6559 #if AUTO_INC_DEC
6560 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6561 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6562 		{
6563 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6564 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6565 		  rld[r].out = rld[r].in;
6566 		}
6567 #endif
6568 #if 0
6569 	      /* This won't work, since REGNO can be a pseudo reg number.
6570 		 Also, it takes much more hair to keep track of all the things
6571 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6572 	      else if (GET_CODE (rld[r].in) == SUBREG
6573 		       && REG_P (SUBREG_REG (rld[r].in)))
6574 		regno = subreg_regno (rld[r].in);
6575 #endif
6576 
6577 	      if (regno >= 0
6578 		  && reg_last_reload_reg[regno] != 0
6579 		  && (known_ge
6580 		      (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])),
6581 		       GET_MODE_SIZE (mode) + byte))
6582 		  /* Verify that the register it's in can be used in
6583 		     mode MODE.  */
6584 		  && (REG_CAN_CHANGE_MODE_P
6585 		      (REGNO (reg_last_reload_reg[regno]),
6586 		       GET_MODE (reg_last_reload_reg[regno]),
6587 		       mode)))
6588 		{
6589 		  enum reg_class rclass = rld[r].rclass, last_class;
6590 		  rtx last_reg = reg_last_reload_reg[regno];
6591 
6592 		  i = REGNO (last_reg);
6593 		  byte = compute_reload_subreg_offset (mode,
6594 						       subreg,
6595 						       GET_MODE (last_reg));
6596 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6597 		  last_class = REGNO_REG_CLASS (i);
6598 
6599 		  if (reg_reloaded_contents[i] == regno
6600 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6601 		      && targetm.hard_regno_mode_ok (i, rld[r].mode)
6602 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6603 			  /* Even if we can't use this register as a reload
6604 			     register, we might use it for reload_override_in,
6605 			     if copying it to the desired class is cheap
6606 			     enough.  */
6607 			  || ((register_move_cost (mode, last_class, rclass)
6608 			       < memory_move_cost (mode, rclass, true))
6609 			      && (secondary_reload_class (1, rclass, mode,
6610 							  last_reg)
6611 				  == NO_REGS)
6612 			      && !(targetm.secondary_memory_needed
6613 				   (mode, last_class, rclass))))
6614 		      && (rld[r].nregs == max_group_size
6615 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6616 						  i))
6617 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6618 					   rld[r].when_needed, rld[r].in,
6619 					   const0_rtx, r, 1))
6620 		    {
6621 		      /* If a group is needed, verify that all the subsequent
6622 			 registers still have their values intact.  */
6623 		      int nr = hard_regno_nregs (i, rld[r].mode);
6624 		      int k;
6625 
6626 		      for (k = 1; k < nr; k++)
6627 			if (reg_reloaded_contents[i + k] != regno
6628 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6629 			  break;
6630 
6631 		      if (k == nr)
6632 			{
6633 			  int i1;
6634 			  int bad_for_class;
6635 
6636 			  last_reg = (GET_MODE (last_reg) == mode
6637 				      ? last_reg : gen_rtx_REG (mode, i));
6638 
6639 			  bad_for_class = 0;
6640 			  for (k = 0; k < nr; k++)
6641 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6642 								  i+k);
6643 
6644 			  /* We found a register that contains the
6645 			     value we need.  If this register is the
6646 			     same as an `earlyclobber' operand of the
6647 			     current insn, just mark it as a place to
6648 			     reload from since we can't use it as the
6649 			     reload register itself.  */
6650 
6651 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6652 			    if (reg_overlap_mentioned_for_reload_p
6653 				(reg_last_reload_reg[regno],
6654 				 reload_earlyclobbers[i1]))
6655 			      break;
6656 
6657 			  if (i1 != n_earlyclobbers
6658 			      || ! (free_for_value_p (i, rld[r].mode,
6659 						      rld[r].opnum,
6660 						      rld[r].when_needed, rld[r].in,
6661 						      rld[r].out, r, 1))
6662 			      /* Don't use it if we'd clobber a pseudo reg.  */
6663 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6664 				  && rld[r].out
6665 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6666 			      /* Don't clobber the frame pointer.  */
6667 			      || (i == HARD_FRAME_POINTER_REGNUM
6668 				  && frame_pointer_needed
6669 				  && rld[r].out)
6670 			      /* Don't really use the inherited spill reg
6671 				 if we need it wider than we've got it.  */
6672 			      || paradoxical_subreg_p (rld[r].mode, mode)
6673 			      || bad_for_class
6674 
6675 			      /* If find_reloads chose reload_out as reload
6676 				 register, stay with it - that leaves the
6677 				 inherited register for subsequent reloads.  */
6678 			      || (rld[r].out && rld[r].reg_rtx
6679 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6680 			    {
6681 			      if (! rld[r].optional)
6682 				{
6683 				  reload_override_in[r] = last_reg;
6684 				  reload_inheritance_insn[r]
6685 				    = reg_reloaded_insn[i];
6686 				}
6687 			    }
6688 			  else
6689 			    {
6690 			      int k;
6691 			      /* We can use this as a reload reg.  */
6692 			      /* Mark the register as in use for this part of
6693 				 the insn.  */
6694 			      mark_reload_reg_in_use (i,
6695 						      rld[r].opnum,
6696 						      rld[r].when_needed,
6697 						      rld[r].mode);
6698 			      rld[r].reg_rtx = last_reg;
6699 			      reload_inherited[r] = 1;
6700 			      reload_inheritance_insn[r]
6701 				= reg_reloaded_insn[i];
6702 			      reload_spill_index[r] = i;
6703 			      for (k = 0; k < nr; k++)
6704 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6705 						  i + k);
6706 			    }
6707 			}
6708 		    }
6709 		}
6710 	    }
6711 
6712 	  /* Here's another way to see if the value is already lying around.  */
6713 	  if (inheritance
6714 	      && rld[r].in != 0
6715 	      && ! reload_inherited[r]
6716 	      && rld[r].out == 0
6717 	      && (CONSTANT_P (rld[r].in)
6718 		  || GET_CODE (rld[r].in) == PLUS
6719 		  || REG_P (rld[r].in)
6720 		  || MEM_P (rld[r].in))
6721 	      && (rld[r].nregs == max_group_size
6722 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6723 	    search_equiv = rld[r].in;
6724 
6725 	  if (search_equiv)
6726 	    {
6727 	      rtx equiv
6728 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6729 				  -1, NULL, 0, rld[r].mode);
6730 	      int regno = 0;
6731 
6732 	      if (equiv != 0)
6733 		{
6734 		  if (REG_P (equiv))
6735 		    regno = REGNO (equiv);
6736 		  else
6737 		    {
6738 		      /* This must be a SUBREG of a hard register.
6739 			 Make a new REG since this might be used in an
6740 			 address and not all machines support SUBREGs
6741 			 there.  */
6742 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6743 		      regno = subreg_regno (equiv);
6744 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6745 		      /* If we choose EQUIV as the reload register, but the
6746 			 loop below decides to cancel the inheritance, we'll
6747 			 end up reloading EQUIV in rld[r].mode, not the mode
6748 			 it had originally.  That isn't safe when EQUIV isn't
6749 			 available as a spill register since its value might
6750 			 still be live at this point.  */
6751 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6752 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6753 			  equiv = 0;
6754 		    }
6755 		}
6756 
6757 	      /* If we found a spill reg, reject it unless it is free
6758 		 and of the desired class.  */
6759 	      if (equiv != 0)
6760 		{
6761 		  int regs_used = 0;
6762 		  int bad_for_class = 0;
6763 		  int max_regno = regno + rld[r].nregs;
6764 
6765 		  for (i = regno; i < max_regno; i++)
6766 		    {
6767 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6768 						      i);
6769 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6770 							   i);
6771 		    }
6772 
6773 		  if ((regs_used
6774 		       && ! free_for_value_p (regno, rld[r].mode,
6775 					      rld[r].opnum, rld[r].when_needed,
6776 					      rld[r].in, rld[r].out, r, 1))
6777 		      || bad_for_class)
6778 		    equiv = 0;
6779 		}
6780 
6781 	      if (equiv != 0
6782 		  && !targetm.hard_regno_mode_ok (regno, rld[r].mode))
6783 		equiv = 0;
6784 
6785 	      /* We found a register that contains the value we need.
6786 		 If this register is the same as an `earlyclobber' operand
6787 		 of the current insn, just mark it as a place to reload from
6788 		 since we can't use it as the reload register itself.  */
6789 
6790 	      if (equiv != 0)
6791 		for (i = 0; i < n_earlyclobbers; i++)
6792 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6793 							  reload_earlyclobbers[i]))
6794 		    {
6795 		      if (! rld[r].optional)
6796 			reload_override_in[r] = equiv;
6797 		      equiv = 0;
6798 		      break;
6799 		    }
6800 
6801 	      /* If the equiv register we have found is explicitly clobbered
6802 		 in the current insn, it depends on the reload type if we
6803 		 can use it, use it for reload_override_in, or not at all.
6804 		 In particular, we then can't use EQUIV for a
6805 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6806 
6807 	      if (equiv != 0)
6808 		{
6809 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6810 		    switch (rld[r].when_needed)
6811 		      {
6812 		      case RELOAD_FOR_OTHER_ADDRESS:
6813 		      case RELOAD_FOR_INPADDR_ADDRESS:
6814 		      case RELOAD_FOR_INPUT_ADDRESS:
6815 		      case RELOAD_FOR_OPADDR_ADDR:
6816 			break;
6817 		      case RELOAD_OTHER:
6818 		      case RELOAD_FOR_INPUT:
6819 		      case RELOAD_FOR_OPERAND_ADDRESS:
6820 			if (! rld[r].optional)
6821 			  reload_override_in[r] = equiv;
6822 			/* Fall through.  */
6823 		      default:
6824 			equiv = 0;
6825 			break;
6826 		      }
6827 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6828 		    switch (rld[r].when_needed)
6829 		      {
6830 		      case RELOAD_FOR_OTHER_ADDRESS:
6831 		      case RELOAD_FOR_INPADDR_ADDRESS:
6832 		      case RELOAD_FOR_INPUT_ADDRESS:
6833 		      case RELOAD_FOR_OPADDR_ADDR:
6834 		      case RELOAD_FOR_OPERAND_ADDRESS:
6835 		      case RELOAD_FOR_INPUT:
6836 			break;
6837 		      case RELOAD_OTHER:
6838 			if (! rld[r].optional)
6839 			  reload_override_in[r] = equiv;
6840 			/* Fall through.  */
6841 		      default:
6842 			equiv = 0;
6843 			break;
6844 		      }
6845 		}
6846 
6847 	      /* If we found an equivalent reg, say no code need be generated
6848 		 to load it, and use it as our reload reg.  */
6849 	      if (equiv != 0
6850 		  && (regno != HARD_FRAME_POINTER_REGNUM
6851 		      || !frame_pointer_needed))
6852 		{
6853 		  int nr = hard_regno_nregs (regno, rld[r].mode);
6854 		  int k;
6855 		  rld[r].reg_rtx = equiv;
6856 		  reload_spill_index[r] = regno;
6857 		  reload_inherited[r] = 1;
6858 
6859 		  /* If reg_reloaded_valid is not set for this register,
6860 		     there might be a stale spill_reg_store lying around.
6861 		     We must clear it, since otherwise emit_reload_insns
6862 		     might delete the store.  */
6863 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6864 		    spill_reg_store[regno] = NULL;
6865 		  /* If any of the hard registers in EQUIV are spill
6866 		     registers, mark them as in use for this insn.  */
6867 		  for (k = 0; k < nr; k++)
6868 		    {
6869 		      i = spill_reg_order[regno + k];
6870 		      if (i >= 0)
6871 			{
6872 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6873 						  rld[r].when_needed,
6874 						  rld[r].mode);
6875 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6876 					    regno + k);
6877 			}
6878 		    }
6879 		}
6880 	    }
6881 
6882 	  /* If we found a register to use already, or if this is an optional
6883 	     reload, we are done.  */
6884 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6885 	    continue;
6886 
6887 #if 0
6888 	  /* No longer needed for correct operation.  Might or might
6889 	     not give better code on the average.  Want to experiment?  */
6890 
6891 	  /* See if there is a later reload that has a class different from our
6892 	     class that intersects our class or that requires less register
6893 	     than our reload.  If so, we must allocate a register to this
6894 	     reload now, since that reload might inherit a previous reload
6895 	     and take the only available register in our class.  Don't do this
6896 	     for optional reloads since they will force all previous reloads
6897 	     to be allocated.  Also don't do this for reloads that have been
6898 	     turned off.  */
6899 
6900 	  for (i = j + 1; i < n_reloads; i++)
6901 	    {
6902 	      int s = reload_order[i];
6903 
6904 	      if ((rld[s].in == 0 && rld[s].out == 0
6905 		   && ! rld[s].secondary_p)
6906 		  || rld[s].optional)
6907 		continue;
6908 
6909 	      if ((rld[s].rclass != rld[r].rclass
6910 		   && reg_classes_intersect_p (rld[r].rclass,
6911 					       rld[s].rclass))
6912 		  || rld[s].nregs < rld[r].nregs)
6913 		break;
6914 	    }
6915 
6916 	  if (i == n_reloads)
6917 	    continue;
6918 
6919 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6920 #endif
6921 	}
6922 
6923       /* Now allocate reload registers for anything non-optional that
6924 	 didn't get one yet.  */
6925       for (j = 0; j < n_reloads; j++)
6926 	{
6927 	  int r = reload_order[j];
6928 
6929 	  /* Ignore reloads that got marked inoperative.  */
6930 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6931 	    continue;
6932 
6933 	  /* Skip reloads that already have a register allocated or are
6934 	     optional.  */
6935 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6936 	    continue;
6937 
6938 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6939 	    break;
6940 	}
6941 
6942       /* If that loop got all the way, we have won.  */
6943       if (j == n_reloads)
6944 	{
6945 	  win = 1;
6946 	  break;
6947 	}
6948 
6949       /* Loop around and try without any inheritance.  */
6950     }
6951 
6952   if (! win)
6953     {
6954       /* First undo everything done by the failed attempt
6955 	 to allocate with inheritance.  */
6956       choose_reload_regs_init (chain, save_reload_reg_rtx);
6957 
6958       /* Some sanity tests to verify that the reloads found in the first
6959 	 pass are identical to the ones we have now.  */
6960       gcc_assert (chain->n_reloads == n_reloads);
6961 
6962       for (i = 0; i < n_reloads; i++)
6963 	{
6964 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6965 	    continue;
6966 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6967 	  for (j = 0; j < n_spills; j++)
6968 	    if (spill_regs[j] == chain->rld[i].regno)
6969 	      if (! set_reload_reg (j, i))
6970 		failed_reload (chain->insn, i);
6971 	}
6972     }
6973 
6974   /* If we thought we could inherit a reload, because it seemed that
6975      nothing else wanted the same reload register earlier in the insn,
6976      verify that assumption, now that all reloads have been assigned.
6977      Likewise for reloads where reload_override_in has been set.  */
6978 
6979   /* If doing expensive optimizations, do one preliminary pass that doesn't
6980      cancel any inheritance, but removes reloads that have been needed only
6981      for reloads that we know can be inherited.  */
6982   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6983     {
6984       for (j = 0; j < n_reloads; j++)
6985 	{
6986 	  int r = reload_order[j];
6987 	  rtx check_reg;
6988 	  rtx tem;
6989 	  if (reload_inherited[r] && rld[r].reg_rtx)
6990 	    check_reg = rld[r].reg_rtx;
6991 	  else if (reload_override_in[r]
6992 		   && (REG_P (reload_override_in[r])
6993 		       || GET_CODE (reload_override_in[r]) == SUBREG))
6994 	    check_reg = reload_override_in[r];
6995 	  else
6996 	    continue;
6997 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6998 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
6999 				  (reload_inherited[r]
7000 				   ? rld[r].out : const0_rtx),
7001 				  r, 1))
7002 	    {
7003 	      if (pass)
7004 		continue;
7005 	      reload_inherited[r] = 0;
7006 	      reload_override_in[r] = 0;
7007 	    }
7008 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7009 	     reload_override_in, then we do not need its related
7010 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7011 	     likewise for other reload types.
7012 	     We handle this by removing a reload when its only replacement
7013 	     is mentioned in reload_in of the reload we are going to inherit.
7014 	     A special case are auto_inc expressions; even if the input is
7015 	     inherited, we still need the address for the output.  We can
7016 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
7017 	     If we succeeded removing some reload and we are doing a preliminary
7018 	     pass just to remove such reloads, make another pass, since the
7019 	     removal of one reload might allow us to inherit another one.  */
7020 	  else if (rld[r].in
7021 		   && rld[r].out != rld[r].in
7022 		   && remove_address_replacements (rld[r].in))
7023 	    {
7024 	      if (pass)
7025 	        pass = 2;
7026 	    }
7027 	  /* If we needed a memory location for the reload, we also have to
7028 	     remove its related reloads.  */
7029 	  else if (rld[r].in
7030 		   && rld[r].out != rld[r].in
7031 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7032 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
7033 		   && (targetm.secondary_memory_needed
7034 		       (rld[r].inmode, REGNO_REG_CLASS (REGNO (tem)),
7035 			rld[r].rclass))
7036 		   && remove_address_replacements
7037 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7038 					  rld[r].when_needed)))
7039 	    {
7040 	      if (pass)
7041 	        pass = 2;
7042 	    }
7043 	}
7044     }
7045 
7046   /* Now that reload_override_in is known valid,
7047      actually override reload_in.  */
7048   for (j = 0; j < n_reloads; j++)
7049     if (reload_override_in[j])
7050       rld[j].in = reload_override_in[j];
7051 
7052   /* If this reload won't be done because it has been canceled or is
7053      optional and not inherited, clear reload_reg_rtx so other
7054      routines (such as subst_reloads) don't get confused.  */
7055   for (j = 0; j < n_reloads; j++)
7056     if (rld[j].reg_rtx != 0
7057 	&& ((rld[j].optional && ! reload_inherited[j])
7058 	    || (rld[j].in == 0 && rld[j].out == 0
7059 		&& ! rld[j].secondary_p)))
7060       {
7061 	int regno = true_regnum (rld[j].reg_rtx);
7062 
7063 	if (spill_reg_order[regno] >= 0)
7064 	  clear_reload_reg_in_use (regno, rld[j].opnum,
7065 				   rld[j].when_needed, rld[j].mode);
7066 	rld[j].reg_rtx = 0;
7067 	reload_spill_index[j] = -1;
7068       }
7069 
7070   /* Record which pseudos and which spill regs have output reloads.  */
7071   for (j = 0; j < n_reloads; j++)
7072     {
7073       int r = reload_order[j];
7074 
7075       i = reload_spill_index[r];
7076 
7077       /* I is nonneg if this reload uses a register.
7078 	 If rld[r].reg_rtx is 0, this is an optional reload
7079 	 that we opted to ignore.  */
7080       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7081 	  && rld[r].reg_rtx != 0)
7082 	{
7083 	  int nregno = REGNO (rld[r].out_reg);
7084 	  int nr = 1;
7085 
7086 	  if (nregno < FIRST_PSEUDO_REGISTER)
7087 	    nr = hard_regno_nregs (nregno, rld[r].mode);
7088 
7089 	  while (--nr >= 0)
7090 	    SET_REGNO_REG_SET (&reg_has_output_reload,
7091 			       nregno + nr);
7092 
7093 	  if (i >= 0)
7094 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7095 
7096 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
7097 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7098 		      || rld[r].when_needed == RELOAD_FOR_INSN);
7099 	}
7100     }
7101 }
7102 
7103 /* Deallocate the reload register for reload R.  This is called from
7104    remove_address_replacements.  */
7105 
7106 void
deallocate_reload_reg(int r)7107 deallocate_reload_reg (int r)
7108 {
7109   int regno;
7110 
7111   if (! rld[r].reg_rtx)
7112     return;
7113   regno = true_regnum (rld[r].reg_rtx);
7114   rld[r].reg_rtx = 0;
7115   if (spill_reg_order[regno] >= 0)
7116     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7117 			     rld[r].mode);
7118   reload_spill_index[r] = -1;
7119 }
7120 
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each accumulates an insn sequence (built via push_to_sequence in
   emit_input_reload_insns) for one RELOAD_FOR_* / RELOAD_OTHER
   category, keyed per operand where the category is per-operand.  */

/* RELOAD_FOR_INPUT reloads, one sequence per operand.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OTHER_ADDRESS reloads.  */
static rtx_insn *other_input_address_reload_insns = 0;
/* RELOAD_OTHER input reloads.  */
static rtx_insn *other_input_reload_insns = 0;
/* RELOAD_FOR_INPUT_ADDRESS reloads, per operand.  */
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_INPADDR_ADDRESS reloads, per operand.  */
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* Output reloads per operand -- NOTE(review): filled by the output
   counterpart of emit_input_reload_insns, outside this view; confirm.  */
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTPUT_ADDRESS reloads, per operand.  */
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTADDR_ADDRESS reloads, per operand.  */
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OPERAND_ADDRESS reloads.  */
static rtx_insn *operand_reload_insns = 0;
/* RELOAD_FOR_OPADDR_ADDR reloads.  */
static rtx_insn *other_operand_reload_insns = 0;
/* Per-operand output reloads -- NOTE(review): used by code outside this
   view; presumably RELOAD_OTHER output reloads -- confirm.  */
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard regs whose reloaded value died -- NOTE(review): set and consumed
   outside this view; confirm exact semantics in emit_reload_insns.  */
static HARD_REG_SET reg_reloaded_died;
7139 
7140 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7141    of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7142    is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7143    adjusted register, and return true.  Otherwise, return false.  */
7144 static bool
reload_adjust_reg_for_temp(rtx * reload_reg,rtx alt_reload_reg,enum reg_class new_class,machine_mode new_mode)7145 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7146 			    enum reg_class new_class,
7147 			    machine_mode new_mode)
7148 
7149 {
7150   rtx reg;
7151 
7152   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7153     {
7154       unsigned regno = REGNO (reg);
7155 
7156       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7157 	continue;
7158       if (GET_MODE (reg) != new_mode)
7159 	{
7160 	  if (!targetm.hard_regno_mode_ok (regno, new_mode))
7161 	    continue;
7162 	  if (hard_regno_nregs (regno, new_mode) > REG_NREGS (reg))
7163 	    continue;
7164 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7165 	}
7166       *reload_reg = reg;
7167       return true;
7168     }
7169   return false;
7170 }
7171 
7172 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7173    pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7174    nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7175    adjusted register, and return true.  Otherwise, return false.  */
7176 static bool
reload_adjust_reg_for_icode(rtx * reload_reg,rtx alt_reload_reg,enum insn_code icode)7177 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7178 			     enum insn_code icode)
7179 
7180 {
7181   enum reg_class new_class = scratch_reload_class (icode);
7182   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7183 
7184   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7185 				     new_class, new_mode);
7186 }
7187 
7188 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7189    has the number J.  OLD contains the value to be used as input.  */
7190 
7191 static void
emit_input_reload_insns(struct insn_chain * chain,struct reload * rl,rtx old,int j)7192 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7193 			 rtx old, int j)
7194 {
7195   rtx_insn *insn = chain->insn;
7196   rtx reloadreg;
7197   rtx oldequiv_reg = 0;
7198   rtx oldequiv = 0;
7199   int special = 0;
7200   machine_mode mode;
7201   rtx_insn **where;
7202 
7203   /* delete_output_reload is only invoked properly if old contains
7204      the original pseudo register.  Since this is replaced with a
7205      hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7206      find the pseudo in RELOAD_IN_REG.  This is also used to
7207      determine whether a secondary reload is needed.  */
7208   if (reload_override_in[j]
7209       && (REG_P (rl->in_reg)
7210 	  || (GET_CODE (rl->in_reg) == SUBREG
7211 	      && REG_P (SUBREG_REG (rl->in_reg)))))
7212     {
7213       oldequiv = old;
7214       old = rl->in_reg;
7215     }
7216   if (oldequiv == 0)
7217     oldequiv = old;
7218   else if (REG_P (oldequiv))
7219     oldequiv_reg = oldequiv;
7220   else if (GET_CODE (oldequiv) == SUBREG)
7221     oldequiv_reg = SUBREG_REG (oldequiv);
7222 
7223   reloadreg = reload_reg_rtx_for_input[j];
7224   mode = GET_MODE (reloadreg);
7225 
7226   /* If we are reloading from a register that was recently stored in
7227      with an output-reload, see if we can prove there was
7228      actually no need to store the old value in it.  */
7229 
7230   if (optimize && REG_P (oldequiv)
7231       && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7232       && spill_reg_store[REGNO (oldequiv)]
7233       && REG_P (old)
7234       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7235 	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7236 			  rl->out_reg)))
7237     delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7238 
7239   /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7240      OLDEQUIV.  */
7241 
7242   while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7243     oldequiv = SUBREG_REG (oldequiv);
7244   if (GET_MODE (oldequiv) != VOIDmode
7245       && mode != GET_MODE (oldequiv))
7246     oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7247 
7248   /* Switch to the right place to emit the reload insns.  */
7249   switch (rl->when_needed)
7250     {
7251     case RELOAD_OTHER:
7252       where = &other_input_reload_insns;
7253       break;
7254     case RELOAD_FOR_INPUT:
7255       where = &input_reload_insns[rl->opnum];
7256       break;
7257     case RELOAD_FOR_INPUT_ADDRESS:
7258       where = &input_address_reload_insns[rl->opnum];
7259       break;
7260     case RELOAD_FOR_INPADDR_ADDRESS:
7261       where = &inpaddr_address_reload_insns[rl->opnum];
7262       break;
7263     case RELOAD_FOR_OUTPUT_ADDRESS:
7264       where = &output_address_reload_insns[rl->opnum];
7265       break;
7266     case RELOAD_FOR_OUTADDR_ADDRESS:
7267       where = &outaddr_address_reload_insns[rl->opnum];
7268       break;
7269     case RELOAD_FOR_OPERAND_ADDRESS:
7270       where = &operand_reload_insns;
7271       break;
7272     case RELOAD_FOR_OPADDR_ADDR:
7273       where = &other_operand_reload_insns;
7274       break;
7275     case RELOAD_FOR_OTHER_ADDRESS:
7276       where = &other_input_address_reload_insns;
7277       break;
7278     default:
7279       gcc_unreachable ();
7280     }
7281 
7282   push_to_sequence (*where);
7283 
7284   /* Auto-increment addresses must be reloaded in a special way.  */
7285   if (rl->out && ! rl->out_reg)
7286     {
7287       /* We are not going to bother supporting the case where a
7288 	 incremented register can't be copied directly from
7289 	 OLDEQUIV since this seems highly unlikely.  */
7290       gcc_assert (rl->secondary_in_reload < 0);
7291 
7292       if (reload_inherited[j])
7293 	oldequiv = reloadreg;
7294 
7295       old = XEXP (rl->in_reg, 0);
7296 
7297       /* Prevent normal processing of this reload.  */
7298       special = 1;
7299       /* Output a special code sequence for this case.  */
7300       inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7301     }
7302 
7303   /* If we are reloading a pseudo-register that was set by the previous
7304      insn, see if we can get rid of that pseudo-register entirely
7305      by redirecting the previous insn into our reload register.  */
7306 
7307   else if (optimize && REG_P (old)
7308 	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
7309 	   && dead_or_set_p (insn, old)
7310 	   /* This is unsafe if some other reload
7311 	      uses the same reg first.  */
7312 	   && ! conflicts_with_override (reloadreg)
7313 	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7314 				rl->when_needed, old, rl->out, j, 0))
7315     {
7316       rtx_insn *temp = PREV_INSN (insn);
7317       while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7318 	temp = PREV_INSN (temp);
7319       if (temp
7320 	  && NONJUMP_INSN_P (temp)
7321 	  && GET_CODE (PATTERN (temp)) == SET
7322 	  && SET_DEST (PATTERN (temp)) == old
7323 	  /* Make sure we can access insn_operand_constraint.  */
7324 	  && asm_noperands (PATTERN (temp)) < 0
7325 	  /* This is unsafe if operand occurs more than once in current
7326 	     insn.  Perhaps some occurrences aren't reloaded.  */
7327 	  && count_occurrences (PATTERN (insn), old, 0) == 1)
7328 	{
7329 	  rtx old = SET_DEST (PATTERN (temp));
7330 	  /* Store into the reload register instead of the pseudo.  */
7331 	  SET_DEST (PATTERN (temp)) = reloadreg;
7332 
7333 	  /* Verify that resulting insn is valid.
7334 
7335 	     Note that we have replaced the destination of TEMP with
7336 	     RELOADREG.  If TEMP references RELOADREG within an
7337 	     autoincrement addressing mode, then the resulting insn
7338 	     is ill-formed and we must reject this optimization.  */
7339 	  extract_insn (temp);
7340 	  if (constrain_operands (1, get_enabled_alternatives (temp))
7341 	      && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
7342 	    {
7343 	      /* If the previous insn is an output reload, the source is
7344 		 a reload register, and its spill_reg_store entry will
7345 		 contain the previous destination.  This is now
7346 		 invalid.  */
7347 	      if (REG_P (SET_SRC (PATTERN (temp)))
7348 		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7349 		{
7350 		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7351 		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7352 		}
7353 
7354 	      /* If these are the only uses of the pseudo reg,
7355 		 pretend for GDB it lives in the reload reg we used.  */
7356 	      if (REG_N_DEATHS (REGNO (old)) == 1
7357 		  && REG_N_SETS (REGNO (old)) == 1)
7358 		{
7359 		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
7360 		  if (ira_conflicts_p)
7361 		    /* Inform IRA about the change.  */
7362 		    ira_mark_allocation_change (REGNO (old));
7363 		  alter_reg (REGNO (old), -1, false);
7364 		}
7365 	      special = 1;
7366 
7367 	      /* Adjust any debug insns between temp and insn.  */
7368 	      while ((temp = NEXT_INSN (temp)) != insn)
7369 		if (DEBUG_BIND_INSN_P (temp))
7370 		  INSN_VAR_LOCATION_LOC (temp)
7371 		    = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
7372 					    old, reloadreg);
7373 		else
7374 		  gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp));
7375 	    }
7376 	  else
7377 	    {
7378 	      SET_DEST (PATTERN (temp)) = old;
7379 	    }
7380 	}
7381     }
7382 
7383   /* We can't do that, so output an insn to load RELOADREG.  */
7384 
7385   /* If we have a secondary reload, pick up the secondary register
7386      and icode, if any.  If OLDEQUIV and OLD are different or
7387      if this is an in-out reload, recompute whether or not we
7388      still need a secondary register and what the icode should
7389      be.  If we still need a secondary register and the class or
7390      icode is different, go back to reloading from OLD if using
7391      OLDEQUIV means that we got the wrong type of register.  We
7392      cannot have different class or icode due to an in-out reload
7393      because we don't make such reloads when both the input and
7394      output need secondary reload registers.  */
7395 
7396   if (! special && rl->secondary_in_reload >= 0)
7397     {
7398       rtx second_reload_reg = 0;
7399       rtx third_reload_reg = 0;
7400       int secondary_reload = rl->secondary_in_reload;
7401       rtx real_oldequiv = oldequiv;
7402       rtx real_old = old;
7403       rtx tmp;
7404       enum insn_code icode;
7405       enum insn_code tertiary_icode = CODE_FOR_nothing;
7406 
7407       /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7408 	 and similarly for OLD.
7409 	 See comments in get_secondary_reload in reload.c.  */
7410       /* If it is a pseudo that cannot be replaced with its
7411 	 equivalent MEM, we must fall back to reload_in, which
7412 	 will have all the necessary substitutions registered.
7413 	 Likewise for a pseudo that can't be replaced with its
7414 	 equivalent constant.
7415 
7416 	 Take extra care for subregs of such pseudos.  Note that
7417 	 we cannot use reg_equiv_mem in this case because it is
7418 	 not in the right mode.  */
7419 
7420       tmp = oldequiv;
7421       if (GET_CODE (tmp) == SUBREG)
7422 	tmp = SUBREG_REG (tmp);
7423       if (REG_P (tmp)
7424 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7425 	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7426 	      || reg_equiv_constant (REGNO (tmp)) != 0))
7427 	{
7428 	  if (! reg_equiv_mem (REGNO (tmp))
7429 	      || num_not_at_initial_offset
7430 	      || GET_CODE (oldequiv) == SUBREG)
7431 	    real_oldequiv = rl->in;
7432 	  else
7433 	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
7434 	}
7435 
7436       tmp = old;
7437       if (GET_CODE (tmp) == SUBREG)
7438 	tmp = SUBREG_REG (tmp);
7439       if (REG_P (tmp)
7440 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7441 	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7442 	      || reg_equiv_constant (REGNO (tmp)) != 0))
7443 	{
7444 	  if (! reg_equiv_mem (REGNO (tmp))
7445 	      || num_not_at_initial_offset
7446 	      || GET_CODE (old) == SUBREG)
7447 	    real_old = rl->in;
7448 	  else
7449 	    real_old = reg_equiv_mem (REGNO (tmp));
7450 	}
7451 
7452       second_reload_reg = rld[secondary_reload].reg_rtx;
7453       if (rld[secondary_reload].secondary_in_reload >= 0)
7454 	{
7455 	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7456 
7457 	  third_reload_reg = rld[tertiary_reload].reg_rtx;
7458 	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
7459 	  /* We'd have to add more code for quartary reloads.  */
7460 	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7461 	}
7462       icode = rl->secondary_in_icode;
7463 
7464       if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7465 	  || (rl->in != 0 && rl->out != 0))
7466 	{
7467 	  secondary_reload_info sri, sri2;
7468 	  enum reg_class new_class, new_t_class;
7469 
7470 	  sri.icode = CODE_FOR_nothing;
7471 	  sri.prev_sri = NULL;
7472 	  new_class
7473 	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7474 							 rl->rclass, mode,
7475 							 &sri);
7476 
7477 	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7478 	    second_reload_reg = 0;
7479 	  else if (new_class == NO_REGS)
7480 	    {
7481 	      if (reload_adjust_reg_for_icode (&second_reload_reg,
7482 					       third_reload_reg,
7483 					       (enum insn_code) sri.icode))
7484 		{
7485 		  icode = (enum insn_code) sri.icode;
7486 		  third_reload_reg = 0;
7487 		}
7488 	      else
7489 		{
7490 		  oldequiv = old;
7491 		  real_oldequiv = real_old;
7492 		}
7493 	    }
7494 	  else if (sri.icode != CODE_FOR_nothing)
7495 	    /* We currently lack a way to express this in reloads.  */
7496 	    gcc_unreachable ();
7497 	  else
7498 	    {
7499 	      sri2.icode = CODE_FOR_nothing;
7500 	      sri2.prev_sri = &sri;
7501 	      new_t_class
7502 		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7503 							     new_class, mode,
7504 							     &sri);
7505 	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7506 		{
7507 		  if (reload_adjust_reg_for_temp (&second_reload_reg,
7508 						  third_reload_reg,
7509 						  new_class, mode))
7510 		    {
7511 		      third_reload_reg = 0;
7512 		      tertiary_icode = (enum insn_code) sri2.icode;
7513 		    }
7514 		  else
7515 		    {
7516 		      oldequiv = old;
7517 		      real_oldequiv = real_old;
7518 		    }
7519 		}
7520 	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7521 		{
7522 		  rtx intermediate = second_reload_reg;
7523 
7524 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7525 						  new_class, mode)
7526 		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7527 						      ((enum insn_code)
7528 						       sri2.icode)))
7529 		    {
7530 		      second_reload_reg = intermediate;
7531 		      tertiary_icode = (enum insn_code) sri2.icode;
7532 		    }
7533 		  else
7534 		    {
7535 		      oldequiv = old;
7536 		      real_oldequiv = real_old;
7537 		    }
7538 		}
7539 	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7540 		{
7541 		  rtx intermediate = second_reload_reg;
7542 
7543 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7544 						  new_class, mode)
7545 		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7546 						      new_t_class, mode))
7547 		    {
7548 		      second_reload_reg = intermediate;
7549 		      tertiary_icode = (enum insn_code) sri2.icode;
7550 		    }
7551 		  else
7552 		    {
7553 		      oldequiv = old;
7554 		      real_oldequiv = real_old;
7555 		    }
7556 		}
7557 	      else
7558 		{
7559 		  /* This could be handled more intelligently too.  */
7560 		  oldequiv = old;
7561 		  real_oldequiv = real_old;
7562 		}
7563 	    }
7564 	}
7565 
7566       /* If we still need a secondary reload register, check
7567 	 to see if it is being used as a scratch or intermediate
7568 	 register and generate code appropriately.  If we need
7569 	 a scratch register, use REAL_OLDEQUIV since the form of
7570 	 the insn may depend on the actual address if it is
7571 	 a MEM.  */
7572 
7573       if (second_reload_reg)
7574 	{
7575 	  if (icode != CODE_FOR_nothing)
7576 	    {
7577 	      /* We'd have to add extra code to handle this case.  */
7578 	      gcc_assert (!third_reload_reg);
7579 
7580 	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7581 					  second_reload_reg));
7582 	      special = 1;
7583 	    }
7584 	  else
7585 	    {
7586 	      /* See if we need a scratch register to load the
7587 		 intermediate register (a tertiary reload).  */
7588 	      if (tertiary_icode != CODE_FOR_nothing)
7589 		{
7590 		  emit_insn ((GEN_FCN (tertiary_icode)
7591 			      (second_reload_reg, real_oldequiv,
7592 			       third_reload_reg)));
7593 		}
7594 	      else if (third_reload_reg)
7595 		{
7596 		  gen_reload (third_reload_reg, real_oldequiv,
7597 			      rl->opnum,
7598 			      rl->when_needed);
7599 		  gen_reload (second_reload_reg, third_reload_reg,
7600 			      rl->opnum,
7601 			      rl->when_needed);
7602 		}
7603 	      else
7604 		gen_reload (second_reload_reg, real_oldequiv,
7605 			    rl->opnum,
7606 			    rl->when_needed);
7607 
7608 	      oldequiv = second_reload_reg;
7609 	    }
7610 	}
7611     }
7612 
7613   if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7614     {
7615       rtx real_oldequiv = oldequiv;
7616 
7617       if ((REG_P (oldequiv)
7618 	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7619 	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7620 	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
7621 	  || (GET_CODE (oldequiv) == SUBREG
7622 	      && REG_P (SUBREG_REG (oldequiv))
7623 	      && (REGNO (SUBREG_REG (oldequiv))
7624 		  >= FIRST_PSEUDO_REGISTER)
7625 	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7626 		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7627 	  || (CONSTANT_P (oldequiv)
7628 	      && (targetm.preferred_reload_class (oldequiv,
7629 						  REGNO_REG_CLASS (REGNO (reloadreg)))
7630 		  == NO_REGS)))
7631 	real_oldequiv = rl->in;
7632       gen_reload (reloadreg, real_oldequiv, rl->opnum,
7633 		  rl->when_needed);
7634     }
7635 
7636   if (cfun->can_throw_non_call_exceptions)
7637     copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7638 
7639   /* End this sequence.  */
7640   *where = get_insns ();
7641   end_sequence ();
7642 
7643   /* Update reload_override_in so that delete_address_reloads_1
7644      can see the actual register usage.  */
7645   if (oldequiv_reg)
7646     reload_override_in[j] = oldequiv;
7647 }
7648 
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The insns are accumulated into the
   per-operand output_reload_insns sequences (or other_output_reload_insns
   for RELOAD_OTHER reloads) and spliced in after the insn later.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* Nonzero once a secondary-reload insn has already performed the
     actual store, so the generic final copy must be suppressed.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  /* Collect the new insns in the sequence that matches when this reload
     is needed; RELOAD_OTHER output reloads are kept in their own list.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* If OLD is a spilled pseudo with a known memory location, store
	 to the memory location itself rather than the dead pseudo.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* Scratch case: the secondary-reload pattern does the whole
		 store, with RELOADREG as its scratch operand.  */
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    /* An optional reload (spill index < 0) whose register is the
	       source of INSN's single set: remember which hard reg now
	       holds the value so later insns can inherit it.  */
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Save the generated sequence back into the list it came from.
     RELOAD_OTHER output reloads are accumulated in descending reload
     order by re-emitting the previous list first.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  /* Propagate EH region notes onto the new insns when non-call insns
     may throw.  */
  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7854 
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Chooses the mode to reload in, records the reload
   register in reload_reg_rtx_for_input, emits the input-reload insns when
   needed, and deletes a now-redundant earlier output reload if possible.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* The value being reloaded: prefer the original register (in_reg) when
     rl->in has already been replaced by a stack-slot MEM.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_input[j] = reg_rtx;

  /* Emit the actual load unless the reload was inherited from a
     previous insn (in which case the value is already in the reg).  */
  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
7953 
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Chooses the mode to reload in, records the
   reload register in reload_reg_rtx_for_output, tries to delete a previous
   now-dead store, and either patches a REG_UNUSED note or emits the output
   reload insns.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_output[j] = reg_rtx;

  /* If the pseudo being stored was recently loaded from a spill reg that
     still holds its value, the earlier store into that spill reg may be
     dead; try to delete it.  */
  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  /* Nothing to emit if there is no destination, no reload register, or
     the value is already in the right place.  */
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && (note = find_reg_note (insn, REG_UNUSED,
				     SUBREG_REG (old))) != 0)
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8046 
8047 /* A reload copies values of MODE from register SRC to register DEST.
8048    Return true if it can be treated for inheritance purposes like a
8049    group of reloads, each one reloading a single hard register.  The
8050    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8051    occupy the same number of hard registers.  */
8052 
8053 static bool
inherit_piecemeal_p(int dest ATTRIBUTE_UNUSED,int src ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED)8054 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8055 		     int src ATTRIBUTE_UNUSED,
8056 		     machine_mode mode ATTRIBUTE_UNUSED)
8057 {
8058   return (REG_CAN_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8059 	  && REG_CAN_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8060 }
8061 
8062 /* Output insns to reload values in and out of the chosen reload regs.  */
8063 
8064 static void
emit_reload_insns(struct insn_chain * chain)8065 emit_reload_insns (struct insn_chain *chain)
8066 {
8067   rtx_insn *insn = chain->insn;
8068 
8069   int j;
8070 
8071   CLEAR_HARD_REG_SET (reg_reloaded_died);
8072 
8073   for (j = 0; j < reload_n_operands; j++)
8074     input_reload_insns[j] = input_address_reload_insns[j]
8075       = inpaddr_address_reload_insns[j]
8076       = output_reload_insns[j] = output_address_reload_insns[j]
8077       = outaddr_address_reload_insns[j]
8078       = other_output_reload_insns[j] = 0;
8079   other_input_address_reload_insns = 0;
8080   other_input_reload_insns = 0;
8081   operand_reload_insns = 0;
8082   other_operand_reload_insns = 0;
8083 
8084   /* Dump reloads into the dump file.  */
8085   if (dump_file)
8086     {
8087       fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8088       debug_reload_to_stream (dump_file);
8089     }
8090 
8091   for (j = 0; j < n_reloads; j++)
8092     if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8093       {
8094 	unsigned int i;
8095 
8096 	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8097 	  new_spill_reg_store[i] = 0;
8098       }
8099 
8100   /* Now output the instructions to copy the data into and out of the
8101      reload registers.  Do these in the order that the reloads were reported,
8102      since reloads of base and index registers precede reloads of operands
8103      and the operands may need the base and index registers reloaded.  */
8104 
8105   for (j = 0; j < n_reloads; j++)
8106     {
8107       do_input_reload (chain, rld + j, j);
8108       do_output_reload (chain, rld + j, j);
8109     }
8110 
8111   /* Now write all the insns we made for reloads in the order expected by
8112      the allocation functions.  Prior to the insn being reloaded, we write
8113      the following reloads:
8114 
8115      RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8116 
8117      RELOAD_OTHER reloads.
8118 
8119      For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8120      by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8121      RELOAD_FOR_INPUT reload for the operand.
8122 
8123      RELOAD_FOR_OPADDR_ADDRS reloads.
8124 
8125      RELOAD_FOR_OPERAND_ADDRESS reloads.
8126 
8127      After the insn being reloaded, we write the following:
8128 
8129      For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8130      by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8131      RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8132      reloads for the operand.  The RELOAD_OTHER output reloads are
8133      output in descending order by reload number.  */
8134 
8135   emit_insn_before (other_input_address_reload_insns, insn);
8136   emit_insn_before (other_input_reload_insns, insn);
8137 
8138   for (j = 0; j < reload_n_operands; j++)
8139     {
8140       emit_insn_before (inpaddr_address_reload_insns[j], insn);
8141       emit_insn_before (input_address_reload_insns[j], insn);
8142       emit_insn_before (input_reload_insns[j], insn);
8143     }
8144 
8145   emit_insn_before (other_operand_reload_insns, insn);
8146   emit_insn_before (operand_reload_insns, insn);
8147 
8148   for (j = 0; j < reload_n_operands; j++)
8149     {
8150       rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8151       x = emit_insn_after (output_address_reload_insns[j], x);
8152       x = emit_insn_after (output_reload_insns[j], x);
8153       emit_insn_after (other_output_reload_insns[j], x);
8154     }
8155 
8156   /* For all the spill regs newly reloaded in this instruction,
8157      record what they were reloaded from, so subsequent instructions
8158      can inherit the reloads.
8159 
8160      Update spill_reg_store for the reloads of this insn.
8161      Copy the elements that were updated in the loop above.  */
8162 
8163   for (j = 0; j < n_reloads; j++)
8164     {
8165       int r = reload_order[j];
8166       int i = reload_spill_index[r];
8167 
8168       /* If this is a non-inherited input reload from a pseudo, we must
8169 	 clear any memory of a previous store to the same pseudo.  Only do
8170 	 something if there will not be an output reload for the pseudo
8171 	 being reloaded.  */
8172       if (rld[r].in_reg != 0
8173 	  && ! (reload_inherited[r] || reload_override_in[r]))
8174 	{
8175 	  rtx reg = rld[r].in_reg;
8176 
8177 	  if (GET_CODE (reg) == SUBREG)
8178 	    reg = SUBREG_REG (reg);
8179 
8180 	  if (REG_P (reg)
8181 	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8182 	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8183 	    {
8184 	      int nregno = REGNO (reg);
8185 
8186 	      if (reg_last_reload_reg[nregno])
8187 		{
8188 		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
8189 
8190 		  if (reg_reloaded_contents[last_regno] == nregno)
8191 		    spill_reg_store[last_regno] = 0;
8192 		}
8193 	    }
8194 	}
8195 
8196       /* I is nonneg if this reload used a register.
8197 	 If rld[r].reg_rtx is 0, this is an optional reload
8198 	 that we opted to ignore.  */
8199 
8200       if (i >= 0 && rld[r].reg_rtx != 0)
8201 	{
8202 	  int nr = hard_regno_nregs (i, GET_MODE (rld[r].reg_rtx));
8203 	  int k;
8204 
8205 	  /* For a multi register reload, we need to check if all or part
8206 	     of the value lives to the end.  */
8207 	  for (k = 0; k < nr; k++)
8208 	    if (reload_reg_reaches_end_p (i + k, r))
8209 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8210 
8211 	  /* Maybe the spill reg contains a copy of reload_out.  */
8212 	  if (rld[r].out != 0
8213 	      && (REG_P (rld[r].out)
8214 		  || (rld[r].out_reg
8215 		      ? REG_P (rld[r].out_reg)
8216 		      /* The reload value is an auto-modification of
8217 			 some kind.  For PRE_INC, POST_INC, PRE_DEC
8218 			 and POST_DEC, we record an equivalence
8219 			 between the reload register and the operand
8220 			 on the optimistic assumption that we can make
8221 			 the equivalence hold.  reload_as_needed must
8222 			 then either make it hold or invalidate the
8223 			 equivalence.
8224 
8225 			 PRE_MODIFY and POST_MODIFY addresses are reloaded
8226 			 somewhat differently, and allowing them here leads
8227 			 to problems.  */
8228 		      : (GET_CODE (rld[r].out) != POST_MODIFY
8229 			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8230 	    {
8231 	      rtx reg;
8232 
8233 	      reg = reload_reg_rtx_for_output[r];
8234 	      if (reload_reg_rtx_reaches_end_p (reg, r))
8235 		{
8236 		  machine_mode mode = GET_MODE (reg);
8237 		  int regno = REGNO (reg);
8238 		  int nregs = REG_NREGS (reg);
8239 		  rtx out = (REG_P (rld[r].out)
8240 			     ? rld[r].out
8241 			     : rld[r].out_reg
8242 			     ? rld[r].out_reg
8243 /* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
8244 		  int out_regno = REGNO (out);
8245 		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8246 				   : hard_regno_nregs (out_regno, mode));
8247 		  bool piecemeal;
8248 
8249 		  spill_reg_store[regno] = new_spill_reg_store[regno];
8250 		  spill_reg_stored_to[regno] = out;
8251 		  reg_last_reload_reg[out_regno] = reg;
8252 
8253 		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8254 			       && nregs == out_nregs
8255 			       && inherit_piecemeal_p (out_regno, regno, mode));
8256 
8257 		  /* If OUT_REGNO is a hard register, it may occupy more than
8258 		     one register.  If it does, say what is in the
8259 		     rest of the registers assuming that both registers
8260 		     agree on how many words the object takes.  If not,
8261 		     invalidate the subsequent registers.  */
8262 
8263 		  if (HARD_REGISTER_NUM_P (out_regno))
8264 		    for (k = 1; k < out_nregs; k++)
8265 		      reg_last_reload_reg[out_regno + k]
8266 			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
8267 
8268 		  /* Now do the inverse operation.  */
8269 		  for (k = 0; k < nregs; k++)
8270 		    {
8271 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8272 		      reg_reloaded_contents[regno + k]
8273 			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8274 			   ? out_regno
8275 			   : out_regno + k);
8276 		      reg_reloaded_insn[regno + k] = insn;
8277 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8278 		      if (targetm.hard_regno_call_part_clobbered (regno + k,
8279 								  mode))
8280 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8281 					  regno + k);
8282 		      else
8283 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8284 					    regno + k);
8285 		    }
8286 		}
8287 	    }
8288 	  /* Maybe the spill reg contains a copy of reload_in.  Only do
8289 	     something if there will not be an output reload for
8290 	     the register being reloaded.  */
8291 	  else if (rld[r].out_reg == 0
8292 		   && rld[r].in != 0
8293 		   && ((REG_P (rld[r].in)
8294 			&& !HARD_REGISTER_P (rld[r].in)
8295 			&& !REGNO_REG_SET_P (&reg_has_output_reload,
8296 					     REGNO (rld[r].in)))
8297 		       || (REG_P (rld[r].in_reg)
8298 			   && !REGNO_REG_SET_P (&reg_has_output_reload,
8299 						REGNO (rld[r].in_reg))))
8300 		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8301 	    {
8302 	      rtx reg;
8303 
8304 	      reg = reload_reg_rtx_for_input[r];
8305 	      if (reload_reg_rtx_reaches_end_p (reg, r))
8306 		{
8307 		  machine_mode mode;
8308 		  int regno;
8309 		  int nregs;
8310 		  int in_regno;
8311 		  int in_nregs;
8312 		  rtx in;
8313 		  bool piecemeal;
8314 
8315 		  mode = GET_MODE (reg);
8316 		  regno = REGNO (reg);
8317 		  nregs = REG_NREGS (reg);
8318 		  if (REG_P (rld[r].in)
8319 		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8320 		    in = rld[r].in;
8321 		  else if (REG_P (rld[r].in_reg))
8322 		    in = rld[r].in_reg;
8323 		  else
8324 		    in = XEXP (rld[r].in_reg, 0);
8325 		  in_regno = REGNO (in);
8326 
8327 		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8328 			      : hard_regno_nregs (in_regno, mode));
8329 
8330 		  reg_last_reload_reg[in_regno] = reg;
8331 
8332 		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8333 			       && nregs == in_nregs
8334 			       && inherit_piecemeal_p (regno, in_regno, mode));
8335 
8336 		  if (HARD_REGISTER_NUM_P (in_regno))
8337 		    for (k = 1; k < in_nregs; k++)
8338 		      reg_last_reload_reg[in_regno + k]
8339 			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
8340 
8341 		  /* Unless we inherited this reload, show we haven't
8342 		     recently done a store.
8343 		     Previous stores of inherited auto_inc expressions
8344 		     also have to be discarded.  */
8345 		  if (! reload_inherited[r]
8346 		      || (rld[r].out && ! rld[r].out_reg))
8347 		    spill_reg_store[regno] = 0;
8348 
8349 		  for (k = 0; k < nregs; k++)
8350 		    {
8351 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8352 		      reg_reloaded_contents[regno + k]
8353 			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8354 			   ? in_regno
8355 			   : in_regno + k);
8356 		      reg_reloaded_insn[regno + k] = insn;
8357 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8358 		      if (targetm.hard_regno_call_part_clobbered (regno + k,
8359 								  mode))
8360 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8361 					  regno + k);
8362 		      else
8363 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8364 					    regno + k);
8365 		    }
8366 		}
8367 	    }
8368 	}
8369 
8370       /* The following if-statement was #if 0'd in 1.34 (or before...).
8371 	 It's reenabled in 1.35 because supposedly nothing else
8372 	 deals with this problem.  */
8373 
8374       /* If a register gets output-reloaded from a non-spill register,
8375 	 that invalidates any previous reloaded copy of it.
8376 	 But forget_old_reloads_1 won't get to see it, because
8377 	 it thinks only about the original insn.  So invalidate it here.
8378 	 Also do the same thing for RELOAD_OTHER constraints where the
8379 	 output is discarded.  */
8380       if (i < 0
8381 	  && ((rld[r].out != 0
8382 	       && (REG_P (rld[r].out)
8383 		   || (MEM_P (rld[r].out)
8384 		       && REG_P (rld[r].out_reg))))
8385 	      || (rld[r].out == 0 && rld[r].out_reg
8386 		  && REG_P (rld[r].out_reg))))
8387 	{
8388 	  rtx out = ((rld[r].out && REG_P (rld[r].out))
8389 		     ? rld[r].out : rld[r].out_reg);
8390 	  int out_regno = REGNO (out);
8391 	  machine_mode mode = GET_MODE (out);
8392 
8393 	  /* REG_RTX is now set or clobbered by the main instruction.
8394 	     As the comment above explains, forget_old_reloads_1 only
8395 	     sees the original instruction, and there is no guarantee
8396 	     that the original instruction also clobbered REG_RTX.
8397 	     For example, if find_reloads sees that the input side of
8398 	     a matched operand pair dies in this instruction, it may
8399 	     use the input register as the reload register.
8400 
8401 	     Calling forget_old_reloads_1 is a waste of effort if
8402 	     REG_RTX is also the output register.
8403 
8404 	     If we know that REG_RTX holds the value of a pseudo
8405 	     register, the code after the call will record that fact.  */
8406 	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8407 	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8408 
8409 	  if (!HARD_REGISTER_NUM_P (out_regno))
8410 	    {
8411 	      rtx src_reg;
8412 	      rtx_insn *store_insn = NULL;
8413 
8414 	      reg_last_reload_reg[out_regno] = 0;
8415 
8416 	      /* If we can find a hard register that is stored, record
8417 		 the storing insn so that we may delete this insn with
8418 		 delete_output_reload.  */
8419 	      src_reg = reload_reg_rtx_for_output[r];
8420 
8421 	      if (src_reg)
8422 		{
8423 		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
8424 		    store_insn = new_spill_reg_store[REGNO (src_reg)];
8425 		  else
8426 		    src_reg = NULL_RTX;
8427 		}
8428 	      else
8429 		{
8430 		  /* If this is an optional reload, try to find the
8431 		     source reg from an input reload.  */
8432 		  rtx set = single_set (insn);
8433 		  if (set && SET_DEST (set) == rld[r].out)
8434 		    {
8435 		      int k;
8436 
8437 		      src_reg = SET_SRC (set);
8438 		      store_insn = insn;
8439 		      for (k = 0; k < n_reloads; k++)
8440 			{
8441 			  if (rld[k].in == src_reg)
8442 			    {
8443 			      src_reg = reload_reg_rtx_for_input[k];
8444 			      break;
8445 			    }
8446 			}
8447 		    }
8448 		}
8449 	      if (src_reg && REG_P (src_reg)
8450 		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8451 		{
8452 		  int src_regno, src_nregs, k;
8453 		  rtx note;
8454 
8455 		  gcc_assert (GET_MODE (src_reg) == mode);
8456 		  src_regno = REGNO (src_reg);
8457 		  src_nregs = hard_regno_nregs (src_regno, mode);
8458 		  /* The place where to find a death note varies with
8459 		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
8460 		     necessarily checked exactly in the code that moves
8461 		     notes, so just check both locations.  */
8462 		  note = find_regno_note (insn, REG_DEAD, src_regno);
8463 		  if (! note && store_insn)
8464 		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
8465 		  for (k = 0; k < src_nregs; k++)
8466 		    {
8467 		      spill_reg_store[src_regno + k] = store_insn;
8468 		      spill_reg_stored_to[src_regno + k] = out;
8469 		      reg_reloaded_contents[src_regno + k] = out_regno;
8470 		      reg_reloaded_insn[src_regno + k] = store_insn;
8471 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8472 		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8473 		      if (targetm.hard_regno_call_part_clobbered
8474 			  (src_regno + k, mode))
8475 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8476 					  src_regno + k);
8477 		      else
8478 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8479 					    src_regno + k);
8480 		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8481 		      if (note)
8482 			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8483 		      else
8484 			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8485 		    }
8486 		  reg_last_reload_reg[out_regno] = src_reg;
8487 		  /* We have to set reg_has_output_reload here, or else
8488 		     forget_old_reloads_1 will clear reg_last_reload_reg
8489 		     right away.  */
8490 		  SET_REGNO_REG_SET (&reg_has_output_reload,
8491 				     out_regno);
8492 		}
8493 	    }
8494 	  else
8495 	    {
8496 	      int k, out_nregs = hard_regno_nregs (out_regno, mode);
8497 
8498 	      for (k = 0; k < out_nregs; k++)
8499 		reg_last_reload_reg[out_regno + k] = 0;
8500 	    }
8501 	}
8502     }
8503   IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8504 }
8505 
8506 /* Go through the motions to emit INSN and test if it is strictly valid.
8507    Return the emitted insn if valid, else return NULL.  */
8508 
8509 static rtx_insn *
emit_insn_if_valid_for_reload(rtx pat)8510 emit_insn_if_valid_for_reload (rtx pat)
8511 {
8512   rtx_insn *last = get_last_insn ();
8513   int code;
8514 
8515   rtx_insn *insn = emit_insn (pat);
8516   code = recog_memoized (insn);
8517 
8518   if (code >= 0)
8519     {
8520       extract_insn (insn);
8521       /* We want constrain operands to treat this insn strictly in its
8522 	 validity determination, i.e., the way it would after reload has
8523 	 completed.  */
8524       if (constrain_operands (1, get_enabled_alternatives (insn)))
8525 	return insn;
8526     }
8527 
8528   delete_insns_since (last);
8529   return NULL;
8530 }
8531 
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
  rtx tem1, tem2;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any pending reload replacements into the operands
	 before building the new PLUS.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Probe for the target's add pattern in this mode so that
	 insn_operand_matches below can test OP1 against the pattern's
	 operand predicate.  */
      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      /* Reloading OP1 into OUT must not clobber OP0, which is still
	 needed as the second operand of the add below.  */
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && targetm.secondary_memory_needed (GET_MODE (out),
					       REGNO_REG_CLASS (REGNO (tem1)),
					       REGNO_REG_CLASS (REGNO (tem2))))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Copy IN into the secondary memory, then load OUT from it.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
						  out_moded));
      rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* Both strategies failed; this reload cannot be performed.
	 NOTE(review): SET is necessarily null here (the plain SET above
	 failed), so fatal_insn reports with a null insn.  */
      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

  else if (targetm.have_reload_load_address ())
    emit_insn (targetm.gen_reload_load_address (out, in));

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8760 
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;

      /* With auto-inc reloads (OUT set but no OUT_REG), the relevant
	 register sits inside the IN_REG address expression.  */
      if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);

      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count how many times the pseudo (and its memory equivalent, if any)
     appears in INSN; more occurrences than inherited reloads means the
     stored value is still needed.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  nregs = REG_NREGS (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs (last_reload_reg, GET_MODE (reg)); k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* Can't retire the stack slot; just delete the dead output reload
	 (and any address reloads feeding it).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8939 
8940 /* We are going to delete DEAD_INSN.  Recursively delete loads of
8941    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8942    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8943 static void
delete_address_reloads(rtx_insn * dead_insn,rtx_insn * current_insn)8944 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8945 {
8946   rtx set = single_set (dead_insn);
8947   rtx set2, dst;
8948   rtx_insn *prev, *next;
8949   if (set)
8950     {
8951       rtx dst = SET_DEST (set);
8952       if (MEM_P (dst))
8953 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8954     }
8955   /* If we deleted the store from a reloaded post_{in,de}c expression,
8956      we can delete the matching adds.  */
8957   prev = PREV_INSN (dead_insn);
8958   next = NEXT_INSN (dead_insn);
8959   if (! prev || ! next)
8960     return;
8961   set = single_set (next);
8962   set2 = single_set (prev);
8963   if (! set || ! set2
8964       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8965       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8966       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8967     return;
8968   dst = SET_DEST (set);
8969   if (! rtx_equal_p (dst, SET_DEST (set2))
8970       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8971       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8972       || (INTVAL (XEXP (SET_SRC (set), 1))
8973 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8974     return;
8975   delete_related_insns (prev);
8976   delete_related_insns (next);
8977 }
8978 
/* Subfunction of delete_address_reloads: process registers found in X.
   X is (part of) the address used by DEAD_INSN; CURRENT_INSN is the insn
   being reloaded.  If a reload register in X was loaded solely for
   DEAD_INSN's sake, delete that load too (recursively).  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For a non-register, recurse into every sub-expression and vector
     element; only REGs are processed below.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* An intervening use of X means its value is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* Only touch insns that reload itself generated (UID at or after
     reload_first_uid).  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV only loads the reload register for DEAD_INSN's benefit:
     recursively clean up the address PREV used, then delete PREV.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9087 
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* For {PRE,POST}_MODIFY the increment is explicit in the rtx; otherwise
     build it from INC_AMOUNT, negating for decrements.  */
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, Pmode);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      /* Accept the emitted add only if it is recognized and strictly
	 satisfies its constraints (as after reload).  */
      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      /* Undo the increment: subtract INC (or add its negation when it is
	 a constant, so a plain add pattern suffices).  */
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9196 
9197 static void
add_auto_inc_notes(rtx_insn * insn,rtx x)9198 add_auto_inc_notes (rtx_insn *insn, rtx x)
9199 {
9200   enum rtx_code code = GET_CODE (x);
9201   const char *fmt;
9202   int i, j;
9203 
9204   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9205     {
9206       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9207       return;
9208     }
9209 
9210   /* Scan all the operand sub-expressions.  */
9211   fmt = GET_RTX_FORMAT (code);
9212   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9213     {
9214       if (fmt[i] == 'e')
9215 	add_auto_inc_notes (insn, XEXP (x, i));
9216       else if (fmt[i] == 'E')
9217 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9218 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9219     }
9220 }
9221