1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "tm_p.h"
30 #include "optabs.h"
31 #include "regs.h"
32 #include "ira.h"
33 #include "recog.h"
34 
35 #include "rtl-error.h"
36 #include "expr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "reload.h"
41 #include "except.h"
42 #include "dumpfile.h"
43 #include "rtl-iter.h"
44 
45 /* This file contains the reload pass of the compiler, which is
46    run after register allocation has been done.  It checks that
47    each insn is valid (operands required to be in registers really
48    are in registers of the proper class) and fixes up invalid ones
49    by copying values temporarily into registers for the insns
50    that need them.
51 
52    The results of register allocation are described by the vector
53    reg_renumber; the insns still contain pseudo regs, but reg_renumber
54    can be used to find which hard reg, if any, a pseudo reg is in.
55 
56    The technique we always use is to free up a few hard regs that are
57    called ``reload regs'', and for each place where a pseudo reg
58    must be in a hard reg, copy it temporarily into one of the reload regs.
59 
60    Reload regs are allocated locally for every instruction that needs
61    reloads.  When there are pseudos which are allocated to a register that
62    has been chosen as a reload reg, such pseudos must be ``spilled''.
63    This means that they go to other hard regs, or to stack slots if no other
64    available hard regs can be found.  Spilling can invalidate more
65    insns, requiring additional need for reloads, so we must keep checking
66    until the process stabilizes.
67 
68    For machines with different classes of registers, we must keep track
69    of the register class needed for each reload, and make sure that
70    we allocate enough reload registers of each class.
71 
72    The file reload.c contains the code that checks one insn for
73    validity and reports the reloads that it needs.  This file
74    is in charge of scanning the entire rtl code, accumulating the
75    reload needs, spilling, assigning reload registers to use for
76    fixing up each insn, and generating the new insns to copy values
77    into the reload registers.  */
78 
79 struct target_reload default_target_reload;
80 #if SWITCHABLE_TARGET
81 struct target_reload *this_target_reload = &default_target_reload;
82 #endif
83 
84 #define spill_indirect_levels			\
85   (this_target_reload->x_spill_indirect_levels)
86 
87 /* During reload_as_needed, element N contains a REG rtx for the hard reg
88    into which reg N has been reloaded (perhaps for a previous insn).  */
89 static rtx *reg_last_reload_reg;
90 
91 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
92    for an output reload that stores into reg N.  */
93 static regset_head reg_has_output_reload;
94 
95 /* Indicates which hard regs are reload-registers for an output reload
96    in the current insn.  */
97 static HARD_REG_SET reg_is_output_reload;
98 
99 /* Widest width in which each pseudo reg is referred to (via subreg).  */
100 static unsigned int *reg_max_ref_width;
101 
102 /* Vector to remember old contents of reg_renumber before spilling.  */
103 static short *reg_old_renumber;
104 
105 /* During reload_as_needed, element N contains the last pseudo regno reloaded
106    into hard register N.  If that pseudo reg occupied more than one register,
107    reg_reloaded_contents points to that pseudo for each spill register in
108    use; all of these must remain set for an inheritance to occur.  */
109 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
110 
111 /* During reload_as_needed, element N contains the insn for which
112    hard register N was last used.   Its contents are significant only
113    when reg_reloaded_valid is set for this register.  */
114 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
115 
116 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
117 static HARD_REG_SET reg_reloaded_valid;
118 /* Indicate if the register was dead at the end of the reload.
119    This is only valid if reg_reloaded_contents is set and valid.  */
120 static HARD_REG_SET reg_reloaded_dead;
121 
122 /* Indicate whether the register's current value is one that is not
123    safe to retain across a call, even for registers that are normally
124    call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
125 static HARD_REG_SET reg_reloaded_call_part_clobbered;
126 
127 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
128 static int n_spills;
129 
130 /* In parallel with spill_regs, contains REG rtx's for those regs.
131    Holds the last rtx used for any given reg, or 0 if it has never
132    been used for spilling yet.  This rtx is reused, provided it has
133    the proper mode.  */
134 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
135 
136 /* In parallel with spill_regs, contains nonzero for a spill reg
137    that was stored after the last time it was used.
138    The precise value is the insn generated to do the store.  */
139 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
140 
141 /* This is the register that was stored with spill_reg_store.  This is a
142    copy of reload_out / reload_out_reg when the value was stored; if
143    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
144 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
145 
146 /* This table is the inverse mapping of spill_regs:
147    indexed by hard reg number,
148    it contains the position of that reg in spill_regs,
149    or -1 for something that is not in spill_regs.
150 
151    ?!?  This is no longer accurate.  */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153 
154 /* This reg set indicates registers that can't be used as spill registers for
155    the currently processed insn.  These are the hard registers which are live
156    during the insn, but not allocated to pseudos, as well as fixed
157    registers.  */
158 static HARD_REG_SET bad_spill_regs;
159 
160 /* These are the hard registers that can't be used as spill register for any
161    insn.  This includes registers used for user variables and registers that
162    we can't eliminate.  A register that appears in this set also can't be used
163    to retry register allocation.  */
164 static HARD_REG_SET bad_spill_regs_global;
165 
166 /* Describes order of use of registers for reloading
167    of spilled pseudo-registers.  `n_spills' is the number of
168    elements that are actually valid; new ones are added at the end.
169 
170    Both spill_regs and spill_reg_order are used on two occasions:
171    once during find_reload_regs, where they keep track of the spill registers
172    for a single insn, but also during reload_as_needed where they show all
173    the registers ever used by reload.  For the latter case, the information
174    is calculated during finish_spills.  */
175 static short spill_regs[FIRST_PSEUDO_REGISTER];
176 
177 /* This vector of reg sets indicates, for each pseudo, which hard registers
178    may not be used for retrying global allocation because the register was
179    formerly spilled from one of them.  If we allowed reallocating a pseudo to
180    a register that it was already allocated to, reload might not
181    terminate.  */
182 static HARD_REG_SET *pseudo_previous_regs;
183 
184 /* This vector of reg sets indicates, for each pseudo, which hard
185    registers may not be used for retrying global allocation because they
186    are used as spill registers during one of the insns in which the
187    pseudo is live.  */
188 static HARD_REG_SET *pseudo_forbidden_regs;
189 
190 /* All hard regs that have been used as spill registers for any insn are
191    marked in this set.  */
192 static HARD_REG_SET used_spill_regs;
193 
194 /* Index of last register assigned as a spill register.  We allocate in
195    a round-robin fashion.  */
196 static int last_spill_reg;
197 
198 /* Record the stack slot for each spilled hard register.  */
199 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
200 
201 /* Width allocated so far for that stack slot.  */
202 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
203 
204 /* Record which pseudos needed to be spilled.  */
205 static regset_head spilled_pseudos;
206 
207 /* Record which pseudos changed their allocation in finish_spills.  */
208 static regset_head changed_allocation_pseudos;
209 
210 /* Used for communication between order_regs_for_reload and count_pseudo.
211    Used to avoid counting one pseudo twice.  */
212 static regset_head pseudos_counted;
213 
214 /* First uid used by insns created by reload in this function.
215    Used in find_equiv_reg.  */
216 int reload_first_uid;
217 
218 /* Flag set by local-alloc or global-alloc if anything is live in
219    a call-clobbered reg across calls.  */
220 int caller_save_needed;
221 
222 /* Set to 1 while reload_as_needed is operating.
223    Required by some machines to handle any generated moves differently.  */
224 int reload_in_progress = 0;
225 
226 /* This obstack is used for allocation of rtl during register elimination.
227    The allocated storage can be freed once find_reloads has processed the
228    insn.  */
229 static struct obstack reload_obstack;
230 
231 /* Points to the beginning of the reload_obstack.  All insn_chain structures
232    are allocated first.  */
233 static char *reload_startobj;
234 
235 /* The point after all insn_chain structures.  Used to quickly deallocate
236    memory allocated in copy_reloads during calculate_needs_all_insns.  */
237 static char *reload_firstobj;
238 
239 /* This points before all local rtl generated by register elimination.
240    Used to quickly free all memory after processing one insn.  */
241 static char *reload_insn_firstobj;
242 
243 /* List of insn_chain instructions, one for every insn that reload needs to
244    examine.  */
245 struct insn_chain *reload_insn_chain;
246 
247 /* TRUE if we potentially left dead insns in the insn stream and want to
248    run DCE immediately after reload, FALSE otherwise.  */
249 static bool need_dce;
250 
251 /* List of all insns needing reloads.  */
252 static struct insn_chain *insns_need_reload;
253 
254 /* This structure is used to record information about register eliminations.
255    Each array entry describes one possible way of eliminating a register
256    in favor of another.   If there is more than one way of eliminating a
257    particular register, the most preferred should be specified first.  */
258 
259 struct elim_table
260 {
261   int from;			/* Register number to be eliminated.  */
262   int to;			/* Register number used as replacement.  */
263   HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
264   int can_eliminate;		/* Nonzero if this elimination can be done.  */
265   int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
266 				   target hook in previous scan over insns
267 				   made by reload.  */
268   HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
269   HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
270   int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
271   rtx from_rtx;			/* REG rtx for the register to be eliminated.
272 				   We cannot simply compare the number since
273 				   we might then spuriously replace a hard
274 				   register corresponding to a pseudo
275 				   assigned to the reg to be eliminated.  */
276   rtx to_rtx;			/* REG rtx for the replacement.  */
277 };
278 
279 static struct elim_table *reg_eliminate = 0;
280 
281 /* This is an intermediate structure to initialize the table.  It has
282    exactly the members provided by ELIMINABLE_REGS.  */
283 static const struct elim_table_1
284 {
285   const int from;
286   const int to;
287 } reg_eliminate_1[] =
288 
289 /* If a set of eliminable registers was specified, define the table from it.
290    Otherwise, default to the normal case of the frame pointer being
291    replaced by the stack pointer.  */
292 
293 #ifdef ELIMINABLE_REGS
294   ELIMINABLE_REGS;
295 #else
296   {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
297 #endif
298 
299 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
300 
301 /* Record the number of pending eliminations that have an offset not equal
302    to their initial offset.  If nonzero, we use a new copy of each
303    replacement result in any insns encountered.  */
304 int num_not_at_initial_offset;
305 
306 /* Count the number of registers that we may be able to eliminate.  */
307 static int num_eliminable;
308 /* And the number of registers that are equivalent to a constant that
309    can be eliminated to frame_pointer / arg_pointer + constant.  */
310 static int num_eliminable_invariants;
311 
312 /* For each label, we record the offset of each elimination.  If we reach
313    a label by more than one path and an offset differs, we cannot do the
314    elimination.  This information is indexed by the difference of the
315    number of the label and the first label number.  We can't offset the
316    pointer itself as this can cause problems on machines with segmented
317    memory.  The first table is an array of flags that records whether we
318    have yet encountered a label and the second table is an array of arrays,
319    one entry in the latter array for each elimination.  */
320 
321 static int first_label_num;
322 static char *offsets_known_at;
323 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
324 
325 vec<reg_equivs_t, va_gc> *reg_equivs;
326 
327 /* Stack of addresses where an rtx has been changed.  We can undo the
328    changes by popping items off the stack and restoring the original
329    value at each location.
330 
331    We use this simplistic undo capability rather than copy_rtx as copy_rtx
332    will not make a deep copy of a normally sharable rtx, such as
333    (const (plus (symbol_ref) (const_int))).  If such an expression appears
334    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
335    rtx expression would be changed.  See PR 42431.  */
336 
337 typedef rtx *rtx_p;
338 static vec<rtx_p> substitute_stack;
339 
340 /* Number of labels in the current function.  */
341 
342 static int num_labels;
343 
344 static void replace_pseudos_in (rtx *, machine_mode, rtx);
345 static void maybe_fix_stack_asms (void);
346 static void copy_reloads (struct insn_chain *);
347 static void calculate_needs_all_insns (int);
348 static int find_reg (struct insn_chain *, int);
349 static void find_reload_regs (struct insn_chain *);
350 static void select_reload_regs (void);
351 static void delete_caller_save_insns (void);
352 
353 static void spill_failure (rtx_insn *, enum reg_class);
354 static void count_spilled_pseudo (int, int, int);
355 static void delete_dead_insn (rtx_insn *);
356 static void alter_reg (int, int, bool);
357 static void set_label_offsets (rtx, rtx_insn *, int);
358 static void check_eliminable_occurrences (rtx);
359 static void elimination_effects (rtx, machine_mode);
360 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
361 static int eliminate_regs_in_insn (rtx_insn *, int);
362 static void update_eliminable_offsets (void);
363 static void mark_not_eliminable (rtx, const_rtx, void *);
364 static void set_initial_elim_offsets (void);
365 static bool verify_initial_elim_offsets (void);
366 static void set_initial_label_offsets (void);
367 static void set_offsets_for_label (rtx_insn *);
368 static void init_eliminable_invariants (rtx_insn *, bool);
369 static void init_elim_table (void);
370 static void free_reg_equiv (void);
371 static void update_eliminables (HARD_REG_SET *);
372 static bool update_eliminables_and_spill (void);
373 static void elimination_costs_in_insn (rtx_insn *);
374 static void spill_hard_reg (unsigned int, int);
375 static int finish_spills (int);
376 static void scan_paradoxical_subregs (rtx);
377 static void count_pseudo (int);
378 static void order_regs_for_reload (struct insn_chain *);
379 static void reload_as_needed (int);
380 static void forget_old_reloads_1 (rtx, const_rtx, void *);
381 static void forget_marked_reloads (regset);
382 static int reload_reg_class_lower (const void *, const void *);
383 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
384 				    machine_mode);
385 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
386 				     machine_mode);
387 static int reload_reg_free_p (unsigned int, int, enum reload_type);
388 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
389 					rtx, rtx, int, int);
390 static int free_for_value_p (int, machine_mode, int, enum reload_type,
391 			     rtx, rtx, int, int);
392 static int allocate_reload_reg (struct insn_chain *, int, int);
393 static int conflicts_with_override (rtx);
394 static void failed_reload (rtx_insn *, int);
395 static int set_reload_reg (int, int);
396 static void choose_reload_regs_init (struct insn_chain *, rtx *);
397 static void choose_reload_regs (struct insn_chain *);
398 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
399 				     rtx, int);
400 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
401 				      int);
402 static void do_input_reload (struct insn_chain *, struct reload *, int);
403 static void do_output_reload (struct insn_chain *, struct reload *, int);
404 static void emit_reload_insns (struct insn_chain *);
405 static void delete_output_reload (rtx_insn *, int, int, rtx);
406 static void delete_address_reloads (rtx_insn *, rtx_insn *);
407 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
408 static void inc_for_reload (rtx, rtx, rtx, int);
409 static void add_auto_inc_notes (rtx_insn *, rtx);
410 static void substitute (rtx *, const_rtx, rtx);
411 static bool gen_reload_chain_without_interm_reg_p (int, int);
412 static int reloads_conflict (int, int);
413 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
414 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
415 
416 /* Initialize the reload pass.  This is called at the beginning of compilation
417    and may be called again if the target is reinitialized.  */
418 
419 void
init_reload(void)420 init_reload (void)
421 {
422   int i;
423 
424   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
425      Set spill_indirect_levels to the number of levels such addressing is
426      permitted, zero if it is not permitted at all.  */
427 
428   rtx tem
429     = gen_rtx_MEM (Pmode,
430 		   gen_rtx_PLUS (Pmode,
431 				 gen_rtx_REG (Pmode,
432 					      LAST_VIRTUAL_REGISTER + 1),
433 				 gen_int_mode (4, Pmode)));
434   spill_indirect_levels = 0;
435 
436   while (memory_address_p (QImode, tem))
437     {
438       spill_indirect_levels++;
439       tem = gen_rtx_MEM (Pmode, tem);
440     }
441 
442   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
443 
444   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
445   indirect_symref_ok = memory_address_p (QImode, tem);
446 
447   /* See if reg+reg is a valid (and offsettable) address.  */
448 
449   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
450     {
451       tem = gen_rtx_PLUS (Pmode,
452 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
453 			  gen_rtx_REG (Pmode, i));
454 
455       /* This way, we make sure that reg+reg is an offsettable address.  */
456       tem = plus_constant (Pmode, tem, 4);
457 
458       if (memory_address_p (QImode, tem))
459 	{
460 	  double_reg_address_ok = 1;
461 	  break;
462 	}
463     }
464 
465   /* Initialize obstack for our rtl allocation.  */
466   if (reload_startobj == NULL)
467     {
468       gcc_obstack_init (&reload_obstack);
469       reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
470     }
471 
472   INIT_REG_SET (&spilled_pseudos);
473   INIT_REG_SET (&changed_allocation_pseudos);
474   INIT_REG_SET (&pseudos_counted);
475 }
476 
477 /* List of insn chains that are currently unused.  */
478 static struct insn_chain *unused_insn_chains = 0;
479 
480 /* Allocate an empty insn_chain structure.  */
481 struct insn_chain *
new_insn_chain(void)482 new_insn_chain (void)
483 {
484   struct insn_chain *c;
485 
486   if (unused_insn_chains == 0)
487     {
488       c = XOBNEW (&reload_obstack, struct insn_chain);
489       INIT_REG_SET (&c->live_throughout);
490       INIT_REG_SET (&c->dead_or_set);
491     }
492   else
493     {
494       c = unused_insn_chains;
495       unused_insn_chains = c->next;
496     }
497   c->is_caller_save_insn = 0;
498   c->need_operand_change = 0;
499   c->need_reload = 0;
500   c->need_elim = 0;
501   return c;
502 }
503 
504 /* Small utility function to set all regs in hard reg set TO which are
505    allocated to pseudos in regset FROM.  */
506 
507 void
compute_use_by_pseudos(HARD_REG_SET * to,regset from)508 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
509 {
510   unsigned int regno;
511   reg_set_iterator rsi;
512 
513   EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
514     {
515       int r = reg_renumber[regno];
516 
517       if (r < 0)
518 	{
519 	  /* reload_combine uses the information from DF_LIVE_IN,
520 	     which might still contain registers that have not
521 	     actually been allocated since they have an
522 	     equivalence.  */
523 	  gcc_assert (ira_conflicts_p || reload_completed);
524 	}
525       else
526 	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
527     }
528 }
529 
530 /* Replace all pseudos found in LOC with their corresponding
531    equivalences.  */
532 
533 static void
replace_pseudos_in(rtx * loc,machine_mode mem_mode,rtx usage)534 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
535 {
536   rtx x = *loc;
537   enum rtx_code code;
538   const char *fmt;
539   int i, j;
540 
541   if (! x)
542     return;
543 
544   code = GET_CODE (x);
545   if (code == REG)
546     {
547       unsigned int regno = REGNO (x);
548 
549       if (regno < FIRST_PSEUDO_REGISTER)
550 	return;
551 
552       x = eliminate_regs_1 (x, mem_mode, usage, true, false);
553       if (x != *loc)
554 	{
555 	  *loc = x;
556 	  replace_pseudos_in (loc, mem_mode, usage);
557 	  return;
558 	}
559 
560       if (reg_equiv_constant (regno))
561 	*loc = reg_equiv_constant (regno);
562       else if (reg_equiv_invariant (regno))
563 	*loc = reg_equiv_invariant (regno);
564       else if (reg_equiv_mem (regno))
565 	*loc = reg_equiv_mem (regno);
566       else if (reg_equiv_address (regno))
567 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
568       else
569 	{
570 	  gcc_assert (!REG_P (regno_reg_rtx[regno])
571 		      || REGNO (regno_reg_rtx[regno]) != regno);
572 	  *loc = regno_reg_rtx[regno];
573 	}
574 
575       return;
576     }
577   else if (code == MEM)
578     {
579       replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
580       return;
581     }
582 
583   /* Process each of our operands recursively.  */
584   fmt = GET_RTX_FORMAT (code);
585   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
586     if (*fmt == 'e')
587       replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
588     else if (*fmt == 'E')
589       for (j = 0; j < XVECLEN (x, i); j++)
590 	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
591 }
592 
593 /* Determine if the current function has an exception receiver block
594    that reaches the exit block via non-exceptional edges  */
595 
596 static bool
has_nonexceptional_receiver(void)597 has_nonexceptional_receiver (void)
598 {
599   edge e;
600   edge_iterator ei;
601   basic_block *tos, *worklist, bb;
602 
603   /* If we're not optimizing, then just err on the safe side.  */
604   if (!optimize)
605     return true;
606 
607   /* First determine which blocks can reach exit via normal paths.  */
608   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
609 
610   FOR_EACH_BB_FN (bb, cfun)
611     bb->flags &= ~BB_REACHABLE;
612 
613   /* Place the exit block on our worklist.  */
614   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
615   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
616 
617   /* Iterate: find everything reachable from what we've already seen.  */
618   while (tos != worklist)
619     {
620       bb = *--tos;
621 
622       FOR_EACH_EDGE (e, ei, bb->preds)
623 	if (!(e->flags & EDGE_ABNORMAL))
624 	  {
625 	    basic_block src = e->src;
626 
627 	    if (!(src->flags & BB_REACHABLE))
628 	      {
629 		src->flags |= BB_REACHABLE;
630 		*tos++ = src;
631 	      }
632 	  }
633     }
634   free (worklist);
635 
636   /* Now see if there's a reachable block with an exceptional incoming
637      edge.  */
638   FOR_EACH_BB_FN (bb, cfun)
639     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
640       return true;
641 
642   /* No exceptional block reached exit unexceptionally.  */
643   return false;
644 }
645 
646 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
647    zero elements) to MAX_REG_NUM elements.
648 
649    Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
650 void
grow_reg_equivs(void)651 grow_reg_equivs (void)
652 {
653   int old_size = vec_safe_length (reg_equivs);
654   int max_regno = max_reg_num ();
655   int i;
656   reg_equivs_t ze;
657 
658   memset (&ze, 0, sizeof (reg_equivs_t));
659   vec_safe_reserve (reg_equivs, max_regno);
660   for (i = old_size; i < max_regno; i++)
661     reg_equivs->quick_insert (i, ze);
662 }
663 
664 
665 /* Global variables used by reload and its subroutines.  */
666 
667 /* The current basic block while in calculate_elim_costs_all_insns.  */
668 static basic_block elim_bb;
669 
670 /* Set during calculate_needs if an insn needs register elimination.  */
671 static int something_needs_elimination;
672 /* Set during calculate_needs if an insn needs an operand changed.  */
673 static int something_needs_operands_changed;
674 /* Set by alter_regs if we spilled a register to the stack.  */
675 static bool something_was_spilled;
676 
677 /* Nonzero means we couldn't get enough spill regs.  */
678 static int failure;
679 
680 /* Temporary array of pseudo-register number.  */
681 static int *temp_pseudo_reg_arr;
682 
683 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
684    If that insn didn't set the register (i.e., it copied the register to
685    memory), just delete that insn instead of the equivalencing insn plus
686    anything now dead.  If we call delete_dead_insn on that insn, we may
687    delete the insn that actually sets the register if the register dies
688    there and that is incorrect.  */
689 static void
remove_init_insns()690 remove_init_insns ()
691 {
692   for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
693     {
694       if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
695 	{
696 	  rtx list;
697 	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
698 	    {
699 	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
700 
701 	      /* If we already deleted the insn or if it may trap, we can't
702 		 delete it.  The latter case shouldn't happen, but can
703 		 if an insn has a variable address, gets a REG_EH_REGION
704 		 note added to it, and then gets converted into a load
705 		 from a constant address.  */
706 	      if (NOTE_P (equiv_insn)
707 		  || can_throw_internal (equiv_insn))
708 		;
709 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
710 		delete_dead_insn (equiv_insn);
711 	      else
712 		SET_INSN_DELETED (equiv_insn);
713 	    }
714 	}
715     }
716 }
717 
718 /* Return true if remove_init_insns will delete INSN.  */
719 static bool
will_delete_init_insn_p(rtx_insn * insn)720 will_delete_init_insn_p (rtx_insn *insn)
721 {
722   rtx set = single_set (insn);
723   if (!set || !REG_P (SET_DEST (set)))
724     return false;
725   unsigned regno = REGNO (SET_DEST (set));
726 
727   if (can_throw_internal (insn))
728     return false;
729 
730   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
731     return false;
732 
733   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
734     {
735       rtx equiv_insn = XEXP (list, 0);
736       if (equiv_insn == insn)
737 	return true;
738     }
739   return false;
740 }
741 
742 /* Main entry point for the reload pass.
743 
744    FIRST is the first insn of the function being compiled.
745 
746    GLOBAL nonzero means we were called from global_alloc
747    and should attempt to reallocate any pseudoregs that we
748    displace from hard regs we will use for reloads.
749    If GLOBAL is zero, we do not have enough information to do that,
750    so any pseudo reg that is spilled must go to the stack.
751 
752    Return value is TRUE if reload likely left dead insns in the
   stream and a DCE pass should be run to eliminate them.  Else the
754    return value is FALSE.  */
755 
bool
reload (rtx_insn *first, int global)
{
  int i, n;
  rtx_insn *insn;
  struct elim_table *ep;
  basic_block bb;
  bool inserted;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  failure = 0;

  /* Mark the obstack so per-pass allocations can be released in bulk
     by the obstack_free at the bottom of the main loop.  */
  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NOTE_INSN_DELETED);

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* We don't have a stack slot for any spill reg yet.  */
  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* A function that has a nonlocal label that can reach the exit
     block via non-exceptional paths must save all call-saved
     registers.  */
  if (cfun->has_nonlocal_label
      && has_nonexceptional_receiver ())
    crtl->saves_all_registers = 1;

  if (crtl->saves_all_registers)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
	df_set_regs_ever_live (i, true);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  grow_reg_equivs ();
  reg_old_renumber = XCNEWVEC (short, max_regno);
  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
  pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
  pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);

  CLEAR_HARD_REG_SET (bad_spill_regs_global);

  init_eliminable_invariants (first, true);
  init_elim_table ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
     stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  /* N counts the pseudos collected into temp_pseudo_reg_arr.  */
  temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
  for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    temp_pseudo_reg_arr[n++] = i;

  if (ira_conflicts_p)
    /* Ask IRA to order pseudo-registers for better stack slot
       sharing.  */
    ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);

  for (i = 0; i < n; i++)
    alter_reg (temp_pseudo_reg_arr[i], -1, false);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), mark_not_eliminable, NULL);

  maybe_fix_stack_asms ();

  insns_need_reload = 0;
  something_needs_elimination = 0;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* Spill any hard regs that we know we can't eliminate.  */
  CLEAR_HARD_REG_SET (used_spill_regs);
  /* There can be multiple ways to eliminate a register;
     they should be listed adjacently.
     Elimination for any register fails only if all possible ways fail.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
    {
      int from = ep->from;
      int can_eliminate = 0;
      do
	{
          can_eliminate |= ep->can_eliminate;
          ep++;
	}
      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
      if (! can_eliminate)
	spill_hard_reg (from, 1);
    }

  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);

  finish_spills (global);

  /* From now on, we may need to generate moves differently.  We may also
     allow modifications of insns which cause them to not be recognized.
     Any such modifications will be cleaned up during reload itself.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */
  for (;;)
    {
      int something_changed;
      int did_spill;
      HOST_WIDE_INT starting_frame_size;

      starting_frame_size = get_frame_size ();
      something_was_spilled = false;

      set_initial_elim_offsets ();
      set_initial_label_offsets ();

      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range, in that case record the address; we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
				    NULL_RTX);

	    if (strict_memory_address_addr_space_p
		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
		   MEM_ADDR_SPACE (x)))
	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (REG_P (XEXP (x, 0))
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && REG_P (XEXP (XEXP (x, 0), 0))
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc (i) = 0;
		reg_equiv_init (i) = 0;
		alter_reg (i, -1, true);
	      }
	  }

      if (caller_save_needed)
	setup_save_areas ();

      if (starting_frame_size && crtl->stack_alignment_needed)
	{
	  /* If we have a stack frame, we must align it now.  The
	     stack size may be a part of the offset computation for
	     register elimination.  So if this changes the stack size,
	     then repeat the elimination bookkeeping.  We don't
	     realign when there is no stack, as that will cause a
	     stack frame when none is needed should
	     STARTING_FRAME_OFFSET not be already aligned to
	     STACK_BOUNDARY.  */
	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
	}
      /* If we allocated another stack slot, redo elimination bookkeeping.  */
      if (something_was_spilled || starting_frame_size != get_frame_size ())
	{
	  update_eliminables_and_spill ();
	  continue;
	}

      if (caller_save_needed)
	{
	  save_call_clobbered_regs ();
	  /* That might have allocated new insn_chain structures.  */
	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
	}

      calculate_needs_all_insns (global);

      if (! ira_conflicts_p)
	/* Don't do it for IRA.  We need this info because we don't
	   change live_throughout and dead_or_set for chains when IRA
	   is used.  */
	CLEAR_REG_SET (&spilled_pseudos);

      did_spill = 0;

      something_changed = 0;

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (something_was_spilled || starting_frame_size != get_frame_size ())
	something_changed = 1;

      /* Even if the frame size remained the same, we might still have
	 changed elimination offsets, e.g. if find_reloads called
	 force_const_mem requiring the back end to allocate a constant
	 pool base register that needs to be saved on the stack.  */
      else if (!verify_initial_elim_offsets ())
	something_changed = 1;

      if (update_eliminables_and_spill ())
	{
	  did_spill = 1;
	  something_changed = 1;
	}

      /* select_reload_regs sets FAILURE if it could not find enough
	 spill registers.  */
      select_reload_regs ();
      if (failure)
	goto failed;

      if (insns_need_reload != 0 || did_spill)
	something_changed |= finish_spills (global);

      if (! something_changed)
	break;

      if (caller_save_needed)
	delete_caller_save_insns ();

      obstack_free (&reload_obstack, reload_firstobj);
    }

  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
	mark_elimination (ep->from, ep->to);

  remove_init_insns ();

  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (insns_need_reload != 0 || something_needs_elimination
      || something_needs_operands_changed)
    {
      HOST_WIDE_INT old_frame_size = get_frame_size ();

      reload_as_needed (global);

      /* Substituting reloads must not have grown the frame.  */
      gcc_assert (old_frame_size == get_frame_size ());

      gcc_assert (verify_initial_elim_offsets ());
    }

  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    FOR_EACH_BB_FN (bb, cfun)
      bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);

  /* Come here (with failure set nonzero) if we can't get enough spill
     regs.  */
 failed:

  CLEAR_REG_SET (&changed_allocation_pseudos);
  CLEAR_REG_SET (&spilled_pseudos);
  reload_in_progress = 0;

  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;

      if (reg_equiv_mem (i))
	addr = XEXP (reg_equiv_mem (i), 0);

      if (reg_equiv_address (i))
	addr = reg_equiv_address (i);

      if (addr)
	{
	  if (reg_renumber[i] < 0)
	    {
	      rtx reg = regno_reg_rtx[i];

	      /* Rewrite the REG rtx in place into a MEM at ADDR.  */
	      REG_USERVAR_P (reg) = 0;
	      PUT_CODE (reg, MEM);
	      XEXP (reg, 0) = addr;
	      if (reg_equiv_memory_loc (i))
		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
	      else
		MEM_ATTRS (reg) = 0;
	      MEM_NOTRAP_P (reg) = 1;
	    }
	  else if (reg_equiv_mem (i))
	    XEXP (reg_equiv_mem (i), 0) = addr;
	}

      /* We don't want complex addressing modes in debug insns
	 if simpler ones will do, so delegitimize equivalences
	 in debug insns.  */
      if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
	{
	  rtx reg = regno_reg_rtx[i];
	  rtx equiv = 0;
	  df_ref use, next;

	  if (reg_equiv_constant (i))
	    equiv = reg_equiv_constant (i);
	  else if (reg_equiv_invariant (i))
	    equiv = reg_equiv_invariant (i);
	  else if (reg && MEM_P (reg))
	    equiv = targetm.delegitimize_address (reg);
	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
	    equiv = reg;

	  if (equiv == reg)
	    continue;

	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
	    {
	      insn = DF_REF_INSN (use);

	      /* Make sure the next ref is for a different instruction,
		 so that we're not affected by the rescan.  */
	      next = DF_REF_NEXT_REG (use);
	      while (next && DF_REF_INSN (next) == insn)
		next = DF_REF_NEXT_REG (next);

	      if (DEBUG_INSN_P (insn))
		{
		  if (!equiv)
		    {
		      /* No equivalent form is known; mark the location
			 as unknown rather than leave a dangling ref.  */
		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		      df_insn_rescan_debug_internal (insn);
		    }
		  else
		    INSN_VAR_LOCATION_LOC (insn)
		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
					      reg, equiv);
		}
	    }
	}
    }

  /* We must set reload_completed now since the cleanup_subreg_operands call
     below will re-recognize each insn and reload may have generated insns
     which are only valid during and after reload.  */
  reload_completed = 1;

  /* Make a pass over all the insns and delete all USEs which we inserted
     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
     notes.  Delete all CLOBBER insns, except those that refer to the return
     value and the special mem:BLK CLOBBERs added to prevent the scheduler
     from misarranging variable-array code, and simplify (subreg (reg))
     operands.  Strip and regenerate REG_INC notes that may have been moved
     around.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx *pnote;

	if (CALL_P (insn))
	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));

	if ((GET_CODE (PATTERN (insn)) == USE
	     /* We mark with QImode USEs introduced by reload itself.  */
	     && (GET_MODE (insn) == QImode
		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
	    || (GET_CODE (PATTERN (insn)) == CLOBBER
		&& (!MEM_P (XEXP (PATTERN (insn), 0))
		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
			&& XEXP (XEXP (PATTERN (insn), 0), 0)
				!= stack_pointer_rtx))
		&& (!REG_P (XEXP (PATTERN (insn), 0))
		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
	  {
	    delete_insn (insn);
	    continue;
	  }

	/* Some CLOBBERs may survive until here and still reference unassigned
	   pseudos with const equivalent, which may in turn cause ICE in later
	   passes if the reference remains in place.  */
	if (GET_CODE (PATTERN (insn)) == CLOBBER)
	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
			      VOIDmode, PATTERN (insn));

	/* Discard obvious no-ops, even without -O.  This optimization
	   is fast and doesn't interfere with debugging.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SET
	    && REG_P (SET_SRC (PATTERN (insn)))
	    && REG_P (SET_DEST (PATTERN (insn)))
	    && (REGNO (SET_SRC (PATTERN (insn)))
		== REGNO (SET_DEST (PATTERN (insn)))))
	  {
	    delete_insn (insn);
	    continue;
	  }

	/* Strip REG_DEAD, REG_UNUSED and REG_INC notes; REG_INC notes
	   are regenerated below when AUTO_INC_DEC.  */
	pnote = &REG_NOTES (insn);
	while (*pnote != 0)
	  {
	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
		|| REG_NOTE_KIND (*pnote) == REG_INC)
	      *pnote = XEXP (*pnote, 1);
	    else
	      pnote = &XEXP (*pnote, 1);
	  }

	if (AUTO_INC_DEC)
	  add_auto_inc_notes (insn, PATTERN (insn));

	/* Simplify (subreg (reg)) if it appears as an operand.  */
	cleanup_subreg_operands (insn);

	/* Clean up invalid ASMs so that they don't confuse later passes.
	   See PR 21299.  */
	if (asm_noperands (PATTERN (insn)) >= 0)
	  {
	    extract_insn (insn);
	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
	      {
		error_for_asm (insn,
			       "%<asm%> operand has impossible constraints");
		delete_insn (insn);
		continue;
	      }
	  }
      }

  free (temp_pseudo_reg_arr);

  /* Indicate that we no longer have known memory locations or constants.  */
  free_reg_equiv ();

  free (reg_max_ref_width);
  free (reg_old_renumber);
  free (pseudo_previous_regs);
  free (pseudo_forbidden_regs);

  CLEAR_HARD_REG_SET (used_spill_regs);
  for (i = 0; i < n_spills; i++)
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);

  /* Free all the insn_chain structures at once.  */
  obstack_free (&reload_obstack, reload_startobj);
  unused_insn_chains = 0;

  inserted = fixup_abnormal_edges ();

  /* We've possibly turned single trapping insn into multiple ones.  */
  if (cfun->can_throw_non_call_exceptions)
    {
      sbitmap blocks;
      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_ones (blocks);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);
    }

  if (inserted)
    commit_edge_insertions ();

  /* Replacing pseudos with their memory equivalents might have
     created shared rtx.  Subsequent passes would get confused
     by this, so unshare everything here.  */
  unshare_all_rtl_again (first);

#ifdef STACK_BOUNDARY
  /* init_emit has set the alignment of the hard frame pointer
     to STACK_BOUNDARY.  It is very likely no longer valid if
     the hard frame pointer was used for register allocation.  */
  if (!frame_pointer_needed)
    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
#endif

  substitute_stack.release ();

  gcc_assert (bitmap_empty_p (&spilled_pseudos));

  /* If we ran out of spill regs (FAILURE nonzero), leave
     reload_completed clear so callers can tell.  */
  reload_completed = !failure;

  return need_dce;
}
1315 
1316 /* Yet another special case.  Unfortunately, reg-stack forces people to
1317    write incorrect clobbers in asm statements.  These clobbers must not
1318    cause the register to appear in bad_spill_regs, otherwise we'll call
1319    fatal_insn later.  We clear the corresponding regnos in the live
1320    register sets to avoid this.
1321    The whole thing is rather sick, I'm afraid.  */
1322 
static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest; asm_noperands is negative
	 for everything else.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      /* The CLOBBERs we scan below only appear inside a PARALLEL.  */
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' starts a disparaged alternative: skip to the next
		     ',' or the end of the constraint string.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraint letters can be multi-character; step over
		 the whole constraint, not just one char.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1422 
1423 /* Copy the global variables n_reloads and rld into the corresponding elts
1424    of CHAIN.  */
1425 static void
copy_reloads(struct insn_chain * chain)1426 copy_reloads (struct insn_chain *chain)
1427 {
1428   chain->n_reloads = n_reloads;
1429   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1430   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1431   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1432 }
1433 
1434 /* Walk the chain of insns, and determine for each whether it needs reloads
1435    and/or eliminations.  Build the corresponding insns_need_reload list, and
1436    set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the obstack so the temporary data of an undone elimination
     can be freed again below.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* CHAIN may be unlinked from the list below, so fetch its
	 successor up front.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Recycle the chain structure.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1557 
1558 /* This function is called from the register allocator to set up estimates
1559    for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1560    an invariant.  The structure is similar to calculate_needs_all_insns.  */
1561 
void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      /* Weight the rtx cost by the block's execution
			 frequency.  */
		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Feed the accumulated cost estimates back to IRA.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free (reg_equiv_init_cost);
  /* The label-offset tables were only needed for this scan.  */
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1655 
1656 /* Comparison function for qsort to decide which of two reloads
1657    should be handled first.  *P1 and *P2 are the reload numbers.  */
1658 
1659 static int
reload_reg_class_lower(const void * r1p,const void * r2p)1660 reload_reg_class_lower (const void *r1p, const void *r2p)
1661 {
1662   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1663   int t;
1664 
1665   /* Consider required reloads before optional ones.  */
1666   t = rld[r1].optional - rld[r2].optional;
1667   if (t != 0)
1668     return t;
1669 
1670   /* Count all solitary classes before non-solitary ones.  */
1671   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1672        - (reg_class_size[(int) rld[r1].rclass] == 1));
1673   if (t != 0)
1674     return t;
1675 
1676   /* Aside from solitaires, consider all multi-reg groups first.  */
1677   t = rld[r2].nregs - rld[r1].nregs;
1678   if (t != 0)
1679     return t;
1680 
1681   /* Consider reloads in order of increasing reg-class number.  */
1682   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1683   if (t != 0)
1684     return t;
1685 
1686   /* If reloads are equally urgent, sort by reload number,
1687      so that the results of qsort leave nothing to chance.  */
1688   return r1 - r2;
1689 }
1690 
/* The cost of spilling each hard reg, indexed by hard register number.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is charged only against the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg, or -1 if the hard reg is currently unoccupied.  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1702 
1703 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1704 
1705 static void
count_pseudo(int reg)1706 count_pseudo (int reg)
1707 {
1708   int freq = REG_FREQ (reg);
1709   int r = reg_renumber[reg];
1710   int nregs;
1711 
1712   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1713   if (ira_conflicts_p && r < 0)
1714     return;
1715 
1716   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1717       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1718     return;
1719 
1720   SET_REGNO_REG_SET (&pseudos_counted, reg);
1721 
1722   gcc_assert (r >= 0);
1723 
1724   spill_add_cost[r] += freq;
1725   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1726   while (nregs-- > 0)
1727     {
1728       hard_regno_to_pseudo_regno[r + nregs] = reg;
1729       spill_cost[r + nregs] += freq;
1730     }
1731 }
1732 
1733 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1734    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1735 
static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers are never available for spilling.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Start this insn with a clean slate of spill costs and occupancy.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to the remaining hard regs
     and accumulate their costs into the spill cost arrays.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* Charge every pseudo that is live throughout the insn ...  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  /* ... and every pseudo set or dying in the insn itself.
     count_pseudo skips any pseudo counted by the first loop.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1776 
/* Vector of reload-numbers showing the order in which the reloads should
   be processed; filled in and sorted by find_reload_regs.  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn; it is
   copied into the insn chain once all reloads for the insn have regs.  */
static HARD_REG_SET used_spill_regs_local;
1783 
1784 /* We decided to spill hard register SPILLED, which has a size of
1785    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1786    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1787    update SPILL_COST/SPILL_ADD_COST.  */
1788 
1789 static void
count_spilled_pseudo(int spilled,int spilled_nregs,int reg)1790 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1791 {
1792   int freq = REG_FREQ (reg);
1793   int r = reg_renumber[reg];
1794   int nregs;
1795 
1796   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1797   if (ira_conflicts_p && r < 0)
1798     return;
1799 
1800   gcc_assert (r >= 0);
1801 
1802   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1803 
1804   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1805       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1806     return;
1807 
1808   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1809 
1810   spill_add_cost[r] -= freq;
1811   while (nregs-- > 0)
1812     {
1813       hard_regno_to_pseudo_regno[r + nregs] = -1;
1814       spill_cost[r + nregs] -= freq;
1815     }
1816 }
1817 
1818 /* Find reload register to use for reload number ORDER.  */
1819 
static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  /* Static scratch arrays for the IRA spill-preference query; holding the
     -1-terminated lists of pseudos displaced by a candidate register.  */
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is bad for spilling (locally or
     globally) or does not belong to the reload's class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the hard regs already claimed by earlier, conflicting
     reloads of this insn.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  /* The cost of a candidate is the sum of SPILL_COST for its first
	     hard reg and SPILL_ADD_COST for each subsequent hard reg.  */
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  /* Skip duplicates of the immediately preceding entry
		     (a multi-reg pseudo occupies consecutive slots).  */
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* The list is -1-terminated.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember the pseudo list of the new best candidate.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Prefer a register that already holds the reloaded value.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  /* Return 0 on failure; the caller reports the spill failure.  */
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Spill every pseudo live through or set/dying in the insn that
     overlaps the chosen register.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      /* After spilling, no pseudo may still be charged against the
	 chosen hard regs.  */
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1958 
1959 /* Find more reload regs to satisfy the remaining need of an insn, which
1960    is given by CHAIN.
1961    Do it by ascending class number, since otherwise a reg
1962    might be spilled for a big class and might fail to count
1963    for a smaller class even though it belongs to that class.  */
1964 
1965 static void
find_reload_regs(struct insn_chain * chain)1966 find_reload_regs (struct insn_chain *chain)
1967 {
1968   int i;
1969 
1970   /* In order to be certain of getting the registers we need,
1971      we must sort the reloads into order of increasing register class.
1972      Then our grabbing of reload registers will parallel the process
1973      that provided the reload registers.  */
1974   for (i = 0; i < chain->n_reloads; i++)
1975     {
1976       /* Show whether this reload already has a hard reg.  */
1977       if (chain->rld[i].reg_rtx)
1978 	{
1979 	  int regno = REGNO (chain->rld[i].reg_rtx);
1980 	  chain->rld[i].regno = regno;
1981 	  chain->rld[i].nregs
1982 	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1983 	}
1984       else
1985 	chain->rld[i].regno = -1;
1986       reload_order[i] = i;
1987     }
1988 
1989   n_reloads = chain->n_reloads;
1990   memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1991 
1992   CLEAR_HARD_REG_SET (used_spill_regs_local);
1993 
1994   if (dump_file)
1995     fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1996 
1997   qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1998 
1999   /* Compute the order of preference for hard registers to spill.  */
2000 
2001   order_regs_for_reload (chain);
2002 
2003   for (i = 0; i < n_reloads; i++)
2004     {
2005       int r = reload_order[i];
2006 
2007       /* Ignore reloads that got marked inoperative.  */
2008       if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2009 	  && ! rld[r].optional
2010 	  && rld[r].regno == -1)
2011 	if (! find_reg (chain, i))
2012 	  {
2013 	    if (dump_file)
2014 	      fprintf (dump_file, "reload failure for reload %d\n", r);
2015 	    spill_failure (chain->insn, rld[r].rclass);
2016 	    failure = 1;
2017 	    return;
2018 	  }
2019     }
2020 
2021   COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2022   IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2023 
2024   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2025 }
2026 
2027 static void
select_reload_regs(void)2028 select_reload_regs (void)
2029 {
2030   struct insn_chain *chain;
2031 
2032   /* Try to satisfy the needs for each insn.  */
2033   for (chain = insns_need_reload; chain != 0;
2034        chain = chain->next_need_reload)
2035     find_reload_regs (chain);
2036 }
2037 
2038 /* Delete all insns that were inserted by emit_caller_save_insns during
2039    this iteration.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Remove each run of consecutive caller-save chain elements.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the chain's head pointer valid if we delete the head.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain ...  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* ... and put it on the free list for later reuse.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      /* C is now a non-caller-save element (or the end); step past it.  */
      if (c != 0)
	c = c->next;
    }
}
2068 
2069 /* Handle the failure to find a register to spill.
2070    INSN should be one of the insns which needed this particular spill reg.  */
2071 
2072 static void
spill_failure(rtx_insn * insn,enum reg_class rclass)2073 spill_failure (rtx_insn *insn, enum reg_class rclass)
2074 {
2075   if (asm_noperands (PATTERN (insn)) >= 0)
2076     error_for_asm (insn, "can%'t find a register in class %qs while "
2077 		   "reloading %<asm%>",
2078 		   reg_class_names[rclass]);
2079   else
2080     {
2081       error ("unable to find a register to spill in class %qs",
2082 	     reg_class_names[rclass]);
2083 
2084       if (dump_file)
2085 	{
2086 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2087 	  debug_reload_to_stream (dump_file);
2088 	}
2089       fatal_insn ("this is the insn:", insn);
2090     }
2091 }
2092 
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */
2095 
2096 static void
delete_dead_insn(rtx_insn * insn)2097 delete_dead_insn (rtx_insn *insn)
2098 {
2099   rtx_insn *prev = prev_active_insn (insn);
2100   rtx prev_dest;
2101 
2102   /* If the previous insn sets a register that dies in our insn make
2103      a note that we want to run DCE immediately after reload.
2104 
2105      We used to delete the previous insn & recurse, but that's wrong for
2106      block local equivalences.  Instead of trying to figure out the exact
2107      circumstances where we can delete the potentially dead insns, just
2108      let DCE do the job.  */
2109   if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2110       && GET_CODE (PATTERN (prev)) == SET
2111       && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2112       && reg_mentioned_p (prev_dest, PATTERN (insn))
2113       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2114       && ! side_effects_p (SET_SRC (PATTERN (prev))))
2115     need_dce = 1;
2116 
2117   SET_INSN_DELETED (insn);
2118 }
2119 
2120 /* Modify the home of pseudo-reg I.
2121    The new home is present in reg_renumber[I].
2122 
2123    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2124    or it may be -1, meaning there is none or it is not relevant.
2125    This is used so that all pseudos spilled from a given hard reg
2126    can share one stack slot.  */
2127 
static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* TOTAL_SIZE also covers paradoxical subregs of the pseudo,
	 which may be wider than its own mode.  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      /* Byte offset applied below to compensate for big-endian layout.  */
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* IRA found a reusable slot; nothing more to allocate.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
						         * BITS_PER_UNIT,
						         MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requirements to also cover the slot previously
	     shared by FROM_REG, so the new slot can replace it.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the  big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  /* Record the slot so later spills from FROM_REG can share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2281 
2282 /* Mark the slots in regs_ever_live for the hard regs used by
2283    pseudo-reg number REGNO, accessed in MODE.  */
2284 
2285 static void
mark_home_live_1(int regno,machine_mode mode)2286 mark_home_live_1 (int regno, machine_mode mode)
2287 {
2288   int i, lim;
2289 
2290   i = reg_renumber[regno];
2291   if (i < 0)
2292     return;
2293   lim = end_hard_regno (mode, i);
2294   while (i < lim)
2295     df_set_regs_ever_live (i++, true);
2296 }
2297 
2298 /* Mark the slots in regs_ever_live for the hard regs
2299    used by pseudo-reg number REGNO.  */
2300 
2301 void
mark_home_live(int regno)2302 mark_home_live (int regno)
2303 {
2304   if (reg_renumber[regno] >= 0)
2305     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2306 }
2307 
2308 /* This function handles the tracking of elimination offsets around branches.
2309 
2310    X is a piece of RTL being scanned.
2311 
2312    INSN is the insn that it came from, if any.
2313 
2314    INITIAL_P is nonzero if we are to set the offset to be the initial
2315    offset and zero if we are setting the offset of the label to be the
2316    current offset.  */
2317 
static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are reached only via nonlocal goto; their
	 offsets are handled elsewhere.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = LABEL_REF_LABEL (x);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Recurse into each label arm; a non-label, non-PC/RETURN arm
	     drops out of the switch to the "variable address" case.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2460 
2461 /* This function examines every reg that occurs in X and adjusts the
2462    costs for its elimination which are gathered by IRA.  INSN is the
2463    insn in which X occurs.  We do not recurse into MEM expressions.  */
2464 
static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      /* Do not recurse into MEM expressions.  */
      if (MEM_P (x))
	iter.skip_subrtxes ();
      /* Only pseudos with an invariant equivalence and a surviving
	 initializing insn are of interest here.  */
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  rtx t = reg_equiv_invariant (REGNO (x));
	  /* Cost the eliminated form of the equivalence, weighted by
	     the execution frequency of the block being eliminated in.  */
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2490 
2491 /* Scan X and replace any eliminable registers (such as fp) with a
2492    replacement (such as sp), plus an offset.
2493 
2494    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2495    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2496    MEM, we are allowed to replace a sum of a register and the constant zero
2497    with the register, which we cannot do outside a MEM.  In addition, we need
2498    to record the fact that a register is referenced outside a MEM.
2499 
2500    If INSN is an insn, it is the insn containing X.  If we replace a REG
2501    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2502    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2503    the REG is being modified.
2504 
2505    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2506    That's used when we eliminate in expressions stored in notes.
2507    This means, do not set ref_outside_mem even if the reference
2508    is outside of MEMs.
2509 
2510    If FOR_COSTS is true, we are being called before reload in order to
2511    estimate the costs of keeping registers with an equivalence unallocated.
2512 
2513    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2514    replacements done assuming all offsets are at their initial values.  If
2515    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2516    encounter, return the actual location so that find_reloads will do
2517    the proper thing.  */
2518 
/* Subroutine of eliminate_regs.  Scan X and substitute any occurrence of
   an eliminable hard register with its replacement register plus the
   current elimination offset.

   MEM_MODE is the mode of an enclosing MEM, or 0/VOIDmode when X is not
   inside a MEM; only inside a MEM may a (plus REG (const_int -offset))
   collapse to just the replacement register.

   INSN, if nonnull, is the containing insn, or an EXPR_LIST/INSN_LIST of
   notes being processed (see the MULT case).

   If MAY_USE_INVARIANT is true, a pseudo that did not get a hard register
   may be replaced by its reg_equiv_invariant expression.

   FOR_COSTS is true when called purely for cost estimation; in that mode
   irreversible side effects (alter_reg) are suppressed and eliminations
   that make an address invalid are recorded via note_reg_elim_costly.  */
static rtx
eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
		  bool may_use_invariant, bool for_costs)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new_rtx;
  int i, j;
  const char *fmt;
  int copied = 0;		/* Nonzero once X has been shallow-copied.  */

  /* No current function: nothing can be eliminated, return X unchanged.  */
  if (! current_function_decl)
    return x;

  switch (code)
    {
    /* These codes contain no registers, so no substitution is needed.  */
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);

	}
      else if (reg_renumber && reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_invariant (regno))
	{
	  /* A pseudo with no hard reg but with an invariant equivalence:
	     substitute the (eliminated) invariant where that is allowed.  */
	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
			             mem_mode, insn, true, for_costs);
	  /* There exists at least one use of REGNO that cannot be
	     eliminated.  Prevent the defining insn from being deleted.  */
	  reg_equiv_init (regno) = NULL;
	  if (!for_costs)
	    alter_reg (regno, -1, true);
	}
      return x;

    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to have been reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle code a reloaded PLUS operand.

       Also consider backends where the flags register is clobbered by a
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
       lea instruction comes to mind).  If we try to reload a MINUS, we
       may kill the flags register that was holding a useful value.

       So, please before trying to handle MINUS, consider reload as a
       whole instead of this little section as well as the backend issues.  */
    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
		  return ep->to_rtx;
		else
		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
				       plus_constant (Pmode, XEXP (x, 1),
						      ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 Note that there is no risk of modifying the structure of the insn,
	 since we only get called for its operands, thus we are either
	 modifying the address inside a MEM, or something like an address
	 operand of a load-address insn.  */

      {
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				     for_costs);
	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				     for_costs);

	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && REG_P (new1)
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equivs
		&& reg_equiv_constant (REGNO (new1)) != 0)
	      new1 = reg_equiv_constant (REGNO (new1));
	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant (REGNO (new0)) != 0)
	      new0 = reg_equiv_constant (REGNO (new0));

	    new_rtx = form_sum (GET_MODE (x), new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
	    else
	      return new_rtx;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.   This case is pathological.
	 We ignore the possibility of overflow here.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (x, 1)))
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes or in DEBUG_INSNs don't count for
		     this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST
				      || DEBUG_INSN_P (insn))))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (Pmode,
			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* ... fall through ...  */

    case CALL:
    case COMPARE:
    /* See comments before PLUS about handling MINUS.  */
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
	/* Generic binary operation: eliminate within both operands and
	   rebuild only if something changed.  */
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				     for_costs);
	rtx new1 = XEXP (x, 1)
	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
			      for_costs) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 0))
	    {
	      /* If this is a REG_DEAD note, it is not valid anymore.
		 Using the eliminated version could result in creating a
		 REG_DEAD note for the stack or frame pointer.  */
	      if (REG_NOTE_KIND (x) == REG_DEAD)
		return (XEXP (x, 1)
			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
					    for_costs)
			: NULL_RTX);

	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
	    }
	}

      /* ... fall through ...  */

    case INSN_LIST:
    case INT_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 1))
	    return
	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
	}
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  */
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already make sure that this does not
	 happen.  The only remaining case we need to consider here is
	 that the increment value may be an eliminable register.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS
	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
	{
	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
					  insn, true, for_costs);

	  if (new_rtx != XEXP (XEXP (x, 1), 1))
	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
				   gen_rtx_PLUS (GET_MODE (x),
						 XEXP (x, 0), new_rtx));
	}
      return x;

    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Generic unary operation: eliminate within the operand and rebuild
	 only if it changed.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_BYTE.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reload
	 may do the replacement in certain circumstances.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	{
	  new_rtx = SUBREG_REG (x);
	}
      else
	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);

      if (new_rtx != SUBREG_REG (x))
	{
	  int x_size = GET_MODE_SIZE (GET_MODE (x));
	  int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));

	  if (MEM_P (new_rtx)
	      && ((x_size < new_size
#if WORD_REGISTER_OPERATIONS
		   /* On these machines, combine can create rtl of the form
		      (set (subreg:m1 (reg:m2 R) 0) ...)
		      where m1 < m2, and expects something interesting to
		      happen to the entire word.  Moreover, it will use the
		      (reg:m2 R) later, expecting all bits to be preserved.
		      So if the number of words is the same, preserve the
		      subreg so that push_reload can see it.  */
		   && ! ((x_size - 1) / UNITS_PER_WORD
			 == (new_size -1 ) / UNITS_PER_WORD)
#endif
		   )
		  || x_size == new_size)
	      )
	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
	  else
	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	}

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */

      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
				  for_costs);
      /* For cost estimation, record when elimination turned a valid
	 address into an invalid one (the replacement will need a reload).  */
      if (for_costs
	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
	  && !memory_address_p (GET_MODE (x), new_rtx))
	note_reg_elim_costly (XEXP (x, 0), insn);

      return replace_equiv_address_nv (x, new_rtx);

    case USE:
      /* Handle insn_list USE that a call to a pure function may generate.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_USE (GET_MODE (x), new_rtx);
      return x;

    case CLOBBER:
    case ASM_OPERANDS:
      /* These only reach here inside debug insns; fall through to the
	 generic operand walk below.  */
      gcc_assert (insn && DEBUG_INSN_P (insn));
      break;

    case SET:
      /* Whole SETs are never passed to this function.  */
      gcc_unreachable ();

    default:
      break;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
				      for_costs);
	  if (new_rtx != XEXP (x, i) && ! copied)
	    {
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_rtx;
	}
      else if (*fmt == 'E')
	{
	  int copied_vec = 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
					  for_costs);
	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
		{
		  /* Copy the vector (and, if not yet done, X itself) before
		     storing the changed element.  */
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     XVEC (x, i)->elem);
		  if (! copied)
		    {
		      x = shallow_copy_rtx (x);
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new_rtx;
	    }
	}
    }

  return x;
}
2929 
2930 rtx
eliminate_regs(rtx x,machine_mode mem_mode,rtx insn)2931 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2932 {
2933   if (reg_eliminate == NULL)
2934     {
2935       gcc_assert (targetm.no_register_allocation);
2936       return x;
2937     }
2938   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2939 }
2940 
2941 /* Scan rtx X for modifications of elimination target registers.  Update
2942    the table of eliminables to reflect the changed state.  MEM_MODE is
2943    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2944 
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    /* These codes cannot modify any register; nothing to record.  */
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		/* Record that the elimination source is referenced outside
		   of any MEM.  */
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	/* A pseudo with a non-invariant constant equivalence: its
	   equivalence may itself reference elimination registers.  */
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (reg = reg + const) modifications keep the rule
		   valid; anything else kills it.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-widening SUBREG of a pseudo that has a memory equivalence
	 contributes no elimination effects of its own.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Recurse into all remaining operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3143 
3144 /* Descend through rtx X and verify that no references to eliminable registers
3145    remain.  If any do remain, mark the involved register as not
3146    eliminable.  */
3147 
3148 static void
check_eliminable_occurrences(rtx x)3149 check_eliminable_occurrences (rtx x)
3150 {
3151   const char *fmt;
3152   int i;
3153   enum rtx_code code;
3154 
3155   if (x == 0)
3156     return;
3157 
3158   code = GET_CODE (x);
3159 
3160   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3161     {
3162       struct elim_table *ep;
3163 
3164       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3165 	if (ep->from_rtx == x)
3166 	  ep->can_eliminate = 0;
3167       return;
3168     }
3169 
3170   fmt = GET_RTX_FORMAT (code);
3171   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3172     {
3173       if (*fmt == 'e')
3174 	check_eliminable_occurrences (XEXP (x, i));
3175       else if (*fmt == 'E')
3176 	{
3177 	  int j;
3178 	  for (j = 0; j < XVECLEN (x, i); j++)
3179 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3180 	}
3181     }
3182 }
3183 
3184 /* Scan INSN and eliminate all eliminable registers in it.
3185 
3186    If REPLACE is nonzero, do the replacement destructively.  Also
3187    delete the insn as dead it if it is setting an eliminable register.
3188 
3189    If REPLACE is zero, do all our allocations in reload_obstack.
3190 
3191    If no eliminations were done and this insn doesn't require any elimination
3192    processing (these are not identical conditions: it might be updating sp,
3193    but not referencing fp; this needs to be seen during reload_as_needed so
3194    that the offset between fp and sp can be taken into consideration), zero
3195    is returned.  Otherwise, 1 is returned.  */
3196 
3197 static int
eliminate_regs_in_insn(rtx_insn * insn,int replace)3198 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3199 {
3200   int icode = recog_memoized (insn);
3201   rtx old_body = PATTERN (insn);
3202   int insn_is_asm = asm_noperands (old_body) >= 0;
3203   rtx old_set = single_set (insn);
3204   rtx new_body;
3205   int val = 0;
3206   int i;
3207   rtx substed_operand[MAX_RECOG_OPERANDS];
3208   rtx orig_operand[MAX_RECOG_OPERANDS];
3209   struct elim_table *ep;
3210   rtx plus_src, plus_cst_src;
3211 
3212   if (! insn_is_asm && icode < 0)
3213     {
3214       gcc_assert (DEBUG_INSN_P (insn)
3215 		  || GET_CODE (PATTERN (insn)) == USE
3216 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3217 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3218       if (DEBUG_INSN_P (insn))
3219 	INSN_VAR_LOCATION_LOC (insn)
3220 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3221       return 0;
3222     }
3223 
3224   if (old_set != 0 && REG_P (SET_DEST (old_set))
3225       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3226     {
3227       /* Check for setting an eliminable register.  */
3228       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3229 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3230 	  {
3231 	    /* If this is setting the frame pointer register to the
3232 	       hardware frame pointer register and this is an elimination
3233 	       that will be done (tested above), this insn is really
3234 	       adjusting the frame pointer downward to compensate for
3235 	       the adjustment done before a nonlocal goto.  */
3236 	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3237 		&& ep->from == FRAME_POINTER_REGNUM
3238 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3239 	      {
3240 		rtx base = SET_SRC (old_set);
3241 		rtx_insn *base_insn = insn;
3242 		HOST_WIDE_INT offset = 0;
3243 
3244 		while (base != ep->to_rtx)
3245 		  {
3246 		    rtx_insn *prev_insn;
3247 		    rtx prev_set;
3248 
3249 		    if (GET_CODE (base) == PLUS
3250 		        && CONST_INT_P (XEXP (base, 1)))
3251 		      {
3252 		        offset += INTVAL (XEXP (base, 1));
3253 		        base = XEXP (base, 0);
3254 		      }
3255 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3256 			     && (prev_set = single_set (prev_insn)) != 0
3257 			     && rtx_equal_p (SET_DEST (prev_set), base))
3258 		      {
3259 		        base = SET_SRC (prev_set);
3260 		        base_insn = prev_insn;
3261 		      }
3262 		    else
3263 		      break;
3264 		  }
3265 
3266 		if (base == ep->to_rtx)
3267 		  {
3268 		    rtx src = plus_constant (Pmode, ep->to_rtx,
3269 					     offset - ep->offset);
3270 
3271 		    new_body = old_body;
3272 		    if (! replace)
3273 		      {
3274 			new_body = copy_insn (old_body);
3275 			if (REG_NOTES (insn))
3276 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3277 		      }
3278 		    PATTERN (insn) = new_body;
3279 		    old_set = single_set (insn);
3280 
3281 		    /* First see if this insn remains valid when we
3282 		       make the change.  If not, keep the INSN_CODE
3283 		       the same and let reload fit it up.  */
3284 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3285 		    validate_change (insn, &SET_DEST (old_set),
3286 				     ep->to_rtx, 1);
3287 		    if (! apply_change_group ())
3288 		      {
3289 			SET_SRC (old_set) = src;
3290 			SET_DEST (old_set) = ep->to_rtx;
3291 		      }
3292 
3293 		    val = 1;
3294 		    goto done;
3295 		  }
3296 	      }
3297 
3298 	    /* In this case this insn isn't serving a useful purpose.  We
3299 	       will delete it in reload_as_needed once we know that this
3300 	       elimination is, in fact, being done.
3301 
3302 	       If REPLACE isn't set, we can't delete this insn, but needn't
3303 	       process it since it won't be used unless something changes.  */
3304 	    if (replace)
3305 	      {
3306 		delete_dead_insn (insn);
3307 		return 1;
3308 	      }
3309 	    val = 1;
3310 	    goto done;
3311 	  }
3312     }
3313 
3314   /* We allow one special case which happens to work on all machines we
3315      currently support: a single set with the source or a REG_EQUAL
3316      note being a PLUS of an eliminable register and a constant.  */
3317   plus_src = plus_cst_src = 0;
3318   if (old_set && REG_P (SET_DEST (old_set)))
3319     {
3320       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3321 	plus_src = SET_SRC (old_set);
3322       /* First see if the source is of the form (plus (...) CST).  */
3323       if (plus_src
3324 	  && CONST_INT_P (XEXP (plus_src, 1)))
3325 	plus_cst_src = plus_src;
3326       else if (REG_P (SET_SRC (old_set))
3327 	       || plus_src)
3328 	{
3329 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3330 	     (plus (...) CST).  */
3331 	  rtx links;
3332 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3333 	    {
3334 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3335 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3336 		  && GET_CODE (XEXP (links, 0)) == PLUS
3337 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3338 		{
3339 		  plus_cst_src = XEXP (links, 0);
3340 		  break;
3341 		}
3342 	    }
3343 	}
3344 
3345       /* Check that the first operand of the PLUS is a hard reg or
3346 	 the lowpart subreg of one.  */
3347       if (plus_cst_src)
3348 	{
3349 	  rtx reg = XEXP (plus_cst_src, 0);
3350 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3351 	    reg = SUBREG_REG (reg);
3352 
3353 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3354 	    plus_cst_src = 0;
3355 	}
3356     }
3357   if (plus_cst_src)
3358     {
3359       rtx reg = XEXP (plus_cst_src, 0);
3360       HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3361 
3362       if (GET_CODE (reg) == SUBREG)
3363 	reg = SUBREG_REG (reg);
3364 
3365       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3366 	if (ep->from_rtx == reg && ep->can_eliminate)
3367 	  {
3368 	    rtx to_rtx = ep->to_rtx;
3369 	    offset += ep->offset;
3370 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3371 
3372 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3373 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3374 				    to_rtx);
3375 	    /* If we have a nonzero offset, and the source is already
3376 	       a simple REG, the following transformation would
3377 	       increase the cost of the insn by replacing a simple REG
3378 	       with (plus (reg sp) CST).  So try only when we already
3379 	       had a PLUS before.  */
3380 	    if (offset == 0 || plus_src)
3381 	      {
3382 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3383 					     to_rtx, offset);
3384 
3385 		new_body = old_body;
3386 		if (! replace)
3387 		  {
3388 		    new_body = copy_insn (old_body);
3389 		    if (REG_NOTES (insn))
3390 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3391 		  }
3392 		PATTERN (insn) = new_body;
3393 		old_set = single_set (insn);
3394 
3395 		/* First see if this insn remains valid when we make the
3396 		   change.  If not, try to replace the whole pattern with
3397 		   a simple set (this may help if the original insn was a
3398 		   PARALLEL that was only recognized as single_set due to
3399 		   REG_UNUSED notes).  If this isn't valid either, keep
3400 		   the INSN_CODE the same and let reload fix it up.  */
3401 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3402 		  {
3403 		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3404 
3405 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3406 		      SET_SRC (old_set) = new_src;
3407 		  }
3408 	      }
3409 	    else
3410 	      break;
3411 
3412 	    val = 1;
3413 	    /* This can't have an effect on elimination offsets, so skip right
3414 	       to the end.  */
3415 	    goto done;
3416 	  }
3417     }
3418 
3419   /* Determine the effects of this insn on elimination offsets.  */
3420   elimination_effects (old_body, VOIDmode);
3421 
3422   /* Eliminate all eliminable registers occurring in operands that
3423      can be handled by reload.  */
3424   extract_insn (insn);
3425   for (i = 0; i < recog_data.n_operands; i++)
3426     {
3427       orig_operand[i] = recog_data.operand[i];
3428       substed_operand[i] = recog_data.operand[i];
3429 
3430       /* For an asm statement, every operand is eliminable.  */
3431       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3432 	{
3433 	  bool is_set_src, in_plus;
3434 
3435 	  /* Check for setting a register that we know about.  */
3436 	  if (recog_data.operand_type[i] != OP_IN
3437 	      && REG_P (orig_operand[i]))
3438 	    {
3439 	      /* If we are assigning to a register that can be eliminated, it
3440 		 must be as part of a PARALLEL, since the code above handles
3441 		 single SETs.  We must indicate that we can no longer
3442 		 eliminate this reg.  */
3443 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3444 		   ep++)
3445 		if (ep->from_rtx == orig_operand[i])
3446 		  ep->can_eliminate = 0;
3447 	    }
3448 
3449 	  /* Companion to the above plus substitution, we can allow
3450 	     invariants as the source of a plain move.  */
3451 	  is_set_src = false;
3452 	  if (old_set
3453 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3454 	    is_set_src = true;
3455 	  in_plus = false;
3456 	  if (plus_src
3457 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3458 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3459 	    in_plus = true;
3460 
3461 	  substed_operand[i]
3462 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3463 			        replace ? insn : NULL_RTX,
3464 				is_set_src || in_plus, false);
3465 	  if (substed_operand[i] != orig_operand[i])
3466 	    val = 1;
3467 	  /* Terminate the search in check_eliminable_occurrences at
3468 	     this point.  */
3469 	  *recog_data.operand_loc[i] = 0;
3470 
3471 	  /* If an output operand changed from a REG to a MEM and INSN is an
3472 	     insn, write a CLOBBER insn.  */
3473 	  if (recog_data.operand_type[i] != OP_IN
3474 	      && REG_P (orig_operand[i])
3475 	      && MEM_P (substed_operand[i])
3476 	      && replace)
3477 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3478 	}
3479     }
3480 
3481   for (i = 0; i < recog_data.n_dups; i++)
3482     *recog_data.dup_loc[i]
3483       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3484 
3485   /* If any eliminable remain, they aren't eliminable anymore.  */
3486   check_eliminable_occurrences (old_body);
3487 
3488   /* Substitute the operands; the new values are in the substed_operand
3489      array.  */
3490   for (i = 0; i < recog_data.n_operands; i++)
3491     *recog_data.operand_loc[i] = substed_operand[i];
3492   for (i = 0; i < recog_data.n_dups; i++)
3493     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3494 
3495   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3496      re-recognize the insn.  We do this in case we had a simple addition
3497      but now can do this as a load-address.  This saves an insn in this
3498      common case.
3499      If re-recognition fails, the old insn code number will still be used,
3500      and some register operands may have changed into PLUS expressions.
3501      These will be handled by find_reloads by loading them into a register
3502      again.  */
3503 
3504   if (val)
3505     {
3506       /* If we aren't replacing things permanently and we changed something,
3507 	 make another copy to ensure that all the RTL is new.  Otherwise
3508 	 things can go wrong if find_reload swaps commutative operands
3509 	 and one is inside RTL that has been copied while the other is not.  */
3510       new_body = old_body;
3511       if (! replace)
3512 	{
3513 	  new_body = copy_insn (old_body);
3514 	  if (REG_NOTES (insn))
3515 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3516 	}
3517       PATTERN (insn) = new_body;
3518 
3519       /* If we had a move insn but now we don't, rerecognize it.  This will
3520 	 cause spurious re-recognition if the old move had a PARALLEL since
3521 	 the new one still will, but we can't call single_set without
3522 	 having put NEW_BODY into the insn and the re-recognition won't
3523 	 hurt in this rare case.  */
3524       /* ??? Why this huge if statement - why don't we just rerecognize the
3525 	 thing always?  */
3526       if (! insn_is_asm
3527 	  && old_set != 0
3528 	  && ((REG_P (SET_SRC (old_set))
3529 	       && (GET_CODE (new_body) != SET
3530 		   || !REG_P (SET_SRC (new_body))))
3531 	      /* If this was a load from or store to memory, compare
3532 		 the MEM in recog_data.operand to the one in the insn.
3533 		 If they are not equal, then rerecognize the insn.  */
3534 	      || (old_set != 0
3535 		  && ((MEM_P (SET_SRC (old_set))
3536 		       && SET_SRC (old_set) != recog_data.operand[1])
3537 		      || (MEM_P (SET_DEST (old_set))
3538 			  && SET_DEST (old_set) != recog_data.operand[0])))
3539 	      /* If this was an add insn before, rerecognize.  */
3540 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3541 	{
3542 	  int new_icode = recog (PATTERN (insn), insn, 0);
3543 	  if (new_icode >= 0)
3544 	    INSN_CODE (insn) = new_icode;
3545 	}
3546     }
3547 
3548   /* Restore the old body.  If there were any changes to it, we made a copy
3549      of it while the changes were still in place, so we'll correctly return
3550      a modified insn below.  */
3551   if (! replace)
3552     {
3553       /* Restore the old body.  */
3554       for (i = 0; i < recog_data.n_operands; i++)
3555 	/* Restoring a top-level match_parallel would clobber the new_body
3556 	   we installed in the insn.  */
3557 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3558 	  *recog_data.operand_loc[i] = orig_operand[i];
3559       for (i = 0; i < recog_data.n_dups; i++)
3560 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3561     }
3562 
3563   /* Update all elimination pairs to reflect the status after the current
3564      insn.  The changes we make were determined by the earlier call to
3565      elimination_effects.
3566 
3567      We also detect cases where register elimination cannot be done,
3568      namely, if a register would be both changed and referenced outside a MEM
3569      in the resulting insn since such an insn is often undefined and, even if
3570      not, we cannot know what meaning will be given to it.  Note that it is
3571      valid to have a register used in an address in an insn that changes it
3572      (presumably with a pre- or post-increment or decrement).
3573 
3574      If anything changes, return nonzero.  */
3575 
3576   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3577     {
3578       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3579 	ep->can_eliminate = 0;
3580 
3581       ep->ref_outside_mem = 0;
3582 
3583       if (ep->previous_offset != ep->offset)
3584 	val = 1;
3585     }
3586 
3587  done:
3588   /* If we changed something, perform elimination in REG_NOTES.  This is
3589      needed even when REPLACE is zero because a REG_DEAD note might refer
3590      to a register that we eliminate and could cause a different number
3591      of spill registers to be needed in the final reload pass than in
3592      the pre-passes.  */
3593   if (val && REG_NOTES (insn) != 0)
3594     REG_NOTES (insn)
3595       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3596 			  false);
3597 
3598   return val;
3599 }
3600 
/* Like eliminate_regs_in_insn, but only estimate costs for the use of the
   register allocator.  INSN is the instruction we need to examine, we perform
   eliminations in its operands and record cases where eliminating a reg with
   an invariant equivalence would add extra cost.

   Unlike eliminate_regs_in_insn, this never modifies INSN permanently:
   operand slots are temporarily zeroed to bound the scan done by
   check_eliminable_occurrences and then restored before returning.  */

#pragma GCC diagnostic push
#pragma GCC diagnostic warning "-Wmaybe-uninitialized"
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable insns must be one of the harmless kinds that carry
     no operands worth costing.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  /* A single set of an eliminable hard register is handled entirely by
     the real elimination code; there is nothing to cost here.  */
  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  int n_dups = recog_data.n_dups;
  /* Save the duplicate operand slots so they can be restored below.  */
  for (i = 0; i < n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  int n_operands = recog_data.n_operands;
  for (i = 0; i < n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* A set source that is not feeding a plain register move would
	     force the invariant to be reloaded; record the extra cost.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Run the substitution for its cost-recording side effects only;
	     the result is discarded (last argument TRUE = for_costs).  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
#pragma GCC diagnostic pop
3758 
3759 /* Loop through all elimination pairs.
3760    Recalculate the number not at initial offset.
3761 
3762    Compute the maximum offset (minimum offset if the stack does not
3763    grow downward) for each elimination pair.  */
3764 
3765 static void
update_eliminable_offsets(void)3766 update_eliminable_offsets (void)
3767 {
3768   struct elim_table *ep;
3769 
3770   num_not_at_initial_offset = 0;
3771   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3772     {
3773       ep->previous_offset = ep->offset;
3774       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3775 	num_not_at_initial_offset++;
3776     }
3777 }
3778 
3779 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3780    replacement we currently believe is valid, mark it as not eliminable if X
3781    modifies DEST in any way other than by adding a constant integer to it.
3782 
3783    If DEST is the frame pointer, we do nothing because we assume that
3784    all assignments to the hard frame pointer are nonlocal gotos and are being
3785    done at a time when they are valid and do not disturb anything else.
3786    Some machines want to eliminate a fake argument pointer with either the
3787    frame or stack pointer.  Assignments to the hard frame pointer must not
3788    prevent this elimination.
3789 
3790    Called via note_stores from reload before starting its passes to scan
3791    the insns of the function.  */
3792 
3793 static void
mark_not_eliminable(rtx dest,const_rtx x,void * data ATTRIBUTE_UNUSED)3794 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3795 {
3796   unsigned int i;
3797 
3798   /* A SUBREG of a hard register here is just changing its mode.  We should
3799      not see a SUBREG of an eliminable hard register, but check just in
3800      case.  */
3801   if (GET_CODE (dest) == SUBREG)
3802     dest = SUBREG_REG (dest);
3803 
3804   if (dest == hard_frame_pointer_rtx)
3805     return;
3806 
3807   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3808     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3809 	&& (GET_CODE (x) != SET
3810 	    || GET_CODE (SET_SRC (x)) != PLUS
3811 	    || XEXP (SET_SRC (x), 0) != dest
3812 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3813       {
3814 	reg_eliminate[i].can_eliminate_previous
3815 	  = reg_eliminate[i].can_eliminate = 0;
3816 	num_eliminable--;
3817       }
3818 }
3819 
3820 /* Verify that the initial elimination offsets did not change since the
3821    last call to set_initial_elim_offsets.  This is used to catch cases
3822    where something illegal happened during reload_as_needed that could
3823    cause incorrect code to be generated if we did not check for it.  */
3824 
3825 static bool
verify_initial_elim_offsets(void)3826 verify_initial_elim_offsets (void)
3827 {
3828   HOST_WIDE_INT t;
3829 
3830   if (!num_eliminable)
3831     return true;
3832 
3833 #ifdef ELIMINABLE_REGS
3834   {
3835    struct elim_table *ep;
3836 
3837    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3838      {
3839        INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3840        if (t != ep->initial_offset)
3841 	 return false;
3842      }
3843   }
3844 #else
3845   INITIAL_FRAME_POINTER_OFFSET (t);
3846   if (t != reg_eliminate[0].initial_offset)
3847     return false;
3848 #endif
3849 
3850   return true;
3851 }
3852 
3853 /* Reset all offsets on eliminable registers to their initial values.  */
3854 
3855 static void
set_initial_elim_offsets(void)3856 set_initial_elim_offsets (void)
3857 {
3858   struct elim_table *ep = reg_eliminate;
3859 
3860 #ifdef ELIMINABLE_REGS
3861   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3862     {
3863       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3864       ep->previous_offset = ep->offset = ep->initial_offset;
3865     }
3866 #else
3867   INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3868   ep->previous_offset = ep->offset = ep->initial_offset;
3869 #endif
3870 
3871   num_not_at_initial_offset = 0;
3872 }
3873 
/* Subroutine of set_initial_label_offsets called via for_each_eh_label.
   Records the initial elimination offsets as known at EH label LABEL.  */

static void
set_initial_eh_label_offset (rtx label)
{
  set_label_offsets (label, NULL, 1);
}
3881 
3882 /* Initialize the known label offsets.
3883    Set a known offset for each forced label to be at the initial offset
3884    of each elimination.  We do this because we assume that all
3885    computed jumps occur from a location where each elimination is
3886    at its initial offset.
3887    For all other labels, show that we don't know the offsets.  */
3888 
3889 static void
set_initial_label_offsets(void)3890 set_initial_label_offsets (void)
3891 {
3892   memset (offsets_known_at, 0, num_labels);
3893 
3894   for (rtx_insn_list *x = forced_labels; x; x = x->next ())
3895     if (x->insn ())
3896       set_label_offsets (x->insn (), NULL, 1);
3897 
3898   for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3899     if (x->insn ())
3900       set_label_offsets (x->insn (), NULL, 1);
3901 
3902   for_each_eh_label (set_initial_eh_label_offset);
3903 }
3904 
3905 /* Set all elimination offsets to the known values for the code label given
3906    by INSN.  */
3907 
3908 static void
set_offsets_for_label(rtx_insn * insn)3909 set_offsets_for_label (rtx_insn *insn)
3910 {
3911   unsigned int i;
3912   int label_nr = CODE_LABEL_NUMBER (insn);
3913   struct elim_table *ep;
3914 
3915   num_not_at_initial_offset = 0;
3916   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3917     {
3918       ep->offset = ep->previous_offset
3919 		 = offsets_at[label_nr - first_label_num][i];
3920       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3921 	num_not_at_initial_offset++;
3922     }
3923 }
3924 
/* See if anything that happened changes which eliminations are valid.
   For example, on the SPARC, whether or not the frame pointer can
   be eliminated can depend on what registers have been used.  We need
   not check some conditions again (such as flag_omit_frame_pointer)
   since they can't have changed.

   Registers that can no longer be eliminated are added to *PSET so the
   caller can spill them.  Also recomputes frame_pointer_needed.  */

static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First pass: re-query the target about each elimination pair.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
	|| ! targetm.can_eliminate (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Any surviving elimination of the frame pointer into something
	 other than the hard frame pointer means we can do without one
	 (unless stack realignment forces it to stay).  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4008 
/* Call update_eliminables and spill any registers we can't eliminate anymore.
   Return true iff a register was spilled.  */

static bool
update_eliminables_and_spill (void)
{
  int i;
  bool did_spill = false;
  HARD_REG_SET to_spill;
  CLEAR_HARD_REG_SET (to_spill);
  update_eliminables (&to_spill);
  /* The registers we must spill are no longer usable as spill regs.  */
  AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (to_spill, i))
      {
	spill_hard_reg (i, 1);
	did_spill = true;

	/* Regardless of the state of spills, if we previously had
	   a register that we thought we could eliminate, but now can
	   not eliminate, we must run another pass.

	   Consider pseudos which have an entry in reg_equiv_* which
	   reference an eliminable register.  We must make another pass
	   to update reg_equiv_* so that we do not substitute in the
	   old value from when we thought the elimination could be
	   performed.  */
      }
  return did_spill;
}
4040 
4041 /* Return true if X is used as the target register of an elimination.  */
4042 
4043 bool
elimination_target_reg_p(rtx x)4044 elimination_target_reg_p (rtx x)
4045 {
4046   struct elim_table *ep;
4047 
4048   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4049     if (ep->to_rtx == x && ep->can_eliminate)
4050       return true;
4051 
4052   return false;
4053 }
4054 
/* Initialize the table of registers to eliminate.
   Pre-condition: global flag frame_pointer_needed has been set before
   calling this function.  */

static void
init_elim_table (void)
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  const struct elim_table_1 *ep1;
#endif

  /* Allocate the table lazily on first use.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  /* Copy the target's static elimination pairs, asking it whether each
     one is currently possible.  An elimination into the stack pointer
     is never possible when a frame pointer is needed (unless the frame
     is realigned off the frame pointer).  */
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      ep->can_eliminate = ep->can_eliminate_previous
	= (targetm.can_eliminate (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM
		 && frame_pointer_needed
		 && (! SUPPORTS_STACK_ALIGNMENT
		     || ! stack_realign_fp)));
    }
#else
  /* Only the frame pointer -> stack pointer elimination exists.  */
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
4103 
/* Find all the pseudo registers that didn't get hard regs
   but do have known equivalent constants or memory slots.
   These include parameters (known equivalent to parameter slots)
   and cse'd or loop-moved constant memory addresses.

   Record constant equivalents in reg_equiv_constant
   so they will be substituted by find_reloads.
   Record memory equivalents in reg_mem_equiv so they can
   be substituted eventually by altering the REG-rtx's.

   FIRST is the head of the insn list.  If DO_SUBREGS is true, also
   record the widest paradoxical-subreg reference of each pseudo in
   reg_max_ref_width.  */

static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

/* Look for REG_EQUIV notes; record what each pseudo is equivalent
   to.  If DO_SUBREGS is true, also find all paradoxical subregs and
   find largest such for each pseudo.  FIRST is the head of the insn
   list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Virtual registers are handled elsewhere; skip them.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* An invariant that isn't a legitimate constant
			 must live in the constant pool instead.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4230 
4231 /* Indicate that we no longer have known memory locations or constants.
4232    Free all data involved in tracking these.  */
4233 
4234 static void
free_reg_equiv(void)4235 free_reg_equiv (void)
4236 {
4237   int i;
4238 
4239   free (offsets_known_at);
4240   free (offsets_at);
4241   offsets_at = 0;
4242   offsets_known_at = 0;
4243 
4244   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4245     if (reg_equiv_alt_mem_list (i))
4246       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4247   vec_free (reg_equivs);
4248 }
4249 
4250 /* Kick all pseudos out of hard register REGNO.
4251 
4252    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4253    because we found we can't eliminate some register.  In the case, no pseudos
4254    are allowed to be in the register, even if they are only in a block that
4255    doesn't require spill registers, unlike the case when we are spilling this
4256    hard reg to produce another spill register.
4257 
4258    Return nonzero if any pseudos needed to be kicked out.  */
4259 
4260 static void
spill_hard_reg(unsigned int regno,int cant_eliminate)4261 spill_hard_reg (unsigned int regno, int cant_eliminate)
4262 {
4263   int i;
4264 
4265   if (cant_eliminate)
4266     {
4267       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4268       df_set_regs_ever_live (regno, true);
4269     }
4270 
4271   /* Spill every pseudo reg that was allocated to this reg
4272      or to something that overlaps this reg.  */
4273 
4274   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4275     if (reg_renumber[i] >= 0
4276 	&& (unsigned int) reg_renumber[i] <= regno
4277 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4278       SET_REGNO_REG_SET (&spilled_pseudos, i);
4279 }
4280 
/* After find_reload_regs has been run for all insn that need reloads,
   and/or spill_hard_regs was called, this function is used to actually
   spill pseudo registers and try to reallocate them.  It also sets up the
   spill_regs array for use by choose_reload_regs.

   Return nonzero if anything changed that requires another reload pass.  */

static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* For every spilled pseudo that still has a hard register home,
     record that home and then take it away.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns.  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      /* Pass the old assignment so alter_reg can reuse a stack slot
	 allocated when the pseudo previously lived in memory.  */
      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4439 
4440 /* Find all paradoxical subregs within X and update reg_max_ref_width.  */
4441 
4442 static void
scan_paradoxical_subregs(rtx x)4443 scan_paradoxical_subregs (rtx x)
4444 {
4445   int i;
4446   const char *fmt;
4447   enum rtx_code code = GET_CODE (x);
4448 
4449   switch (code)
4450     {
4451     case REG:
4452     case CONST:
4453     case SYMBOL_REF:
4454     case LABEL_REF:
4455     CASE_CONST_ANY:
4456     case CC0:
4457     case PC:
4458     case USE:
4459     case CLOBBER:
4460       return;
4461 
4462     case SUBREG:
4463       if (REG_P (SUBREG_REG (x))
4464 	  && (GET_MODE_SIZE (GET_MODE (x))
4465 	      > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4466 	{
4467 	  reg_max_ref_width[REGNO (SUBREG_REG (x))]
4468 	    = GET_MODE_SIZE (GET_MODE (x));
4469 	  mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4470 	}
4471       return;
4472 
4473     default:
4474       break;
4475     }
4476 
4477   fmt = GET_RTX_FORMAT (code);
4478   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4479     {
4480       if (fmt[i] == 'e')
4481 	scan_paradoxical_subregs (XEXP (x, i));
4482       else if (fmt[i] == 'E')
4483 	{
4484 	  int j;
4485 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4486 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4487 	}
4488     }
4489 }
4490 
4491 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4492    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4493    and apply the corresponding narrowing subreg to *OTHER_PTR.
4494    Return true if the operands were changed, false otherwise.  */
4495 
4496 static bool
strip_paradoxical_subreg(rtx * op_ptr,rtx * other_ptr)4497 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4498 {
4499   rtx op, inner, other, tem;
4500 
4501   op = *op_ptr;
4502   if (!paradoxical_subreg_p (op))
4503     return false;
4504   inner = SUBREG_REG (op);
4505 
4506   other = *other_ptr;
4507   tem = gen_lowpart_common (GET_MODE (inner), other);
4508   if (!tem)
4509     return false;
4510 
4511   /* If the lowpart operation turned a hard register into a subreg,
4512      rather than simplifying it to another hard register, then the
4513      mode change cannot be properly represented.  For example, OTHER
4514      might be valid in its current mode, but not in the new one.  */
4515   if (GET_CODE (tem) == SUBREG
4516       && REG_P (other)
4517       && HARD_REGISTER_P (other))
4518     return false;
4519 
4520   *op_ptr = inner;
4521   *other_ptr = tem;
4522   return true;
4523 }
4524 
4525 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4526    examine all of the reload insns between PREV and NEXT exclusive, and
4527    annotate all that may trap.  */
4528 
4529 static void
fixup_eh_region_note(rtx_insn * insn,rtx_insn * prev,rtx_insn * next)4530 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4531 {
4532   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4533   if (note == NULL)
4534     return;
4535   if (!insn_could_throw_p (insn))
4536     remove_note (insn, note);
4537   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4538 }
4539 
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.

   LIVE_KNOWN is nonzero when liveness information in the insn chain is
   reliable; it is passed through to find_reloads.  */

static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if AUTO_INC_DEC
  int i;
#endif
  rtx_note *marker;

  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#if AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      /* The insn was turned into a NOTE, i.e. deleted.  */
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* Recompute PREV from the marker, then remove the marker.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 INTVAL (XEXP (p, 0)));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#if AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      /* Look backwards for the single insn between PREV and
			 OLD_NEXT that uses the reload reg, and try to turn
			 its access into a true POST_INC / POST_DEC.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      /* PRE_INC / PRE_DEC: the reload reg holds the final
			 value, so inheritance stays valid.  */
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4901 
4902 /* Discard all record of any value reloaded from X,
4903    or reloaded in X from someplace else;
4904    unless X is an output reload reg of the current insn.
4905 
4906    X may be a hard reg (the reload reg)
4907    or it may be a pseudo reg that was reloaded from.
4908 
4909    When DATA is non-NULL just mark the registers in regset
4910    to be forgotten later.  */
4911 
4912 static void
forget_old_reloads_1(rtx x,const_rtx ignored ATTRIBUTE_UNUSED,void * data)4913 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4914 		      void *data)
4915 {
4916   unsigned int regno;
4917   unsigned int nr;
4918   regset regs = (regset) data;
4919 
4920   /* note_stores does give us subregs of hard regs,
4921      subreg_regno_offset requires a hard reg.  */
4922   while (GET_CODE (x) == SUBREG)
4923     {
4924       /* We ignore the subreg offset when calculating the regno,
4925 	 because we are using the entire underlying hard register
4926 	 below.  */
4927       x = SUBREG_REG (x);
4928     }
4929 
4930   if (!REG_P (x))
4931     return;
4932 
4933   regno = REGNO (x);
4934 
4935   if (regno >= FIRST_PSEUDO_REGISTER)
4936     nr = 1;
4937   else
4938     {
4939       unsigned int i;
4940 
4941       nr = hard_regno_nregs[regno][GET_MODE (x)];
4942       /* Storing into a spilled-reg invalidates its contents.
4943 	 This can happen if a block-local pseudo is allocated to that reg
4944 	 and it wasn't spilled because this block's total need is 0.
4945 	 Then some insn might have an optional reload and use this reg.  */
4946       if (!regs)
4947 	for (i = 0; i < nr; i++)
4948 	  /* But don't do this if the reg actually serves as an output
4949 	     reload reg in the current instruction.  */
4950 	  if (n_reloads == 0
4951 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4952 	    {
4953 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4954 	      spill_reg_store[regno + i] = 0;
4955 	    }
4956     }
4957 
4958   if (regs)
4959     while (nr-- > 0)
4960       SET_REGNO_REG_SET (regs, regno + nr);
4961   else
4962     {
4963       /* Since value of X has changed,
4964 	 forget any value previously copied from it.  */
4965 
4966       while (nr-- > 0)
4967 	/* But don't forget a copy if this is the output reload
4968 	   that establishes the copy's validity.  */
4969 	if (n_reloads == 0
4970 	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4971 	  reg_last_reload_reg[regno + nr] = 0;
4972      }
4973 }
4974 
4975 /* Forget the reloads marked in regset by previous function.  */
4976 static void
forget_marked_reloads(regset regs)4977 forget_marked_reloads (regset regs)
4978 {
4979   unsigned int reg;
4980   reg_set_iterator rsi;
4981   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4982     {
4983       if (reg < FIRST_PSEUDO_REGISTER
4984 	  /* But don't do this if the reg actually serves as an output
4985 	     reload reg in the current instruction.  */
4986 	  && (n_reloads == 0
4987 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4988 	  {
4989 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4990 	    spill_reg_store[reg] = 0;
4991 	  }
4992       if (n_reloads == 0
4993 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4994 	reg_last_reload_reg[reg] = 0;
4995     }
4996 }
4997 
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5036 
5037 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5038    TYPE. MODE is used to indicate how many consecutive regs are
5039    actually used.  */
5040 
5041 static void
mark_reload_reg_in_use(unsigned int regno,int opnum,enum reload_type type,machine_mode mode)5042 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5043 			machine_mode mode)
5044 {
5045   switch (type)
5046     {
5047     case RELOAD_OTHER:
5048       add_to_hard_reg_set (&reload_reg_used, mode, regno);
5049       break;
5050 
5051     case RELOAD_FOR_INPUT_ADDRESS:
5052       add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5053       break;
5054 
5055     case RELOAD_FOR_INPADDR_ADDRESS:
5056       add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5057       break;
5058 
5059     case RELOAD_FOR_OUTPUT_ADDRESS:
5060       add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5061       break;
5062 
5063     case RELOAD_FOR_OUTADDR_ADDRESS:
5064       add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5065       break;
5066 
5067     case RELOAD_FOR_OPERAND_ADDRESS:
5068       add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5069       break;
5070 
5071     case RELOAD_FOR_OPADDR_ADDR:
5072       add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5073       break;
5074 
5075     case RELOAD_FOR_OTHER_ADDRESS:
5076       add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5077       break;
5078 
5079     case RELOAD_FOR_INPUT:
5080       add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5081       break;
5082 
5083     case RELOAD_FOR_OUTPUT:
5084       add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5085       break;
5086 
5087     case RELOAD_FOR_INSN:
5088       add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
5089       break;
5090     }
5091 
5092   add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5093 }
5094 
/* Similarly, but show REGNO is no longer in use for a reload.
   OPNUM, TYPE and MODE describe the reload being released, mirroring
   the arguments that marked the register in use.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  /* Number of consecutive hard regs, starting at REGNO, occupied by a
     value of MODE.  */
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the in-use set corresponding to TYPE (and, for the
     per-operand types, to OPNUM).  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free whatever remains of the original interval after the
     exclusions above.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5201 
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  A register already claimed by a
   RELOAD_OTHER reload, or marked unavailable, is never free.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* Conflicts with the insn itself and with operand-address
	 reloads, which are live while the insn executes.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input reload, with the insn, and with other
	 operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input reload and with other reloads of the
	 same kind.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with any input or output reload, the insn itself,
	 and operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Only conflicts with other RELOAD_FOR_OTHER_ADDRESS reloads.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5362 
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;
      int nregs;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
      /* A later same-type, same-operand reload into a register that
	 covers REGNO clobbers our value.  */
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Same checks as above, plus the operand-address and insn sets.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5529 
5530 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5531    every register in REG.  */
5532 
5533 static bool
reload_reg_rtx_reaches_end_p(rtx reg,int reloadnum)5534 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5535 {
5536   unsigned int i;
5537 
5538   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5539     if (!reload_reg_reaches_end_p (i, reloadnum))
5540       return false;
5541   return true;
5542 }
5543 
5544 
5545 /*  Returns whether R1 and R2 are uniquely chained: the value of one
5546     is used by the other, and that value is not used by any other
5547     reload for this insn.  This is used to partially undo the decision
5548     made in find_reloads when in the case of multiple
5549     RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5550     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5551     reloads.  This code tries to avoid the conflict created by that
5552     change.  It might be cleaner to explicitly keep track of which
5553     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5554     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5555     this after the fact. */
5556 static bool
reloads_unique_chain_p(int r1,int r2)5557 reloads_unique_chain_p (int r1, int r2)
5558 {
5559   int i;
5560 
5561   /* We only check input reloads.  */
5562   if (! rld[r1].in || ! rld[r2].in)
5563     return false;
5564 
5565   /* Avoid anything with output reloads.  */
5566   if (rld[r1].out || rld[r2].out)
5567     return false;
5568 
5569   /* "chained" means one reload is a component of the other reload,
5570      not the same as the other reload.  */
5571   if (rld[r1].opnum != rld[r2].opnum
5572       || rtx_equal_p (rld[r1].in, rld[r2].in)
5573       || rld[r1].optional || rld[r2].optional
5574       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5575 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5576     return false;
5577 
5578   /* The following loop assumes that r1 is the reload that feeds r2.  */
5579   if (r1 > r2)
5580     std::swap (r1, r2);
5581 
5582   for (i = 0; i < n_reloads; i ++)
5583     /* Look for input reloads that aren't our two */
5584     if (i != r1 && i != r2 && rld[i].in)
5585       {
5586 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5587 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5588 	  return false;
5589       }
5590   return true;
5591 }
5592 
5593 /* The recursive function change all occurrences of WHAT in *WHERE
5594    to REPL.  */
5595 static void
substitute(rtx * where,const_rtx what,rtx repl)5596 substitute (rtx *where, const_rtx what, rtx repl)
5597 {
5598   const char *fmt;
5599   int i;
5600   enum rtx_code code;
5601 
5602   if (*where == 0)
5603     return;
5604 
5605   if (*where == what || rtx_equal_p (*where, what))
5606     {
5607       /* Record the location of the changed rtx.  */
5608       substitute_stack.safe_push (where);
5609       *where = repl;
5610       return;
5611     }
5612 
5613   code = GET_CODE (*where);
5614   fmt = GET_RTX_FORMAT (code);
5615   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5616     {
5617       if (fmt[i] == 'E')
5618 	{
5619 	  int j;
5620 
5621 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5622 	    substitute (&XVECEXP (*where, i, j), what, repl);
5623 	}
5624       else if (fmt[i] == 'e')
5625 	substitute (&XEXP (*where, i), what, repl);
5626     }
5627 }
5628 
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  /* Remember the current last insn so any insns emitted below for the
     trial recognition can be deleted afterwards.  */
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use R1's reload register if it has one, else R2's; one of them
     must have been assigned.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Build the candidate source by replacing the inner reload's value
     with the shared reload register.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit the trial insn and see whether it is recognized and
	 satisfies its constraints.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5713 
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.
   The switch is keyed on R1's type; each case enumerates the R2 types
   (and operand-number relations) that conflict with it.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Two operand-address reloads only avoid conflicting when they
	 form a unique chain that gen_reload can emit directly.  */
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
		  && (!reloads_unique_chain_p (r1, r2)
		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      gcc_unreachable ();
    }
}
5791 
/* Per-reload bookkeeping used while assigning reload registers,
   all indexed by reload number.  */

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5813 
5814 /* Subroutine of free_for_value_p, used to check a single register.
5815    START_REGNO is the starting regno of the full reload register
5816    (possibly comprising multiple hard registers) that we are considering.  */
5817 
5818 static int
reload_reg_free_for_value_p(int start_regno,int regno,int opnum,enum reload_type type,rtx value,rtx out,int reloadnum,int ignore_address_reloads)5819 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5820 			     enum reload_type type, rtx value, rtx out,
5821 			     int reloadnum, int ignore_address_reloads)
5822 {
5823   int time1;
5824   /* Set if we see an input reload that must not share its reload register
5825      with any new earlyclobber, but might otherwise share the reload
5826      register with an output or input-output reload.  */
5827   int check_earlyclobber = 0;
5828   int i;
5829   int copy = 0;
5830 
5831   if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5832     return 0;
5833 
5834   if (out == const0_rtx)
5835     {
5836       copy = 1;
5837       out = NULL_RTX;
5838     }
5839 
5840   /* We use some pseudo 'time' value to check if the lifetimes of the
5841      new register use would overlap with the one of a previous reload
5842      that is not read-only or uses a different value.
5843      The 'time' used doesn't have to be linear in any shape or form, just
5844      monotonic.
5845      Some reload types use different 'buckets' for each operand.
5846      So there are MAX_RECOG_OPERANDS different time values for each
5847      such reload type.
5848      We compute TIME1 as the time when the register for the prospective
5849      new reload ceases to be live, and TIME2 for each existing
5850      reload as the time when that the reload register of that reload
5851      becomes live.
5852      Where there is little to be gained by exact lifetime calculations,
5853      we just make conservative assumptions, i.e. a longer lifetime;
5854      this is done in the 'default:' cases.  */
5855   switch (type)
5856     {
5857     case RELOAD_FOR_OTHER_ADDRESS:
5858       /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5859       time1 = copy ? 0 : 1;
5860       break;
5861     case RELOAD_OTHER:
5862       time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5863       break;
5864       /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5865 	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5866 	 respectively, to the time values for these, we get distinct time
5867 	 values.  To get distinct time values for each operand, we have to
5868 	 multiply opnum by at least three.  We round that up to four because
5869 	 multiply by four is often cheaper.  */
5870     case RELOAD_FOR_INPADDR_ADDRESS:
5871       time1 = opnum * 4 + 2;
5872       break;
5873     case RELOAD_FOR_INPUT_ADDRESS:
5874       time1 = opnum * 4 + 3;
5875       break;
5876     case RELOAD_FOR_INPUT:
5877       /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5878 	 executes (inclusive).  */
5879       time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5880       break;
5881     case RELOAD_FOR_OPADDR_ADDR:
5882       /* opnum * 4 + 4
5883 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5884       time1 = MAX_RECOG_OPERANDS * 4 + 1;
5885       break;
5886     case RELOAD_FOR_OPERAND_ADDRESS:
5887       /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5888 	 is executed.  */
5889       time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5890       break;
5891     case RELOAD_FOR_OUTADDR_ADDRESS:
5892       time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5893       break;
5894     case RELOAD_FOR_OUTPUT_ADDRESS:
5895       time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5896       break;
5897     default:
5898       time1 = MAX_RECOG_OPERANDS * 5 + 5;
5899     }
5900 
5901   for (i = 0; i < n_reloads; i++)
5902     {
5903       rtx reg = rld[i].reg_rtx;
5904       if (reg && REG_P (reg)
5905 	  && ((unsigned) regno - true_regnum (reg)
5906 	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5907 	  && i != reloadnum)
5908 	{
5909 	  rtx other_input = rld[i].in;
5910 
5911 	  /* If the other reload loads the same input value, that
5912 	     will not cause a conflict only if it's loading it into
5913 	     the same register.  */
5914 	  if (true_regnum (reg) != start_regno)
5915 	    other_input = NULL_RTX;
5916 	  if (! other_input || ! rtx_equal_p (other_input, value)
5917 	      || rld[i].out || out)
5918 	    {
5919 	      int time2;
5920 	      switch (rld[i].when_needed)
5921 		{
5922 		case RELOAD_FOR_OTHER_ADDRESS:
5923 		  time2 = 0;
5924 		  break;
5925 		case RELOAD_FOR_INPADDR_ADDRESS:
5926 		  /* find_reloads makes sure that a
5927 		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5928 		     by at most one - the first -
5929 		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
5930 		     address reload is inherited, the address address reload
5931 		     goes away, so we can ignore this conflict.  */
5932 		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5933 		      && ignore_address_reloads
5934 		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5935 			 Then the address address is still needed to store
5936 			 back the new address.  */
5937 		      && ! rld[reloadnum].out)
5938 		    continue;
5939 		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5940 		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5941 		     reloads go away.  */
5942 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5943 		      && ignore_address_reloads
5944 		      /* Unless we are reloading an auto_inc expression.  */
5945 		      && ! rld[reloadnum].out)
5946 		    continue;
5947 		  time2 = rld[i].opnum * 4 + 2;
5948 		  break;
5949 		case RELOAD_FOR_INPUT_ADDRESS:
5950 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5951 		      && ignore_address_reloads
5952 		      && ! rld[reloadnum].out)
5953 		    continue;
5954 		  time2 = rld[i].opnum * 4 + 3;
5955 		  break;
5956 		case RELOAD_FOR_INPUT:
5957 		  time2 = rld[i].opnum * 4 + 4;
5958 		  check_earlyclobber = 1;
5959 		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
		     == MAX_RECOG_OPERANDS * 4  */
5962 		case RELOAD_FOR_OPADDR_ADDR:
5963 		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5964 		      && ignore_address_reloads
5965 		      && ! rld[reloadnum].out)
5966 		    continue;
5967 		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5968 		  break;
5969 		case RELOAD_FOR_OPERAND_ADDRESS:
5970 		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5971 		  check_earlyclobber = 1;
5972 		  break;
5973 		case RELOAD_FOR_INSN:
5974 		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5975 		  break;
5976 		case RELOAD_FOR_OUTPUT:
5977 		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5978 		     instruction is executed.  */
5979 		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5980 		  break;
5981 		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5982 		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5983 		     value.  */
5984 		case RELOAD_FOR_OUTADDR_ADDRESS:
5985 		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5986 		      && ignore_address_reloads
5987 		      && ! rld[reloadnum].out)
5988 		    continue;
5989 		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5990 		  break;
5991 		case RELOAD_FOR_OUTPUT_ADDRESS:
5992 		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5993 		  break;
5994 		case RELOAD_OTHER:
5995 		  /* If there is no conflict in the input part, handle this
5996 		     like an output reload.  */
5997 		  if (! rld[i].in || rtx_equal_p (other_input, value))
5998 		    {
5999 		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
6000 		      /* Earlyclobbered outputs must conflict with inputs.  */
6001 		      if (earlyclobber_operand_p (rld[i].out))
6002 			time2 = MAX_RECOG_OPERANDS * 4 + 3;
6003 
6004 		      break;
6005 		    }
6006 		  time2 = 1;
6007 		  /* RELOAD_OTHER might be live beyond instruction execution,
6008 		     but this is not obvious when we set time2 = 1.  So check
6009 		     here if there might be a problem with the new reload
6010 		     clobbering the register used by the RELOAD_OTHER.  */
6011 		  if (out)
6012 		    return 0;
6013 		  break;
6014 		default:
6015 		  return 0;
6016 		}
6017 	      if ((time1 >= time2
6018 		   && (! rld[i].in || rld[i].out
6019 		       || ! rtx_equal_p (other_input, value)))
6020 		  || (out && rld[reloadnum].out_reg
6021 		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
6022 		return 0;
6023 	    }
6024 	}
6025     }
6026 
6027   /* Earlyclobbered outputs must conflict with inputs.  */
6028   if (check_earlyclobber && out && earlyclobber_operand_p (out))
6029     return 0;
6030 
6031   return 1;
6032 }
6033 
6034 /* Return 1 if the value in reload reg REGNO, as used by a reload
6035    needed for the part of the insn specified by OPNUM and TYPE,
6036    may be used to load VALUE into it.
6037 
6038    MODE is the mode in which the register is used, this is needed to
6039    determine how many hard regs to test.
6040 
6041    Other read-only reloads with the same value do not conflict
6042    unless OUT is nonzero and these other reloads have to live while
6043    output reloads live.
6044    If OUT is CONST0_RTX, this is a special case: it means that the
6045    test should not be for using register REGNO as reload register, but
6046    for copying from register REGNO into the reload register.
6047 
6048    RELOADNUM is the number of the reload we want to load this value for;
6049    a reload does not conflict with itself.
6050 
6051    When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6052    reloads that load an address for the very reload we are considering.
6053 
6054    The caller has to make sure that there is no conflict with the return
6055    register.  */
6056 
6057 static int
free_for_value_p(int regno,machine_mode mode,int opnum,enum reload_type type,rtx value,rtx out,int reloadnum,int ignore_address_reloads)6058 free_for_value_p (int regno, machine_mode mode, int opnum,
6059 		  enum reload_type type, rtx value, rtx out, int reloadnum,
6060 		  int ignore_address_reloads)
6061 {
6062   int nregs = hard_regno_nregs[regno][mode];
6063   while (nregs-- > 0)
6064     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6065 				       value, out, reloadnum,
6066 				       ignore_address_reloads))
6067       return 0;
6068   return 1;
6069 }
6070 
6071 /* Return nonzero if the rtx X is invariant over the current function.  */
6072 /* ??? Actually, the places where we use this expect exactly what is
6073    tested here, and not everything that is function invariant.  In
6074    particular, the frame pointer and arg pointer are special cased;
6075    pic_offset_table_rtx is not, and we must not spill these things to
6076    memory.  */
6077 
6078 int
function_invariant_p(const_rtx x)6079 function_invariant_p (const_rtx x)
6080 {
6081   if (CONSTANT_P (x))
6082     return 1;
6083   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6084     return 1;
6085   if (GET_CODE (x) == PLUS
6086       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6087       && GET_CODE (XEXP (x, 1)) == CONST_INT)
6088     return 1;
6089   return 0;
6090 }
6091 
6092 /* Determine whether the reload reg X overlaps any rtx'es used for
6093    overriding inheritance.  Return nonzero if so.  */
6094 
6095 static int
conflicts_with_override(rtx x)6096 conflicts_with_override (rtx x)
6097 {
6098   int i;
6099   for (i = 0; i < n_reloads; i++)
6100     if (reload_override_in[i]
6101 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
6102       return 1;
6103   return 0;
6104 }
6105 
6106 /* Give an error message saying we failed to find a reload for INSN,
6107    and clear out reload R.  */
6108 static void
failed_reload(rtx_insn * insn,int r)6109 failed_reload (rtx_insn *insn, int r)
6110 {
6111   if (asm_noperands (PATTERN (insn)) < 0)
6112     /* It's the compiler's fault.  */
6113     fatal_insn ("could not find a spill register", insn);
6114 
6115   /* It's the user's fault; the operand's mode and constraint
6116      don't match.  Disable this reload so we don't crash in final.  */
6117   error_for_asm (insn,
6118 		 "%<asm%> operand constraint incompatible with operand size");
6119   rld[r].in = 0;
6120   rld[r].out = 0;
6121   rld[r].reg_rtx = 0;
6122   rld[r].optional = 1;
6123   rld[r].secondary_p = 1;
6124 }
6125 
6126 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6127    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6128    successful.  */
6129 static int
set_reload_reg(int i,int r)6130 set_reload_reg (int i, int r)
6131 {
6132   /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6133      parameter.  */
6134   int regno ATTRIBUTE_UNUSED;
6135   rtx reg = spill_reg_rtx[i];
6136 
6137   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6138     spill_reg_rtx[i] = reg
6139       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6140 
6141   regno = true_regnum (reg);
6142 
6143   /* Detect when the reload reg can't hold the reload mode.
6144      This used to be one `if', but Sequent compiler can't handle that.  */
6145   if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6146     {
6147       machine_mode test_mode = VOIDmode;
6148       if (rld[r].in)
6149 	test_mode = GET_MODE (rld[r].in);
6150       /* If rld[r].in has VOIDmode, it means we will load it
6151 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6152 	 We have already tested that for validity.  */
6153       /* Aside from that, we need to test that the expressions
6154 	 to reload from or into have modes which are valid for this
6155 	 reload register.  Otherwise the reload insns would be invalid.  */
6156       if (! (rld[r].in != 0 && test_mode != VOIDmode
6157 	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6158 	if (! (rld[r].out != 0
6159 	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6160 	  {
6161 	    /* The reg is OK.  */
6162 	    last_spill_reg = i;
6163 
6164 	    /* Mark as in use for this insn the reload regs we use
6165 	       for this.  */
6166 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6167 				    rld[r].when_needed, rld[r].mode);
6168 
6169 	    rld[r].reg_rtx = reg;
6170 	    reload_spill_index[r] = spill_regs[i];
6171 	    return 1;
6172 	  }
6173     }
6174   return 0;
6175 }
6176 
6177 /* Find a spill register to use as a reload register for reload R.
6178    LAST_RELOAD is nonzero if this is the last reload for the insn being
6179    processed.
6180 
6181    Set rld[R].reg_rtx to the register allocated.
6182 
6183    We return 1 if successful, or 0 if we couldn't find a spill reg and
6184    we didn't change anything.  */
6185 
static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance I with wrap-around, so the scan starts just past
	     the most recently chosen spill register.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* The candidate must be free (or reusable for the same
	     value), belong to the reload's class, and support the
	     reload mode.  On pass 0 we additionally insist on sharing
	     a register already in use for another reload.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  The check runs from the highest
		 member of the group downward.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      /* NR reaches 1 only if every member of the group was
	         acceptable; otherwise the while loop stopped early.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6312 
6313 /* Initialize all the tables needed to allocate reload registers.
6314    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6315    is the array we use to restore the reg_rtx field for every reload.  */
6316 
6317 static void
choose_reload_regs_init(struct insn_chain * chain,rtx * save_reload_reg_rtx)6318 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6319 {
6320   int i;
6321 
6322   for (i = 0; i < n_reloads; i++)
6323     rld[i].reg_rtx = save_reload_reg_rtx[i];
6324 
6325   memset (reload_inherited, 0, MAX_RELOADS);
6326   memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6327   memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6328 
6329   CLEAR_HARD_REG_SET (reload_reg_used);
6330   CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6331   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6332   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6333   CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6334   CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6335 
6336   CLEAR_HARD_REG_SET (reg_used_in_insn);
6337   {
6338     HARD_REG_SET tmp;
6339     REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6340     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6341     REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6342     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6343     compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6344     compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6345   }
6346 
6347   for (i = 0; i < reload_n_operands; i++)
6348     {
6349       CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6350       CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6351       CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6352       CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6353       CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6354       CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6355     }
6356 
6357   COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6358 
6359   CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6360 
6361   for (i = 0; i < n_reloads; i++)
6362     /* If we have already decided to use a certain register,
6363        don't use it in another way.  */
6364     if (rld[i].reg_rtx)
6365       mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6366 			      rld[i].when_needed, rld[i].mode);
6367 }
6368 
6369 #ifdef SECONDARY_MEMORY_NEEDED
6370 /* If X is not a subreg, return it unmodified.  If it is a subreg,
6371    look up whether we made a replacement for the SUBREG_REG.  Return
6372    either the replacement or the SUBREG_REG.  */
6373 
6374 static rtx
replaced_subreg(rtx x)6375 replaced_subreg (rtx x)
6376 {
6377   if (GET_CODE (x) == SUBREG)
6378     return find_replacement (&SUBREG_REG (x));
6379   return x;
6380 }
6381 #endif
6382 
6383 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6384    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6385    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6386    otherwise it is NULL.  */
6387 
6388 static int
compute_reload_subreg_offset(machine_mode outermode,rtx subreg,machine_mode innermode)6389 compute_reload_subreg_offset (machine_mode outermode,
6390 			      rtx subreg,
6391 			      machine_mode innermode)
6392 {
6393   int outer_offset;
6394   machine_mode middlemode;
6395 
6396   if (!subreg)
6397     return subreg_lowpart_offset (outermode, innermode);
6398 
6399   outer_offset = SUBREG_BYTE (subreg);
6400   middlemode = GET_MODE (SUBREG_REG (subreg));
6401 
6402   /* If SUBREG is paradoxical then return the normal lowpart offset
6403      for OUTERMODE and INNERMODE.  Our caller has already checked
6404      that OUTERMODE fits in INNERMODE.  */
6405   if (outer_offset == 0
6406       && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
6407     return subreg_lowpart_offset (outermode, innermode);
6408 
6409   /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6410      plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
6411   return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6412 }
6413 
6414 /* Assign hard reg targets for the pseudo-registers we must reload
6415    into hard regs for this insn.
6416    Also output the instructions to copy them in and out of the hard regs.
6417 
6418    For machines with register classes, we are responsible for
6419    finding a reload reg in the proper class.  */
6420 
6421 static void
choose_reload_regs(struct insn_chain * chain)6422 choose_reload_regs (struct insn_chain *chain)
6423 {
6424   rtx_insn *insn = chain->insn;
6425   int i, j;
6426   unsigned int max_group_size = 1;
6427   enum reg_class group_class = NO_REGS;
6428   int pass, win, inheritance;
6429 
6430   rtx save_reload_reg_rtx[MAX_RELOADS];
6431 
6432   /* In order to be certain of getting the registers we need,
6433      we must sort the reloads into order of increasing register class.
6434      Then our grabbing of reload registers will parallel the process
6435      that provided the reload registers.
6436 
6437      Also note whether any of the reloads wants a consecutive group of regs.
6438      If so, record the maximum size of the group desired and what
6439      register class contains all the groups needed by this insn.  */
6440 
6441   for (j = 0; j < n_reloads; j++)
6442     {
6443       reload_order[j] = j;
6444       if (rld[j].reg_rtx != NULL_RTX)
6445 	{
6446 	  gcc_assert (REG_P (rld[j].reg_rtx)
6447 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6448 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6449 	}
6450       else
6451 	reload_spill_index[j] = -1;
6452 
6453       if (rld[j].nregs > 1)
6454 	{
6455 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6456 	  group_class
6457 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6458 	}
6459 
6460       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6461     }
6462 
6463   if (n_reloads > 1)
6464     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6465 
6466   /* If -O, try first with inheritance, then turning it off.
6467      If not -O, don't do inheritance.
6468      Using inheritance when not optimizing leads to paradoxes
6469      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6470      because one side of the comparison might be inherited.  */
6471   win = 0;
6472   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6473     {
6474       choose_reload_regs_init (chain, save_reload_reg_rtx);
6475 
6476       /* Process the reloads in order of preference just found.
6477 	 Beyond this point, subregs can be found in reload_reg_rtx.
6478 
6479 	 This used to look for an existing reloaded home for all of the
6480 	 reloads, and only then perform any new reloads.  But that could lose
6481 	 if the reloads were done out of reg-class order because a later
6482 	 reload with a looser constraint might have an old home in a register
6483 	 needed by an earlier reload with a tighter constraint.
6484 
6485 	 To solve this, we make two passes over the reloads, in the order
6486 	 described above.  In the first pass we try to inherit a reload
6487 	 from a previous insn.  If there is a later reload that needs a
6488 	 class that is a proper subset of the class being processed, we must
6489 	 also allocate a spill register during the first pass.
6490 
6491 	 Then make a second pass over the reloads to allocate any reloads
6492 	 that haven't been given registers yet.  */
6493 
6494       for (j = 0; j < n_reloads; j++)
6495 	{
6496 	  int r = reload_order[j];
6497 	  rtx search_equiv = NULL_RTX;
6498 
6499 	  /* Ignore reloads that got marked inoperative.  */
6500 	  if (rld[r].out == 0 && rld[r].in == 0
6501 	      && ! rld[r].secondary_p)
6502 	    continue;
6503 
6504 	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
6506 	     found one since we might save an insn if we find the value lying
6507 	     around.
6508 	     Try also when reload_in is a pseudo without a hard reg.  */
6509 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6510 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6511 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6512 		      && !MEM_P (rld[r].in)
6513 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6514 	    continue;
6515 
6516 #if 0 /* No longer needed for correct operation.
6517 	 It might give better code, or might not; worth an experiment?  */
6518 	  /* If this is an optional reload, we can't inherit from earlier insns
6519 	     until we are sure that any non-optional reloads have been allocated.
6520 	     The following code takes advantage of the fact that optional reloads
6521 	     are at the end of reload_order.  */
6522 	  if (rld[r].optional != 0)
6523 	    for (i = 0; i < j; i++)
6524 	      if ((rld[reload_order[i]].out != 0
6525 		   || rld[reload_order[i]].in != 0
6526 		   || rld[reload_order[i]].secondary_p)
6527 		  && ! rld[reload_order[i]].optional
6528 		  && rld[reload_order[i]].reg_rtx == 0)
6529 		allocate_reload_reg (chain, reload_order[i], 0);
6530 #endif
6531 
6532 	  /* First see if this pseudo is already available as reloaded
6533 	     for a previous insn.  We cannot try to inherit for reloads
6534 	     that are smaller than the maximum number of registers needed
6535 	     for groups unless the register we would allocate cannot be used
6536 	     for the groups.
6537 
6538 	     We could check here to see if this is a secondary reload for
6539 	     an object that is already in a register of the desired class.
6540 	     This would avoid the need for the secondary reload register.
6541 	     But this is complex because we can't easily determine what
6542 	     objects might want to be loaded via this reload.  So let a
6543 	     register be allocated here.  In `emit_reload_insns' we suppress
6544 	     one of the loads in the case described above.  */
6545 
6546 	  if (inheritance)
6547 	    {
6548 	      int byte = 0;
6549 	      int regno = -1;
6550 	      machine_mode mode = VOIDmode;
6551 	      rtx subreg = NULL_RTX;
6552 
6553 	      if (rld[r].in == 0)
6554 		;
6555 	      else if (REG_P (rld[r].in))
6556 		{
6557 		  regno = REGNO (rld[r].in);
6558 		  mode = GET_MODE (rld[r].in);
6559 		}
6560 	      else if (REG_P (rld[r].in_reg))
6561 		{
6562 		  regno = REGNO (rld[r].in_reg);
6563 		  mode = GET_MODE (rld[r].in_reg);
6564 		}
6565 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6566 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6567 		{
6568 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6569 		  if (regno < FIRST_PSEUDO_REGISTER)
6570 		    regno = subreg_regno (rld[r].in_reg);
6571 		  else
6572 		    {
6573 		      subreg = rld[r].in_reg;
6574 		      byte = SUBREG_BYTE (subreg);
6575 		    }
6576 		  mode = GET_MODE (rld[r].in_reg);
6577 		}
6578 #if AUTO_INC_DEC
6579 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6580 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6581 		{
6582 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6583 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6584 		  rld[r].out = rld[r].in;
6585 		}
6586 #endif
6587 #if 0
6588 	      /* This won't work, since REGNO can be a pseudo reg number.
6589 		 Also, it takes much more hair to keep track of all the things
6590 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6591 	      else if (GET_CODE (rld[r].in) == SUBREG
6592 		       && REG_P (SUBREG_REG (rld[r].in)))
6593 		regno = subreg_regno (rld[r].in);
6594 #endif
6595 
6596 	      if (regno >= 0
6597 		  && reg_last_reload_reg[regno] != 0
6598 		  && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6599 		      >= GET_MODE_SIZE (mode) + byte)
6600 #ifdef CANNOT_CHANGE_MODE_CLASS
6601 		  /* Verify that the register it's in can be used in
6602 		     mode MODE.  */
6603 		  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6604 						GET_MODE (reg_last_reload_reg[regno]),
6605 						mode)
6606 #endif
6607 		  )
6608 		{
6609 		  enum reg_class rclass = rld[r].rclass, last_class;
6610 		  rtx last_reg = reg_last_reload_reg[regno];
6611 
6612 		  i = REGNO (last_reg);
6613 		  byte = compute_reload_subreg_offset (mode,
6614 						       subreg,
6615 						       GET_MODE (last_reg));
6616 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6617 		  last_class = REGNO_REG_CLASS (i);
6618 
6619 		  if (reg_reloaded_contents[i] == regno
6620 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6621 		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
6622 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6623 			  /* Even if we can't use this register as a reload
6624 			     register, we might use it for reload_override_in,
6625 			     if copying it to the desired class is cheap
6626 			     enough.  */
6627 			  || ((register_move_cost (mode, last_class, rclass)
6628 			       < memory_move_cost (mode, rclass, true))
6629 			      && (secondary_reload_class (1, rclass, mode,
6630 							  last_reg)
6631 				  == NO_REGS)
6632 #ifdef SECONDARY_MEMORY_NEEDED
6633 			      && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6634 							    mode)
6635 #endif
6636 			      ))
6637 
6638 		      && (rld[r].nregs == max_group_size
6639 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6640 						  i))
6641 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6642 					   rld[r].when_needed, rld[r].in,
6643 					   const0_rtx, r, 1))
6644 		    {
6645 		      /* If a group is needed, verify that all the subsequent
6646 			 registers still have their values intact.  */
6647 		      int nr = hard_regno_nregs[i][rld[r].mode];
6648 		      int k;
6649 
6650 		      for (k = 1; k < nr; k++)
6651 			if (reg_reloaded_contents[i + k] != regno
6652 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6653 			  break;
6654 
6655 		      if (k == nr)
6656 			{
6657 			  int i1;
6658 			  int bad_for_class;
6659 
6660 			  last_reg = (GET_MODE (last_reg) == mode
6661 				      ? last_reg : gen_rtx_REG (mode, i));
6662 
6663 			  bad_for_class = 0;
6664 			  for (k = 0; k < nr; k++)
6665 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6666 								  i+k);
6667 
6668 			  /* We found a register that contains the
6669 			     value we need.  If this register is the
6670 			     same as an `earlyclobber' operand of the
6671 			     current insn, just mark it as a place to
6672 			     reload from since we can't use it as the
6673 			     reload register itself.  */
6674 
6675 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6676 			    if (reg_overlap_mentioned_for_reload_p
6677 				(reg_last_reload_reg[regno],
6678 				 reload_earlyclobbers[i1]))
6679 			      break;
6680 
6681 			  if (i1 != n_earlyclobbers
6682 			      || ! (free_for_value_p (i, rld[r].mode,
6683 						      rld[r].opnum,
6684 						      rld[r].when_needed, rld[r].in,
6685 						      rld[r].out, r, 1))
6686 			      /* Don't use it if we'd clobber a pseudo reg.  */
6687 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6688 				  && rld[r].out
6689 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6690 			      /* Don't clobber the frame pointer.  */
6691 			      || (i == HARD_FRAME_POINTER_REGNUM
6692 				  && frame_pointer_needed
6693 				  && rld[r].out)
6694 			      /* Don't really use the inherited spill reg
6695 				 if we need it wider than we've got it.  */
6696 			      || (GET_MODE_SIZE (rld[r].mode)
6697 				  > GET_MODE_SIZE (mode))
6698 			      || bad_for_class
6699 
6700 			      /* If find_reloads chose reload_out as reload
6701 				 register, stay with it - that leaves the
6702 				 inherited register for subsequent reloads.  */
6703 			      || (rld[r].out && rld[r].reg_rtx
6704 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6705 			    {
6706 			      if (! rld[r].optional)
6707 				{
6708 				  reload_override_in[r] = last_reg;
6709 				  reload_inheritance_insn[r]
6710 				    = reg_reloaded_insn[i];
6711 				}
6712 			    }
6713 			  else
6714 			    {
6715 			      int k;
6716 			      /* We can use this as a reload reg.  */
6717 			      /* Mark the register as in use for this part of
6718 				 the insn.  */
6719 			      mark_reload_reg_in_use (i,
6720 						      rld[r].opnum,
6721 						      rld[r].when_needed,
6722 						      rld[r].mode);
6723 			      rld[r].reg_rtx = last_reg;
6724 			      reload_inherited[r] = 1;
6725 			      reload_inheritance_insn[r]
6726 				= reg_reloaded_insn[i];
6727 			      reload_spill_index[r] = i;
6728 			      for (k = 0; k < nr; k++)
6729 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6730 						  i + k);
6731 			    }
6732 			}
6733 		    }
6734 		}
6735 	    }
6736 
6737 	  /* Here's another way to see if the value is already lying around.  */
6738 	  if (inheritance
6739 	      && rld[r].in != 0
6740 	      && ! reload_inherited[r]
6741 	      && rld[r].out == 0
6742 	      && (CONSTANT_P (rld[r].in)
6743 		  || GET_CODE (rld[r].in) == PLUS
6744 		  || REG_P (rld[r].in)
6745 		  || MEM_P (rld[r].in))
6746 	      && (rld[r].nregs == max_group_size
6747 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6748 	    search_equiv = rld[r].in;
6749 
6750 	  if (search_equiv)
6751 	    {
6752 	      rtx equiv
6753 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6754 				  -1, NULL, 0, rld[r].mode);
6755 	      int regno = 0;
6756 
6757 	      if (equiv != 0)
6758 		{
6759 		  if (REG_P (equiv))
6760 		    regno = REGNO (equiv);
6761 		  else
6762 		    {
6763 		      /* This must be a SUBREG of a hard register.
6764 			 Make a new REG since this might be used in an
6765 			 address and not all machines support SUBREGs
6766 			 there.  */
6767 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6768 		      regno = subreg_regno (equiv);
6769 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6770 		      /* If we choose EQUIV as the reload register, but the
6771 			 loop below decides to cancel the inheritance, we'll
6772 			 end up reloading EQUIV in rld[r].mode, not the mode
6773 			 it had originally.  That isn't safe when EQUIV isn't
6774 			 available as a spill register since its value might
6775 			 still be live at this point.  */
6776 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6777 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6778 			  equiv = 0;
6779 		    }
6780 		}
6781 
6782 	      /* If we found a spill reg, reject it unless it is free
6783 		 and of the desired class.  */
6784 	      if (equiv != 0)
6785 		{
6786 		  int regs_used = 0;
6787 		  int bad_for_class = 0;
6788 		  int max_regno = regno + rld[r].nregs;
6789 
6790 		  for (i = regno; i < max_regno; i++)
6791 		    {
6792 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6793 						      i);
6794 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6795 							   i);
6796 		    }
6797 
6798 		  if ((regs_used
6799 		       && ! free_for_value_p (regno, rld[r].mode,
6800 					      rld[r].opnum, rld[r].when_needed,
6801 					      rld[r].in, rld[r].out, r, 1))
6802 		      || bad_for_class)
6803 		    equiv = 0;
6804 		}
6805 
6806 	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6807 		equiv = 0;
6808 
6809 	      /* We found a register that contains the value we need.
6810 		 If this register is the same as an `earlyclobber' operand
6811 		 of the current insn, just mark it as a place to reload from
6812 		 since we can't use it as the reload register itself.  */
6813 
6814 	      if (equiv != 0)
6815 		for (i = 0; i < n_earlyclobbers; i++)
6816 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6817 							  reload_earlyclobbers[i]))
6818 		    {
6819 		      if (! rld[r].optional)
6820 			reload_override_in[r] = equiv;
6821 		      equiv = 0;
6822 		      break;
6823 		    }
6824 
6825 	      /* If the equiv register we have found is explicitly clobbered
6826 		 in the current insn, it depends on the reload type if we
6827 		 can use it, use it for reload_override_in, or not at all.
6828 		 In particular, we then can't use EQUIV for a
6829 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6830 
6831 	      if (equiv != 0)
6832 		{
6833 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6834 		    switch (rld[r].when_needed)
6835 		      {
6836 		      case RELOAD_FOR_OTHER_ADDRESS:
6837 		      case RELOAD_FOR_INPADDR_ADDRESS:
6838 		      case RELOAD_FOR_INPUT_ADDRESS:
6839 		      case RELOAD_FOR_OPADDR_ADDR:
6840 			break;
6841 		      case RELOAD_OTHER:
6842 		      case RELOAD_FOR_INPUT:
6843 		      case RELOAD_FOR_OPERAND_ADDRESS:
6844 			if (! rld[r].optional)
6845 			  reload_override_in[r] = equiv;
6846 			/* Fall through.  */
6847 		      default:
6848 			equiv = 0;
6849 			break;
6850 		      }
6851 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6852 		    switch (rld[r].when_needed)
6853 		      {
6854 		      case RELOAD_FOR_OTHER_ADDRESS:
6855 		      case RELOAD_FOR_INPADDR_ADDRESS:
6856 		      case RELOAD_FOR_INPUT_ADDRESS:
6857 		      case RELOAD_FOR_OPADDR_ADDR:
6858 		      case RELOAD_FOR_OPERAND_ADDRESS:
6859 		      case RELOAD_FOR_INPUT:
6860 			break;
6861 		      case RELOAD_OTHER:
6862 			if (! rld[r].optional)
6863 			  reload_override_in[r] = equiv;
6864 			/* Fall through.  */
6865 		      default:
6866 			equiv = 0;
6867 			break;
6868 		      }
6869 		}
6870 
6871 	      /* If we found an equivalent reg, say no code need be generated
6872 		 to load it, and use it as our reload reg.  */
6873 	      if (equiv != 0
6874 		  && (regno != HARD_FRAME_POINTER_REGNUM
6875 		      || !frame_pointer_needed))
6876 		{
6877 		  int nr = hard_regno_nregs[regno][rld[r].mode];
6878 		  int k;
6879 		  rld[r].reg_rtx = equiv;
6880 		  reload_spill_index[r] = regno;
6881 		  reload_inherited[r] = 1;
6882 
6883 		  /* If reg_reloaded_valid is not set for this register,
6884 		     there might be a stale spill_reg_store lying around.
6885 		     We must clear it, since otherwise emit_reload_insns
6886 		     might delete the store.  */
6887 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6888 		    spill_reg_store[regno] = NULL;
6889 		  /* If any of the hard registers in EQUIV are spill
6890 		     registers, mark them as in use for this insn.  */
6891 		  for (k = 0; k < nr; k++)
6892 		    {
6893 		      i = spill_reg_order[regno + k];
6894 		      if (i >= 0)
6895 			{
6896 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6897 						  rld[r].when_needed,
6898 						  rld[r].mode);
6899 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6900 					    regno + k);
6901 			}
6902 		    }
6903 		}
6904 	    }
6905 
6906 	  /* If we found a register to use already, or if this is an optional
6907 	     reload, we are done.  */
6908 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6909 	    continue;
6910 
6911 #if 0
6912 	  /* No longer needed for correct operation.  Might or might
6913 	     not give better code on the average.  Want to experiment?  */
6914 
6915 	  /* See if there is a later reload that has a class different from our
6916 	     class that intersects our class or that requires less register
6917 	     than our reload.  If so, we must allocate a register to this
6918 	     reload now, since that reload might inherit a previous reload
6919 	     and take the only available register in our class.  Don't do this
6920 	     for optional reloads since they will force all previous reloads
6921 	     to be allocated.  Also don't do this for reloads that have been
6922 	     turned off.  */
6923 
6924 	  for (i = j + 1; i < n_reloads; i++)
6925 	    {
6926 	      int s = reload_order[i];
6927 
6928 	      if ((rld[s].in == 0 && rld[s].out == 0
6929 		   && ! rld[s].secondary_p)
6930 		  || rld[s].optional)
6931 		continue;
6932 
6933 	      if ((rld[s].rclass != rld[r].rclass
6934 		   && reg_classes_intersect_p (rld[r].rclass,
6935 					       rld[s].rclass))
6936 		  || rld[s].nregs < rld[r].nregs)
6937 		break;
6938 	    }
6939 
6940 	  if (i == n_reloads)
6941 	    continue;
6942 
6943 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6944 #endif
6945 	}
6946 
6947       /* Now allocate reload registers for anything non-optional that
6948 	 didn't get one yet.  */
6949       for (j = 0; j < n_reloads; j++)
6950 	{
6951 	  int r = reload_order[j];
6952 
6953 	  /* Ignore reloads that got marked inoperative.  */
6954 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6955 	    continue;
6956 
6957 	  /* Skip reloads that already have a register allocated or are
6958 	     optional.  */
6959 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6960 	    continue;
6961 
6962 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6963 	    break;
6964 	}
6965 
6966       /* If that loop got all the way, we have won.  */
6967       if (j == n_reloads)
6968 	{
6969 	  win = 1;
6970 	  break;
6971 	}
6972 
6973       /* Loop around and try without any inheritance.  */
6974     }
6975 
6976   if (! win)
6977     {
6978       /* First undo everything done by the failed attempt
6979 	 to allocate with inheritance.  */
6980       choose_reload_regs_init (chain, save_reload_reg_rtx);
6981 
6982       /* Some sanity tests to verify that the reloads found in the first
6983 	 pass are identical to the ones we have now.  */
6984       gcc_assert (chain->n_reloads == n_reloads);
6985 
6986       for (i = 0; i < n_reloads; i++)
6987 	{
6988 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6989 	    continue;
6990 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6991 	  for (j = 0; j < n_spills; j++)
6992 	    if (spill_regs[j] == chain->rld[i].regno)
6993 	      if (! set_reload_reg (j, i))
6994 		failed_reload (chain->insn, i);
6995 	}
6996     }
6997 
6998   /* If we thought we could inherit a reload, because it seemed that
6999      nothing else wanted the same reload register earlier in the insn,
7000      verify that assumption, now that all reloads have been assigned.
7001      Likewise for reloads where reload_override_in has been set.  */
7002 
7003   /* If doing expensive optimizations, do one preliminary pass that doesn't
7004      cancel any inheritance, but removes reloads that have been needed only
7005      for reloads that we know can be inherited.  */
7006   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
7007     {
7008       for (j = 0; j < n_reloads; j++)
7009 	{
7010 	  int r = reload_order[j];
7011 	  rtx check_reg;
7012 #ifdef SECONDARY_MEMORY_NEEDED
7013 	  rtx tem;
7014 #endif
7015 	  if (reload_inherited[r] && rld[r].reg_rtx)
7016 	    check_reg = rld[r].reg_rtx;
7017 	  else if (reload_override_in[r]
7018 		   && (REG_P (reload_override_in[r])
7019 		       || GET_CODE (reload_override_in[r]) == SUBREG))
7020 	    check_reg = reload_override_in[r];
7021 	  else
7022 	    continue;
7023 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7024 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
7025 				  (reload_inherited[r]
7026 				   ? rld[r].out : const0_rtx),
7027 				  r, 1))
7028 	    {
7029 	      if (pass)
7030 		continue;
7031 	      reload_inherited[r] = 0;
7032 	      reload_override_in[r] = 0;
7033 	    }
7034 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7035 	     reload_override_in, then we do not need its related
7036 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7037 	     likewise for other reload types.
7038 	     We handle this by removing a reload when its only replacement
7039 	     is mentioned in reload_in of the reload we are going to inherit.
7040 	     A special case are auto_inc expressions; even if the input is
7041 	     inherited, we still need the address for the output.  We can
7042 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
7043 	     If we succeeded removing some reload and we are doing a preliminary
7044 	     pass just to remove such reloads, make another pass, since the
7045 	     removal of one reload might allow us to inherit another one.  */
7046 	  else if (rld[r].in
7047 		   && rld[r].out != rld[r].in
7048 		   && remove_address_replacements (rld[r].in))
7049 	    {
7050 	      if (pass)
7051 	        pass = 2;
7052 	    }
7053 #ifdef SECONDARY_MEMORY_NEEDED
7054 	  /* If we needed a memory location for the reload, we also have to
7055 	     remove its related reloads.  */
7056 	  else if (rld[r].in
7057 		   && rld[r].out != rld[r].in
7058 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7059 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
7060 		   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7061 					       rld[r].rclass, rld[r].inmode)
7062 		   && remove_address_replacements
7063 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7064 					  rld[r].when_needed)))
7065 	    {
7066 	      if (pass)
7067 	        pass = 2;
7068 	    }
7069 #endif
7070 	}
7071     }
7072 
7073   /* Now that reload_override_in is known valid,
7074      actually override reload_in.  */
7075   for (j = 0; j < n_reloads; j++)
7076     if (reload_override_in[j])
7077       rld[j].in = reload_override_in[j];
7078 
7079   /* If this reload won't be done because it has been canceled or is
7080      optional and not inherited, clear reload_reg_rtx so other
7081      routines (such as subst_reloads) don't get confused.  */
7082   for (j = 0; j < n_reloads; j++)
7083     if (rld[j].reg_rtx != 0
7084 	&& ((rld[j].optional && ! reload_inherited[j])
7085 	    || (rld[j].in == 0 && rld[j].out == 0
7086 		&& ! rld[j].secondary_p)))
7087       {
7088 	int regno = true_regnum (rld[j].reg_rtx);
7089 
7090 	if (spill_reg_order[regno] >= 0)
7091 	  clear_reload_reg_in_use (regno, rld[j].opnum,
7092 				   rld[j].when_needed, rld[j].mode);
7093 	rld[j].reg_rtx = 0;
7094 	reload_spill_index[j] = -1;
7095       }
7096 
7097   /* Record which pseudos and which spill regs have output reloads.  */
7098   for (j = 0; j < n_reloads; j++)
7099     {
7100       int r = reload_order[j];
7101 
7102       i = reload_spill_index[r];
7103 
7104       /* I is nonneg if this reload uses a register.
7105 	 If rld[r].reg_rtx is 0, this is an optional reload
7106 	 that we opted to ignore.  */
7107       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7108 	  && rld[r].reg_rtx != 0)
7109 	{
7110 	  int nregno = REGNO (rld[r].out_reg);
7111 	  int nr = 1;
7112 
7113 	  if (nregno < FIRST_PSEUDO_REGISTER)
7114 	    nr = hard_regno_nregs[nregno][rld[r].mode];
7115 
7116 	  while (--nr >= 0)
7117 	    SET_REGNO_REG_SET (&reg_has_output_reload,
7118 			       nregno + nr);
7119 
7120 	  if (i >= 0)
7121 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7122 
7123 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
7124 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7125 		      || rld[r].when_needed == RELOAD_FOR_INSN);
7126 	}
7127     }
7128 }
7129 
7130 /* Deallocate the reload register for reload R.  This is called from
7131    remove_address_replacements.  */
7132 
7133 void
deallocate_reload_reg(int r)7134 deallocate_reload_reg (int r)
7135 {
7136   int regno;
7137 
7138   if (! rld[r].reg_rtx)
7139     return;
7140   regno = true_regnum (rld[r].reg_rtx);
7141   rld[r].reg_rtx = 0;
7142   if (spill_reg_order[regno] >= 0)
7143     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7144 			     rld[r].mode);
7145   reload_spill_index[r] = -1;
7146 }
7147 
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each one accumulates, as an insn sequence, the reload insns for one
   RELOAD_FOR_... category; the per-operand categories are indexed by
   operand number.  (See the switch over rl->when_needed in
   emit_input_reload_insns for the category-to-array mapping.)  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *other_input_address_reload_insns = 0;
static rtx_insn *other_input_reload_insns = 0;
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *operand_reload_insns = 0;
static rtx_insn *other_operand_reload_insns = 0;
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard registers whose reloaded contents have died in the current insn.  */
static HARD_REG_SET reg_reloaded_died;
7166 
7167 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7168    of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7169    is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7170    adjusted register, and return true.  Otherwise, return false.  */
7171 static bool
reload_adjust_reg_for_temp(rtx * reload_reg,rtx alt_reload_reg,enum reg_class new_class,machine_mode new_mode)7172 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7173 			    enum reg_class new_class,
7174 			    machine_mode new_mode)
7175 
7176 {
7177   rtx reg;
7178 
7179   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7180     {
7181       unsigned regno = REGNO (reg);
7182 
7183       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7184 	continue;
7185       if (GET_MODE (reg) != new_mode)
7186 	{
7187 	  if (!HARD_REGNO_MODE_OK (regno, new_mode))
7188 	    continue;
7189 	  if (hard_regno_nregs[regno][new_mode]
7190 	      > hard_regno_nregs[regno][GET_MODE (reg)])
7191 	    continue;
7192 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7193 	}
7194       *reload_reg = reg;
7195       return true;
7196     }
7197   return false;
7198 }
7199 
7200 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7201    pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7202    nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7203    adjusted register, and return true.  Otherwise, return false.  */
7204 static bool
reload_adjust_reg_for_icode(rtx * reload_reg,rtx alt_reload_reg,enum insn_code icode)7205 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7206 			     enum insn_code icode)
7207 
7208 {
7209   enum reg_class new_class = scratch_reload_class (icode);
7210   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7211 
7212   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7213 				     new_class, new_mode);
7214 }
7215 
7216 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7217    has the number J.  OLD contains the value to be used as input.  */
7218 
7219 static void
emit_input_reload_insns(struct insn_chain * chain,struct reload * rl,rtx old,int j)7220 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7221 			 rtx old, int j)
7222 {
7223   rtx_insn *insn = chain->insn;
7224   rtx reloadreg;
7225   rtx oldequiv_reg = 0;
7226   rtx oldequiv = 0;
7227   int special = 0;
7228   machine_mode mode;
7229   rtx_insn **where;
7230 
7231   /* delete_output_reload is only invoked properly if old contains
7232      the original pseudo register.  Since this is replaced with a
7233      hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7234      find the pseudo in RELOAD_IN_REG.  This is also used to
7235      determine whether a secondary reload is needed.  */
7236   if (reload_override_in[j]
7237       && (REG_P (rl->in_reg)
7238 	  || (GET_CODE (rl->in_reg) == SUBREG
7239 	      && REG_P (SUBREG_REG (rl->in_reg)))))
7240     {
7241       oldequiv = old;
7242       old = rl->in_reg;
7243     }
7244   if (oldequiv == 0)
7245     oldequiv = old;
7246   else if (REG_P (oldequiv))
7247     oldequiv_reg = oldequiv;
7248   else if (GET_CODE (oldequiv) == SUBREG)
7249     oldequiv_reg = SUBREG_REG (oldequiv);
7250 
7251   reloadreg = reload_reg_rtx_for_input[j];
7252   mode = GET_MODE (reloadreg);
7253 
7254   /* If we are reloading from a register that was recently stored in
7255      with an output-reload, see if we can prove there was
7256      actually no need to store the old value in it.  */
7257 
7258   if (optimize && REG_P (oldequiv)
7259       && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7260       && spill_reg_store[REGNO (oldequiv)]
7261       && REG_P (old)
7262       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7263 	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7264 			  rl->out_reg)))
7265     delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7266 
7267   /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7268      OLDEQUIV.  */
7269 
7270   while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7271     oldequiv = SUBREG_REG (oldequiv);
7272   if (GET_MODE (oldequiv) != VOIDmode
7273       && mode != GET_MODE (oldequiv))
7274     oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7275 
7276   /* Switch to the right place to emit the reload insns.  */
7277   switch (rl->when_needed)
7278     {
7279     case RELOAD_OTHER:
7280       where = &other_input_reload_insns;
7281       break;
7282     case RELOAD_FOR_INPUT:
7283       where = &input_reload_insns[rl->opnum];
7284       break;
7285     case RELOAD_FOR_INPUT_ADDRESS:
7286       where = &input_address_reload_insns[rl->opnum];
7287       break;
7288     case RELOAD_FOR_INPADDR_ADDRESS:
7289       where = &inpaddr_address_reload_insns[rl->opnum];
7290       break;
7291     case RELOAD_FOR_OUTPUT_ADDRESS:
7292       where = &output_address_reload_insns[rl->opnum];
7293       break;
7294     case RELOAD_FOR_OUTADDR_ADDRESS:
7295       where = &outaddr_address_reload_insns[rl->opnum];
7296       break;
7297     case RELOAD_FOR_OPERAND_ADDRESS:
7298       where = &operand_reload_insns;
7299       break;
7300     case RELOAD_FOR_OPADDR_ADDR:
7301       where = &other_operand_reload_insns;
7302       break;
7303     case RELOAD_FOR_OTHER_ADDRESS:
7304       where = &other_input_address_reload_insns;
7305       break;
7306     default:
7307       gcc_unreachable ();
7308     }
7309 
7310   push_to_sequence (*where);
7311 
7312   /* Auto-increment addresses must be reloaded in a special way.  */
7313   if (rl->out && ! rl->out_reg)
7314     {
7315       /* We are not going to bother supporting the case where a
7316 	 incremented register can't be copied directly from
7317 	 OLDEQUIV since this seems highly unlikely.  */
7318       gcc_assert (rl->secondary_in_reload < 0);
7319 
7320       if (reload_inherited[j])
7321 	oldequiv = reloadreg;
7322 
7323       old = XEXP (rl->in_reg, 0);
7324 
7325       /* Prevent normal processing of this reload.  */
7326       special = 1;
7327       /* Output a special code sequence for this case.  */
7328       inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7329     }
7330 
7331   /* If we are reloading a pseudo-register that was set by the previous
7332      insn, see if we can get rid of that pseudo-register entirely
7333      by redirecting the previous insn into our reload register.  */
7334 
7335   else if (optimize && REG_P (old)
7336 	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
7337 	   && dead_or_set_p (insn, old)
7338 	   /* This is unsafe if some other reload
7339 	      uses the same reg first.  */
7340 	   && ! conflicts_with_override (reloadreg)
7341 	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7342 				rl->when_needed, old, rl->out, j, 0))
7343     {
7344       rtx_insn *temp = PREV_INSN (insn);
7345       while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7346 	temp = PREV_INSN (temp);
7347       if (temp
7348 	  && NONJUMP_INSN_P (temp)
7349 	  && GET_CODE (PATTERN (temp)) == SET
7350 	  && SET_DEST (PATTERN (temp)) == old
7351 	  /* Make sure we can access insn_operand_constraint.  */
7352 	  && asm_noperands (PATTERN (temp)) < 0
7353 	  /* This is unsafe if operand occurs more than once in current
7354 	     insn.  Perhaps some occurrences aren't reloaded.  */
7355 	  && count_occurrences (PATTERN (insn), old, 0) == 1)
7356 	{
7357 	  rtx old = SET_DEST (PATTERN (temp));
7358 	  /* Store into the reload register instead of the pseudo.  */
7359 	  SET_DEST (PATTERN (temp)) = reloadreg;
7360 
7361 	  /* Verify that resulting insn is valid.
7362 
7363 	     Note that we have replaced the destination of TEMP with
7364 	     RELOADREG.  If TEMP references RELOADREG within an
7365 	     autoincrement addressing mode, then the resulting insn
7366 	     is ill-formed and we must reject this optimization.  */
7367 	  extract_insn (temp);
7368 	  if (constrain_operands (1, get_enabled_alternatives (temp))
7369 	      && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
7370 	    {
7371 	      /* If the previous insn is an output reload, the source is
7372 		 a reload register, and its spill_reg_store entry will
7373 		 contain the previous destination.  This is now
7374 		 invalid.  */
7375 	      if (REG_P (SET_SRC (PATTERN (temp)))
7376 		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7377 		{
7378 		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7379 		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7380 		}
7381 
7382 	      /* If these are the only uses of the pseudo reg,
7383 		 pretend for GDB it lives in the reload reg we used.  */
7384 	      if (REG_N_DEATHS (REGNO (old)) == 1
7385 		  && REG_N_SETS (REGNO (old)) == 1)
7386 		{
7387 		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
7388 		  if (ira_conflicts_p)
7389 		    /* Inform IRA about the change.  */
7390 		    ira_mark_allocation_change (REGNO (old));
7391 		  alter_reg (REGNO (old), -1, false);
7392 		}
7393 	      special = 1;
7394 
7395 	      /* Adjust any debug insns between temp and insn.  */
7396 	      while ((temp = NEXT_INSN (temp)) != insn)
7397 		if (DEBUG_INSN_P (temp))
7398 		  INSN_VAR_LOCATION_LOC (temp)
7399 		    = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
7400 					    old, reloadreg);
7401 		else
7402 		  gcc_assert (NOTE_P (temp));
7403 	    }
7404 	  else
7405 	    {
7406 	      SET_DEST (PATTERN (temp)) = old;
7407 	    }
7408 	}
7409     }
7410 
7411   /* We can't do that, so output an insn to load RELOADREG.  */
7412 
7413   /* If we have a secondary reload, pick up the secondary register
7414      and icode, if any.  If OLDEQUIV and OLD are different or
7415      if this is an in-out reload, recompute whether or not we
7416      still need a secondary register and what the icode should
7417      be.  If we still need a secondary register and the class or
7418      icode is different, go back to reloading from OLD if using
7419      OLDEQUIV means that we got the wrong type of register.  We
7420      cannot have different class or icode due to an in-out reload
7421      because we don't make such reloads when both the input and
7422      output need secondary reload registers.  */
7423 
7424   if (! special && rl->secondary_in_reload >= 0)
7425     {
7426       rtx second_reload_reg = 0;
7427       rtx third_reload_reg = 0;
7428       int secondary_reload = rl->secondary_in_reload;
7429       rtx real_oldequiv = oldequiv;
7430       rtx real_old = old;
7431       rtx tmp;
7432       enum insn_code icode;
7433       enum insn_code tertiary_icode = CODE_FOR_nothing;
7434 
7435       /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7436 	 and similarly for OLD.
7437 	 See comments in get_secondary_reload in reload.c.  */
7438       /* If it is a pseudo that cannot be replaced with its
7439 	 equivalent MEM, we must fall back to reload_in, which
7440 	 will have all the necessary substitutions registered.
7441 	 Likewise for a pseudo that can't be replaced with its
7442 	 equivalent constant.
7443 
7444 	 Take extra care for subregs of such pseudos.  Note that
7445 	 we cannot use reg_equiv_mem in this case because it is
7446 	 not in the right mode.  */
7447 
7448       tmp = oldequiv;
7449       if (GET_CODE (tmp) == SUBREG)
7450 	tmp = SUBREG_REG (tmp);
7451       if (REG_P (tmp)
7452 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7453 	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7454 	      || reg_equiv_constant (REGNO (tmp)) != 0))
7455 	{
7456 	  if (! reg_equiv_mem (REGNO (tmp))
7457 	      || num_not_at_initial_offset
7458 	      || GET_CODE (oldequiv) == SUBREG)
7459 	    real_oldequiv = rl->in;
7460 	  else
7461 	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
7462 	}
7463 
7464       tmp = old;
7465       if (GET_CODE (tmp) == SUBREG)
7466 	tmp = SUBREG_REG (tmp);
7467       if (REG_P (tmp)
7468 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7469 	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7470 	      || reg_equiv_constant (REGNO (tmp)) != 0))
7471 	{
7472 	  if (! reg_equiv_mem (REGNO (tmp))
7473 	      || num_not_at_initial_offset
7474 	      || GET_CODE (old) == SUBREG)
7475 	    real_old = rl->in;
7476 	  else
7477 	    real_old = reg_equiv_mem (REGNO (tmp));
7478 	}
7479 
7480       second_reload_reg = rld[secondary_reload].reg_rtx;
7481       if (rld[secondary_reload].secondary_in_reload >= 0)
7482 	{
7483 	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7484 
7485 	  third_reload_reg = rld[tertiary_reload].reg_rtx;
7486 	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
7487 	  /* We'd have to add more code for quartary reloads.  */
7488 	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7489 	}
7490       icode = rl->secondary_in_icode;
7491 
7492       if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7493 	  || (rl->in != 0 && rl->out != 0))
7494 	{
7495 	  secondary_reload_info sri, sri2;
7496 	  enum reg_class new_class, new_t_class;
7497 
7498 	  sri.icode = CODE_FOR_nothing;
7499 	  sri.prev_sri = NULL;
7500 	  new_class
7501 	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7502 							 rl->rclass, mode,
7503 							 &sri);
7504 
7505 	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7506 	    second_reload_reg = 0;
7507 	  else if (new_class == NO_REGS)
7508 	    {
7509 	      if (reload_adjust_reg_for_icode (&second_reload_reg,
7510 					       third_reload_reg,
7511 					       (enum insn_code) sri.icode))
7512 		{
7513 		  icode = (enum insn_code) sri.icode;
7514 		  third_reload_reg = 0;
7515 		}
7516 	      else
7517 		{
7518 		  oldequiv = old;
7519 		  real_oldequiv = real_old;
7520 		}
7521 	    }
7522 	  else if (sri.icode != CODE_FOR_nothing)
7523 	    /* We currently lack a way to express this in reloads.  */
7524 	    gcc_unreachable ();
7525 	  else
7526 	    {
7527 	      sri2.icode = CODE_FOR_nothing;
7528 	      sri2.prev_sri = &sri;
7529 	      new_t_class
7530 		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7531 							     new_class, mode,
7532 							     &sri);
7533 	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7534 		{
7535 		  if (reload_adjust_reg_for_temp (&second_reload_reg,
7536 						  third_reload_reg,
7537 						  new_class, mode))
7538 		    {
7539 		      third_reload_reg = 0;
7540 		      tertiary_icode = (enum insn_code) sri2.icode;
7541 		    }
7542 		  else
7543 		    {
7544 		      oldequiv = old;
7545 		      real_oldequiv = real_old;
7546 		    }
7547 		}
7548 	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7549 		{
7550 		  rtx intermediate = second_reload_reg;
7551 
7552 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7553 						  new_class, mode)
7554 		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7555 						      ((enum insn_code)
7556 						       sri2.icode)))
7557 		    {
7558 		      second_reload_reg = intermediate;
7559 		      tertiary_icode = (enum insn_code) sri2.icode;
7560 		    }
7561 		  else
7562 		    {
7563 		      oldequiv = old;
7564 		      real_oldequiv = real_old;
7565 		    }
7566 		}
7567 	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7568 		{
7569 		  rtx intermediate = second_reload_reg;
7570 
7571 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7572 						  new_class, mode)
7573 		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7574 						      new_t_class, mode))
7575 		    {
7576 		      second_reload_reg = intermediate;
7577 		      tertiary_icode = (enum insn_code) sri2.icode;
7578 		    }
7579 		  else
7580 		    {
7581 		      oldequiv = old;
7582 		      real_oldequiv = real_old;
7583 		    }
7584 		}
7585 	      else
7586 		{
7587 		  /* This could be handled more intelligently too.  */
7588 		  oldequiv = old;
7589 		  real_oldequiv = real_old;
7590 		}
7591 	    }
7592 	}
7593 
7594       /* If we still need a secondary reload register, check
7595 	 to see if it is being used as a scratch or intermediate
7596 	 register and generate code appropriately.  If we need
7597 	 a scratch register, use REAL_OLDEQUIV since the form of
7598 	 the insn may depend on the actual address if it is
7599 	 a MEM.  */
7600 
7601       if (second_reload_reg)
7602 	{
7603 	  if (icode != CODE_FOR_nothing)
7604 	    {
7605 	      /* We'd have to add extra code to handle this case.  */
7606 	      gcc_assert (!third_reload_reg);
7607 
7608 	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7609 					  second_reload_reg));
7610 	      special = 1;
7611 	    }
7612 	  else
7613 	    {
7614 	      /* See if we need a scratch register to load the
7615 		 intermediate register (a tertiary reload).  */
7616 	      if (tertiary_icode != CODE_FOR_nothing)
7617 		{
7618 		  emit_insn ((GEN_FCN (tertiary_icode)
7619 			      (second_reload_reg, real_oldequiv,
7620 			       third_reload_reg)));
7621 		}
7622 	      else if (third_reload_reg)
7623 		{
7624 		  gen_reload (third_reload_reg, real_oldequiv,
7625 			      rl->opnum,
7626 			      rl->when_needed);
7627 		  gen_reload (second_reload_reg, third_reload_reg,
7628 			      rl->opnum,
7629 			      rl->when_needed);
7630 		}
7631 	      else
7632 		gen_reload (second_reload_reg, real_oldequiv,
7633 			    rl->opnum,
7634 			    rl->when_needed);
7635 
7636 	      oldequiv = second_reload_reg;
7637 	    }
7638 	}
7639     }
7640 
7641   if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7642     {
7643       rtx real_oldequiv = oldequiv;
7644 
7645       if ((REG_P (oldequiv)
7646 	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7647 	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7648 	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
7649 	  || (GET_CODE (oldequiv) == SUBREG
7650 	      && REG_P (SUBREG_REG (oldequiv))
7651 	      && (REGNO (SUBREG_REG (oldequiv))
7652 		  >= FIRST_PSEUDO_REGISTER)
7653 	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7654 		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7655 	  || (CONSTANT_P (oldequiv)
7656 	      && (targetm.preferred_reload_class (oldequiv,
7657 						  REGNO_REG_CLASS (REGNO (reloadreg)))
7658 		  == NO_REGS)))
7659 	real_oldequiv = rl->in;
7660       gen_reload (reloadreg, real_oldequiv, rl->opnum,
7661 		  rl->when_needed);
7662     }
7663 
7664   if (cfun->can_throw_non_call_exceptions)
7665     copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7666 
7667   /* End this sequence.  */
7668   *where = get_insns ();
7669   end_sequence ();
7670 
7671   /* Update reload_override_in so that delete_address_reloads_1
7672      can see the actual register usage.  */
7673   if (oldequiv_reg)
7674     reload_override_in[j] = oldequiv;
7675 }
7676 
7677 /* Generate insns for the output reload RL, which is for the insn described
7678    by CHAIN and has the number J.  */
7679 static void
emit_output_reload_insns(struct insn_chain * chain,struct reload * rl,int j)7680 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7681 			  int j)
7682 {
7683   rtx reloadreg;
7684   rtx_insn *insn = chain->insn;
     /* Nonzero once the store to OLD has already been emitted through a
	secondary-reload insn pattern; suppresses the final gen_reload.  */
7685   int special = 0;
7686   rtx old = rl->out;
7687   machine_mode mode;
7688   rtx_insn *p;
7689   rtx rl_reg_rtx;
7690 
     /* RELOAD_OTHER reloads get a fresh sequence of their own; other
	reloads append to the per-operand output sequence.  */
7691   if (rl->when_needed == RELOAD_OTHER)
7692     start_sequence ();
7693   else
7694     push_to_sequence (output_reload_insns[rl->opnum]);
7695 
7696   rl_reg_rtx = reload_reg_rtx_for_output[j];
7697   mode = GET_MODE (rl_reg_rtx);
7698 
7699   reloadreg = rl_reg_rtx;
7700 
7701   /* If we need two reload regs, set RELOADREG to the intermediate
7702      one, since it will be stored into OLD.  We might need a secondary
7703      register only for an input reload, so check again here.  */
7704 
7705   if (rl->secondary_out_reload >= 0)
7706     {
7707       rtx real_old = old;
7708       int secondary_reload = rl->secondary_out_reload;
7709       int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7710 
     /* Use the memory equivalent of a spilled pseudo, since the form of
	the secondary-reload insn may depend on the actual address.  */
7711       if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7712 	  && reg_equiv_mem (REGNO (old)) != 0)
7713 	real_old = reg_equiv_mem (REGNO (old));
7714 
7715       if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7716 	{
7717 	  rtx second_reloadreg = reloadreg;
7718 	  reloadreg = rld[secondary_reload].reg_rtx;
7719 
7720 	  /* See if RELOADREG is to be used as a scratch register
7721 	     or as an intermediate register.  */
7722 	  if (rl->secondary_out_icode != CODE_FOR_nothing)
7723 	    {
7724 	      /* We'd have to add extra code to handle this case.  */
7725 	      gcc_assert (tertiary_reload < 0);
7726 
7727 	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
7728 			  (real_old, second_reloadreg, reloadreg)));
7729 	      special = 1;
7730 	    }
7731 	  else
7732 	    {
7733 	      /* See if we need both a scratch and intermediate reload
7734 		 register.  */
7735 
7736 	      enum insn_code tertiary_icode
7737 		= rld[secondary_reload].secondary_out_icode;
7738 
7739 	      /* We'd have to add more code for quartary reloads.  */
7740 	      gcc_assert (tertiary_reload < 0
7741 			  || rld[tertiary_reload].secondary_out_reload < 0);
7742 
7743 	      if (GET_MODE (reloadreg) != mode)
7744 		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7745 
7746 	      if (tertiary_icode != CODE_FOR_nothing)
7747 		{
7748 		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7749 
7750 		  /* Copy primary reload reg to secondary reload reg.
7751 		     (Note that these have been swapped above, then
7752 		     secondary reload reg to OLD using our insn.)  */
7753 
7754 		  /* If REAL_OLD is a paradoxical SUBREG, remove it
7755 		     and try to put the opposite SUBREG on
7756 		     RELOADREG.  */
7757 		  strip_paradoxical_subreg (&real_old, &reloadreg);
7758 
7759 		  gen_reload (reloadreg, second_reloadreg,
7760 			      rl->opnum, rl->when_needed);
7761 		  emit_insn ((GEN_FCN (tertiary_icode)
7762 			      (real_old, reloadreg, third_reloadreg)));
7763 		  special = 1;
7764 		}
7765 
7766 	      else
7767 		{
7768 		  /* Copy between the reload regs here and then to
7769 		     OUT later.  */
7770 
7771 		  gen_reload (reloadreg, second_reloadreg,
7772 			      rl->opnum, rl->when_needed);
7773 		  if (tertiary_reload >= 0)
7774 		    {
7775 		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7776 
7777 		      gen_reload (third_reloadreg, reloadreg,
7778 				  rl->opnum, rl->when_needed);
7779 		      reloadreg = third_reloadreg;
7780 		    }
7781 		}
7782 	    }
7783 	}
7784     }
7785 
7786   /* Output the last reload insn.  */
7787   if (! special)
7788     {
7789       rtx set;
7790 
7791       /* Don't output the last reload if OLD is not the dest of
7792 	 INSN and is in the src and is clobbered by INSN.  */
7793       if (! flag_expensive_optimizations
7794 	  || !REG_P (old)
7795 	  || !(set = single_set (insn))
7796 	  || rtx_equal_p (old, SET_DEST (set))
7797 	  || !reg_mentioned_p (old, SET_SRC (set))
7798 	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7799 	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7800 	gen_reload (old, reloadreg, rl->opnum,
7801 		    rl->when_needed);
7802     }
7803 
7804   /* Look at all insns we emitted, just to be safe.  */
7805   for (p = get_insns (); p; p = NEXT_INSN (p))
7806     if (INSN_P (p))
7807       {
7808 	rtx pat = PATTERN (p);
7809 
7810 	/* If this output reload doesn't come from a spill reg,
7811 	   clear any memory of reloaded copies of the pseudo reg.
7812 	   If this output reload comes from a spill reg,
7813 	   reg_has_output_reload will make this do nothing.  */
7814 	note_stores (pat, forget_old_reloads_1, NULL);
7815 
7816 	if (reg_mentioned_p (rl_reg_rtx, pat))
7817 	  {
7818 	    rtx set = single_set (insn);
	    /* NOTE(review): SET here comes from the reloaded INSN, not from P;
	       when INSN's single-set source is the reload register itself,
	       record that register as the spill index for this reload.  */
7819 	    if (reload_spill_index[j] < 0
7820 		&& set
7821 		&& SET_SRC (set) == rl_reg_rtx)
7822 	      {
7823 		int src = REGNO (SET_SRC (set));
7824 
7825 		reload_spill_index[j] = src;
7826 		SET_HARD_REG_BIT (reg_is_output_reload, src);
7827 		if (find_regno_note (insn, REG_DEAD, src))
7828 		  SET_HARD_REG_BIT (reg_reloaded_died, src);
7829 	      }
7830 	    if (HARD_REGISTER_P (rl_reg_rtx))
7831 	      {
7832 		int s = rl->secondary_out_reload;
7833 		set = single_set (p);
7834 		/* If this reload copies only to the secondary reload
7835 		   register, the secondary reload does the actual
7836 		   store.  */
7837 		if (s >= 0 && set == NULL_RTX)
7838 		  /* We can't tell what function the secondary reload
7839 		     has and where the actual store to the pseudo is
7840 		     made; leave new_spill_reg_store alone.  */
7841 		  ;
7842 		else if (s >= 0
7843 			 && SET_SRC (set) == rl_reg_rtx
7844 			 && SET_DEST (set) == rld[s].reg_rtx)
7845 		  {
7846 		    /* Usually the next instruction will be the
7847 		       secondary reload insn;  if we can confirm
7848 		       that it is, setting new_spill_reg_store to
7849 		       that insn will allow an extra optimization.  */
7850 		    rtx s_reg = rld[s].reg_rtx;
7851 		    rtx_insn *next = NEXT_INSN (p);
7852 		    rld[s].out = rl->out;
7853 		    rld[s].out_reg = rl->out_reg;
7854 		    set = single_set (next);
7855 		    if (set && SET_SRC (set) == s_reg
7856 			&& reload_reg_rtx_reaches_end_p (s_reg, s))
7857 		      {
7858 			SET_HARD_REG_BIT (reg_is_output_reload,
7859 					  REGNO (s_reg));
7860 			new_spill_reg_store[REGNO (s_reg)] = next;
7861 		      }
7862 		  }
7863 		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7864 		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7865 	      }
7866 	  }
7867       }
7868 
     /* Hand the accumulated sequence back to the appropriate slot so
	emit_reload_insns can splice it in after the reloaded insn.  */
7869   if (rl->when_needed == RELOAD_OTHER)
7870     {
7871       emit_insn (other_output_reload_insns[rl->opnum]);
7872       other_output_reload_insns[rl->opnum] = get_insns ();
7873     }
7874   else
7875     output_reload_insns[rl->opnum] = get_insns ();
7876 
7877   if (cfun->can_throw_non_call_exceptions)
7878     copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7879 
7880   end_sequence ();
7881 }
7882 
7883 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7884    and has the number J.  Records the (possibly mode-adjusted) reload
     register in reload_reg_rtx_for_input[J], emits the reload insns unless
     the reload was inherited, and may delete a previous output reload that
     stored into the same register.  */
7885 static void
do_input_reload(struct insn_chain * chain,struct reload * rl,int j)7886 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7887 {
7888   rtx_insn *insn = chain->insn;
     /* For a MEM input, reload from the original register (in_reg)
	instead of the memory reference itself.  */
7889   rtx old = (rl->in && MEM_P (rl->in)
7890 	     ? rl->in_reg : rl->in);
7891   rtx reg_rtx = rl->reg_rtx;
7892 
7893   if (old && reg_rtx)
7894     {
7895       machine_mode mode;
7896 
7897       /* Determine the mode to reload in.
7898 	 This is very tricky because we have three to choose from.
7899 	 There is the mode the insn operand wants (rl->inmode).
7900 	 There is the mode of the reload register RELOADREG.
7901 	 There is the intrinsic mode of the operand, which we could find
7902 	 by stripping some SUBREGs.
7903 	 It turns out that RELOADREG's mode is irrelevant:
7904 	 we can change that arbitrarily.
7905 
7906 	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7907 	 then the reload reg may not support QImode moves, so use SImode.
7908 	 If foo is in memory due to spilling a pseudo reg, this is safe,
7909 	 because the QImode value is in the least significant part of a
7910 	 slot big enough for a SImode.  If foo is some other sort of
7911 	 memory reference, then it is impossible to reload this case,
7912 	 so previous passes had better make sure this never happens.
7913 
7914 	 Then consider a one-word union which has SImode and one of its
7915 	 members is a float, being fetched as (SUBREG:SF union:SI).
7916 	 We must fetch that as SFmode because we could be loading into
7917 	 a float-only register.  In this case OLD's mode is correct.
7918 
7919 	 Consider an immediate integer: it has VOIDmode.  Here we need
7920 	 to get a mode from something else.
7921 
7922 	 In some cases, there is a fourth mode, the operand's
7923 	 containing mode.  If the insn specifies a containing mode for
7924 	 this operand, it overrides all others.
7925 
7926 	 I am not sure whether the algorithm here is always right,
7927 	 but it does the right things in those cases.  */
7928 
7929       mode = GET_MODE (old);
7930       if (mode == VOIDmode)
7931 	mode = rl->inmode;
7932 
7933       /* We cannot use gen_lowpart_common since it can do the wrong thing
7934 	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
7935 	 always be a REG here.  */
7936       if (GET_MODE (reg_rtx) != mode)
7937 	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7938     }
7939   reload_reg_rtx_for_input[j] = reg_rtx;
7940 
     /* Emit the actual reload insns unless the reload was inherited (and
	is not an AUTO_INC reload) or the register already holds OLD.  */
7941   if (old != 0
7942       /* AUTO_INC reloads need to be handled even if inherited.  We got an
7943 	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7944       && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7945       && ! rtx_equal_p (reg_rtx, old)
7946       && reg_rtx != 0)
7947     emit_input_reload_insns (chain, rld + j, old, j);
7948 
7949   /* When inheriting a wider reload, we have a MEM in rl->in,
7950      e.g. inheriting a SImode output reload for
7951      (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7952   if (optimize && reload_inherited[j] && rl->in
7953       && MEM_P (rl->in)
7954       && MEM_P (rl->in_reg)
7955       && reload_spill_index[j] >= 0
7956       && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7957     rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7958 
7959   /* If we are reloading a register that was recently stored in with an
7960      output-reload, see if we can prove there was
7961      actually no need to store the old value in it.  */
7962 
7963   if (optimize
7964       && (reload_inherited[j] || reload_override_in[j])
7965       && reg_rtx
7966       && REG_P (reg_rtx)
7967       && spill_reg_store[REGNO (reg_rtx)] != 0
7968 #if 0
7969       /* There doesn't seem to be any reason to restrict this to pseudos
7970 	 and doing so loses in the case where we are copying from a
7971 	 register of the wrong class.  */
7972       && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7973 #endif
7974       /* The insn might have already some references to stackslots
7975 	 replaced by MEMs, while reload_out_reg still names the
7976 	 original pseudo.  */
7977       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7978 	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7979     delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7980 }
7981 
7982 /* Do output reloading for reload RL, which is for the insn described by
7983    CHAIN and has the number J.  Records the (possibly mode-adjusted)
     reload register in reload_reg_rtx_for_output[J] and emits the output
     reload insns unless the destination dies immediately or already equals
     the reload register.
7984    ??? At some point we need to support handling output reloads of
7985    JUMP_INSNs or insns that set cc0.  */
7986 static void
do_output_reload(struct insn_chain * chain,struct reload * rl,int j)7987 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7988 {
7989   rtx note, old;
7990   rtx_insn *insn = chain->insn;
7991   /* If this is an output reload that stores something that is
7992      not loaded in this same reload, see if we can eliminate a previous
7993      store.  */
7994   rtx pseudo = rl->out_reg;
7995   rtx reg_rtx = rl->reg_rtx;
7996 
7997   if (rl->out && reg_rtx)
7998     {
7999       machine_mode mode;
8000 
8001       /* Determine the mode to reload in.
8002 	 See comments above (for input reloading).  */
8003       mode = GET_MODE (rl->out);
8004       if (mode == VOIDmode)
8005 	{
8006 	  /* VOIDmode should never happen for an output.  */
8007 	  if (asm_noperands (PATTERN (insn)) < 0)
8008 	    /* It's the compiler's fault.  */
8009 	    fatal_insn ("VOIDmode on an output", insn);
8010 	  error_for_asm (insn, "output operand is constant in %<asm%>");
8011 	  /* Prevent crash--use something we know is valid.  */
8012 	  mode = word_mode;
8013 	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
8014 	}
8015       if (GET_MODE (reg_rtx) != mode)
8016 	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
8017     }
8018   reload_reg_rtx_for_output[j] = reg_rtx;
8019 
     /* If the destination pseudo was last reloaded from a spill register
	that still holds it, the earlier store may be dead: try to delete
	that previous output reload.  */
8020   if (pseudo
8021       && optimize
8022       && REG_P (pseudo)
8023       && ! rtx_equal_p (rl->in_reg, pseudo)
8024       && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
8025       && reg_last_reload_reg[REGNO (pseudo)])
8026     {
8027       int pseudo_no = REGNO (pseudo);
8028       int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8029 
8030       /* We don't need to test full validity of last_regno for
8031 	 inherit here; we only want to know if the store actually
8032 	 matches the pseudo.  */
8033       if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8034 	  && reg_reloaded_contents[last_regno] == pseudo_no
8035 	  && spill_reg_store[last_regno]
8036 	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8037 	delete_output_reload (insn, j, last_regno, reg_rtx);
8038     }
8039 
8040   old = rl->out_reg;
8041   if (old == 0
8042       || reg_rtx == 0
8043       || rtx_equal_p (old, reg_rtx))
8044     return;
8045 
8046   /* An output operand that dies right away does need a reload,
8047      but need not be copied from it.  Show the new location in the
8048      REG_UNUSED note.  */
8049   if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8050       && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8051     {
8052       XEXP (note, 0) = reg_rtx;
8053       return;
8054     }
8055   /* Likewise for a SUBREG of an operand that dies.  */
8056   else if (GET_CODE (old) == SUBREG
8057 	   && REG_P (SUBREG_REG (old))
8058 	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
8059 					  SUBREG_REG (old))))
8060     {
8061       XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8062       return;
8063     }
8064   else if (GET_CODE (old) == SCRATCH)
8065     /* If we aren't optimizing, there won't be a REG_UNUSED note,
8066        but we don't want to make an output reload.  */
8067     return;
8068 
8069   /* If is a JUMP_INSN, we can't support output reloads yet.  */
8070   gcc_assert (NONJUMP_INSN_P (insn));
8071 
8072   emit_output_reload_insns (chain, rld + j, j);
8073 }
8074 
8075 /* A reload copies values of MODE from register SRC to register DEST.
8076    Return true if it can be treated for inheritance purposes like a
8077    group of reloads, each one reloading a single hard register.  The
8078    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8079    occupy the same number of hard registers.  */
8080 
8081 static bool
inherit_piecemeal_p(int dest ATTRIBUTE_UNUSED,int src ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED)8082 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8083 		     int src ATTRIBUTE_UNUSED,
8084 		     machine_mode mode ATTRIBUTE_UNUSED)
8085 {
8086 #ifdef CANNOT_CHANGE_MODE_CLASS
8087   return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8088 	  && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8089 #else
8090   return true;
8091 #endif
8092 }
8093 
8094 /* Output insns to reload values in and out of the chosen reload regs.  */
8095 
8096 static void
emit_reload_insns(struct insn_chain * chain)8097 emit_reload_insns (struct insn_chain *chain)
8098 {
8099   rtx_insn *insn = chain->insn;
8100 
8101   int j;
8102 
8103   CLEAR_HARD_REG_SET (reg_reloaded_died);
8104 
8105   for (j = 0; j < reload_n_operands; j++)
8106     input_reload_insns[j] = input_address_reload_insns[j]
8107       = inpaddr_address_reload_insns[j]
8108       = output_reload_insns[j] = output_address_reload_insns[j]
8109       = outaddr_address_reload_insns[j]
8110       = other_output_reload_insns[j] = 0;
8111   other_input_address_reload_insns = 0;
8112   other_input_reload_insns = 0;
8113   operand_reload_insns = 0;
8114   other_operand_reload_insns = 0;
8115 
8116   /* Dump reloads into the dump file.  */
8117   if (dump_file)
8118     {
8119       fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8120       debug_reload_to_stream (dump_file);
8121     }
8122 
8123   for (j = 0; j < n_reloads; j++)
8124     if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8125       {
8126 	unsigned int i;
8127 
8128 	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8129 	  new_spill_reg_store[i] = 0;
8130       }
8131 
8132   /* Now output the instructions to copy the data into and out of the
8133      reload registers.  Do these in the order that the reloads were reported,
8134      since reloads of base and index registers precede reloads of operands
8135      and the operands may need the base and index registers reloaded.  */
8136 
8137   for (j = 0; j < n_reloads; j++)
8138     {
8139       do_input_reload (chain, rld + j, j);
8140       do_output_reload (chain, rld + j, j);
8141     }
8142 
8143   /* Now write all the insns we made for reloads in the order expected by
8144      the allocation functions.  Prior to the insn being reloaded, we write
8145      the following reloads:
8146 
8147      RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8148 
8149      RELOAD_OTHER reloads.
8150 
8151      For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8152      by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8153      RELOAD_FOR_INPUT reload for the operand.
8154 
8155      RELOAD_FOR_OPADDR_ADDRS reloads.
8156 
8157      RELOAD_FOR_OPERAND_ADDRESS reloads.
8158 
8159      After the insn being reloaded, we write the following:
8160 
8161      For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8162      by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8163      RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8164      reloads for the operand.  The RELOAD_OTHER output reloads are
8165      output in descending order by reload number.  */
8166 
8167   emit_insn_before (other_input_address_reload_insns, insn);
8168   emit_insn_before (other_input_reload_insns, insn);
8169 
8170   for (j = 0; j < reload_n_operands; j++)
8171     {
8172       emit_insn_before (inpaddr_address_reload_insns[j], insn);
8173       emit_insn_before (input_address_reload_insns[j], insn);
8174       emit_insn_before (input_reload_insns[j], insn);
8175     }
8176 
8177   emit_insn_before (other_operand_reload_insns, insn);
8178   emit_insn_before (operand_reload_insns, insn);
8179 
8180   for (j = 0; j < reload_n_operands; j++)
8181     {
8182       rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8183       x = emit_insn_after (output_address_reload_insns[j], x);
8184       x = emit_insn_after (output_reload_insns[j], x);
8185       emit_insn_after (other_output_reload_insns[j], x);
8186     }
8187 
8188   /* For all the spill regs newly reloaded in this instruction,
8189      record what they were reloaded from, so subsequent instructions
8190      can inherit the reloads.
8191 
8192      Update spill_reg_store for the reloads of this insn.
8193      Copy the elements that were updated in the loop above.  */
8194 
8195   for (j = 0; j < n_reloads; j++)
8196     {
8197       int r = reload_order[j];
8198       int i = reload_spill_index[r];
8199 
8200       /* If this is a non-inherited input reload from a pseudo, we must
8201 	 clear any memory of a previous store to the same pseudo.  Only do
8202 	 something if there will not be an output reload for the pseudo
8203 	 being reloaded.  */
8204       if (rld[r].in_reg != 0
8205 	  && ! (reload_inherited[r] || reload_override_in[r]))
8206 	{
8207 	  rtx reg = rld[r].in_reg;
8208 
8209 	  if (GET_CODE (reg) == SUBREG)
8210 	    reg = SUBREG_REG (reg);
8211 
8212 	  if (REG_P (reg)
8213 	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8214 	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8215 	    {
8216 	      int nregno = REGNO (reg);
8217 
8218 	      if (reg_last_reload_reg[nregno])
8219 		{
8220 		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
8221 
8222 		  if (reg_reloaded_contents[last_regno] == nregno)
8223 		    spill_reg_store[last_regno] = 0;
8224 		}
8225 	    }
8226 	}
8227 
8228       /* I is nonneg if this reload used a register.
8229 	 If rld[r].reg_rtx is 0, this is an optional reload
8230 	 that we opted to ignore.  */
8231 
8232       if (i >= 0 && rld[r].reg_rtx != 0)
8233 	{
8234 	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8235 	  int k;
8236 
8237 	  /* For a multi register reload, we need to check if all or part
8238 	     of the value lives to the end.  */
8239 	  for (k = 0; k < nr; k++)
8240 	    if (reload_reg_reaches_end_p (i + k, r))
8241 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8242 
8243 	  /* Maybe the spill reg contains a copy of reload_out.  */
8244 	  if (rld[r].out != 0
8245 	      && (REG_P (rld[r].out)
8246 		  || (rld[r].out_reg
8247 		      ? REG_P (rld[r].out_reg)
8248 		      /* The reload value is an auto-modification of
8249 			 some kind.  For PRE_INC, POST_INC, PRE_DEC
8250 			 and POST_DEC, we record an equivalence
8251 			 between the reload register and the operand
8252 			 on the optimistic assumption that we can make
8253 			 the equivalence hold.  reload_as_needed must
8254 			 then either make it hold or invalidate the
8255 			 equivalence.
8256 
8257 			 PRE_MODIFY and POST_MODIFY addresses are reloaded
8258 			 somewhat differently, and allowing them here leads
8259 			 to problems.  */
8260 		      : (GET_CODE (rld[r].out) != POST_MODIFY
8261 			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8262 	    {
8263 	      rtx reg;
8264 
8265 	      reg = reload_reg_rtx_for_output[r];
8266 	      if (reload_reg_rtx_reaches_end_p (reg, r))
8267 		{
8268 		  machine_mode mode = GET_MODE (reg);
8269 		  int regno = REGNO (reg);
8270 		  int nregs = hard_regno_nregs[regno][mode];
8271 		  rtx out = (REG_P (rld[r].out)
8272 			     ? rld[r].out
8273 			     : rld[r].out_reg
8274 			     ? rld[r].out_reg
8275 /* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
8276 		  int out_regno = REGNO (out);
8277 		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8278 				   : hard_regno_nregs[out_regno][mode]);
8279 		  bool piecemeal;
8280 
8281 		  spill_reg_store[regno] = new_spill_reg_store[regno];
8282 		  spill_reg_stored_to[regno] = out;
8283 		  reg_last_reload_reg[out_regno] = reg;
8284 
8285 		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8286 			       && nregs == out_nregs
8287 			       && inherit_piecemeal_p (out_regno, regno, mode));
8288 
8289 		  /* If OUT_REGNO is a hard register, it may occupy more than
8290 		     one register.  If it does, say what is in the
8291 		     rest of the registers assuming that both registers
8292 		     agree on how many words the object takes.  If not,
8293 		     invalidate the subsequent registers.  */
8294 
8295 		  if (HARD_REGISTER_NUM_P (out_regno))
8296 		    for (k = 1; k < out_nregs; k++)
8297 		      reg_last_reload_reg[out_regno + k]
8298 			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
8299 
8300 		  /* Now do the inverse operation.  */
8301 		  for (k = 0; k < nregs; k++)
8302 		    {
8303 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8304 		      reg_reloaded_contents[regno + k]
8305 			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8306 			   ? out_regno
8307 			   : out_regno + k);
8308 		      reg_reloaded_insn[regno + k] = insn;
8309 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8310 		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8311 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8312 					  regno + k);
8313 		      else
8314 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8315 					    regno + k);
8316 		    }
8317 		}
8318 	    }
8319 	  /* Maybe the spill reg contains a copy of reload_in.  Only do
8320 	     something if there will not be an output reload for
8321 	     the register being reloaded.  */
8322 	  else if (rld[r].out_reg == 0
8323 		   && rld[r].in != 0
8324 		   && ((REG_P (rld[r].in)
8325 			&& !HARD_REGISTER_P (rld[r].in)
8326 			&& !REGNO_REG_SET_P (&reg_has_output_reload,
8327 					     REGNO (rld[r].in)))
8328 		       || (REG_P (rld[r].in_reg)
8329 			   && !REGNO_REG_SET_P (&reg_has_output_reload,
8330 						REGNO (rld[r].in_reg))))
8331 		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8332 	    {
8333 	      rtx reg;
8334 
8335 	      reg = reload_reg_rtx_for_input[r];
8336 	      if (reload_reg_rtx_reaches_end_p (reg, r))
8337 		{
8338 		  machine_mode mode;
8339 		  int regno;
8340 		  int nregs;
8341 		  int in_regno;
8342 		  int in_nregs;
8343 		  rtx in;
8344 		  bool piecemeal;
8345 
8346 		  mode = GET_MODE (reg);
8347 		  regno = REGNO (reg);
8348 		  nregs = hard_regno_nregs[regno][mode];
8349 		  if (REG_P (rld[r].in)
8350 		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8351 		    in = rld[r].in;
8352 		  else if (REG_P (rld[r].in_reg))
8353 		    in = rld[r].in_reg;
8354 		  else
8355 		    in = XEXP (rld[r].in_reg, 0);
8356 		  in_regno = REGNO (in);
8357 
8358 		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8359 			      : hard_regno_nregs[in_regno][mode]);
8360 
8361 		  reg_last_reload_reg[in_regno] = reg;
8362 
8363 		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8364 			       && nregs == in_nregs
8365 			       && inherit_piecemeal_p (regno, in_regno, mode));
8366 
8367 		  if (HARD_REGISTER_NUM_P (in_regno))
8368 		    for (k = 1; k < in_nregs; k++)
8369 		      reg_last_reload_reg[in_regno + k]
8370 			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
8371 
8372 		  /* Unless we inherited this reload, show we haven't
8373 		     recently done a store.
8374 		     Previous stores of inherited auto_inc expressions
8375 		     also have to be discarded.  */
8376 		  if (! reload_inherited[r]
8377 		      || (rld[r].out && ! rld[r].out_reg))
8378 		    spill_reg_store[regno] = 0;
8379 
8380 		  for (k = 0; k < nregs; k++)
8381 		    {
8382 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8383 		      reg_reloaded_contents[regno + k]
8384 			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8385 			   ? in_regno
8386 			   : in_regno + k);
8387 		      reg_reloaded_insn[regno + k] = insn;
8388 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8389 		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8390 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8391 					  regno + k);
8392 		      else
8393 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8394 					    regno + k);
8395 		    }
8396 		}
8397 	    }
8398 	}
8399 
8400       /* The following if-statement was #if 0'd in 1.34 (or before...).
8401 	 It's reenabled in 1.35 because supposedly nothing else
8402 	 deals with this problem.  */
8403 
8404       /* If a register gets output-reloaded from a non-spill register,
8405 	 that invalidates any previous reloaded copy of it.
8406 	 But forget_old_reloads_1 won't get to see it, because
8407 	 it thinks only about the original insn.  So invalidate it here.
8408 	 Also do the same thing for RELOAD_OTHER constraints where the
8409 	 output is discarded.  */
8410       if (i < 0
8411 	  && ((rld[r].out != 0
8412 	       && (REG_P (rld[r].out)
8413 		   || (MEM_P (rld[r].out)
8414 		       && REG_P (rld[r].out_reg))))
8415 	      || (rld[r].out == 0 && rld[r].out_reg
8416 		  && REG_P (rld[r].out_reg))))
8417 	{
8418 	  rtx out = ((rld[r].out && REG_P (rld[r].out))
8419 		     ? rld[r].out : rld[r].out_reg);
8420 	  int out_regno = REGNO (out);
8421 	  machine_mode mode = GET_MODE (out);
8422 
8423 	  /* REG_RTX is now set or clobbered by the main instruction.
8424 	     As the comment above explains, forget_old_reloads_1 only
8425 	     sees the original instruction, and there is no guarantee
8426 	     that the original instruction also clobbered REG_RTX.
8427 	     For example, if find_reloads sees that the input side of
8428 	     a matched operand pair dies in this instruction, it may
8429 	     use the input register as the reload register.
8430 
8431 	     Calling forget_old_reloads_1 is a waste of effort if
8432 	     REG_RTX is also the output register.
8433 
8434 	     If we know that REG_RTX holds the value of a pseudo
8435 	     register, the code after the call will record that fact.  */
8436 	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8437 	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8438 
8439 	  if (!HARD_REGISTER_NUM_P (out_regno))
8440 	    {
8441 	      rtx src_reg;
8442 	      rtx_insn *store_insn = NULL;
8443 
8444 	      reg_last_reload_reg[out_regno] = 0;
8445 
8446 	      /* If we can find a hard register that is stored, record
8447 		 the storing insn so that we may delete this insn with
8448 		 delete_output_reload.  */
8449 	      src_reg = reload_reg_rtx_for_output[r];
8450 
8451 	      if (src_reg)
8452 		{
8453 		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
8454 		    store_insn = new_spill_reg_store[REGNO (src_reg)];
8455 		  else
8456 		    src_reg = NULL_RTX;
8457 		}
8458 	      else
8459 		{
8460 		  /* If this is an optional reload, try to find the
8461 		     source reg from an input reload.  */
8462 		  rtx set = single_set (insn);
8463 		  if (set && SET_DEST (set) == rld[r].out)
8464 		    {
8465 		      int k;
8466 
8467 		      src_reg = SET_SRC (set);
8468 		      store_insn = insn;
8469 		      for (k = 0; k < n_reloads; k++)
8470 			{
8471 			  if (rld[k].in == src_reg)
8472 			    {
8473 			      src_reg = reload_reg_rtx_for_input[k];
8474 			      break;
8475 			    }
8476 			}
8477 		    }
8478 		}
8479 	      if (src_reg && REG_P (src_reg)
8480 		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8481 		{
8482 		  int src_regno, src_nregs, k;
8483 		  rtx note;
8484 
8485 		  gcc_assert (GET_MODE (src_reg) == mode);
8486 		  src_regno = REGNO (src_reg);
8487 		  src_nregs = hard_regno_nregs[src_regno][mode];
8488 		  /* The place where to find a death note varies with
8489 		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
8490 		     necessarily checked exactly in the code that moves
8491 		     notes, so just check both locations.  */
8492 		  note = find_regno_note (insn, REG_DEAD, src_regno);
8493 		  if (! note && store_insn)
8494 		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
8495 		  for (k = 0; k < src_nregs; k++)
8496 		    {
8497 		      spill_reg_store[src_regno + k] = store_insn;
8498 		      spill_reg_stored_to[src_regno + k] = out;
8499 		      reg_reloaded_contents[src_regno + k] = out_regno;
8500 		      reg_reloaded_insn[src_regno + k] = store_insn;
8501 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8502 		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8503 		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8504 							  mode))
8505 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8506 					  src_regno + k);
8507 		      else
8508 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8509 					    src_regno + k);
8510 		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8511 		      if (note)
8512 			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8513 		      else
8514 			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8515 		    }
8516 		  reg_last_reload_reg[out_regno] = src_reg;
8517 		  /* We have to set reg_has_output_reload here, or else
8518 		     forget_old_reloads_1 will clear reg_last_reload_reg
8519 		     right away.  */
8520 		  SET_REGNO_REG_SET (&reg_has_output_reload,
8521 				     out_regno);
8522 		}
8523 	    }
8524 	  else
8525 	    {
8526 	      int k, out_nregs = hard_regno_nregs[out_regno][mode];
8527 
8528 	      for (k = 0; k < out_nregs; k++)
8529 		reg_last_reload_reg[out_regno + k] = 0;
8530 	    }
8531 	}
8532     }
8533   IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8534 }
8535 
8536 /* Go through the motions to emit INSN and test if it is strictly valid.
8537    Return the emitted insn if valid, else return NULL.  */
8538 
8539 static rtx_insn *
emit_insn_if_valid_for_reload(rtx pat)8540 emit_insn_if_valid_for_reload (rtx pat)
8541 {
8542   rtx_insn *last = get_last_insn ();
8543   int code;
8544 
8545   rtx_insn *insn = emit_insn (pat);
8546   code = recog_memoized (insn);
8547 
8548   if (code >= 0)
8549     {
8550       extract_insn (insn);
8551       /* We want constrain operands to treat this insn strictly in its
8552 	 validity determination, i.e., the way it would after reload has
8553 	 completed.  */
8554       if (constrain_operands (1, get_enabled_alternatives (insn)))
8555 	return insn;
8556     }
8557 
8558   delete_insns_since (last);
8559   return NULL;
8560 }
8561 
8562 /* Emit code to perform a reload from IN (which may be a reload register) to
8563    OUT (which may also be a reload register).  IN or OUT is from operand
8564    OPNUM with reload type TYPE.
8565 
8566    Returns first insn emitted.  */
8567 
static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
#ifdef SECONDARY_MEMORY_NEEDED
  rtx tem1, tem2;
#endif

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any replacements already scheduled for the addends,
	 since the insns we generate won't be processed by reload.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Rebuild the PLUS only if a replacement or swap changed anything.  */
      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Recursively load the first addend into OUT.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.
     Strip SUBREGs (after replacement) off both operands first so the
     hard-register class check below sees the underlying registers.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
				       REGNO_REG_CLASS (REGNO (tem2)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Move IN -> memory -> OUT via two recursive reloads.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      /* OUT_MODED is OUT re-expressed in the operand's mode, so the
	 intermediate copy has matching modes.  */
      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
					      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return as_a <rtx_insn *> (insn);
	}

      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

  else if (targetm.have_reload_load_address ())
    emit_insn (targetm.gen_reload_load_address (out, in));

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8795 
8796 /* Delete a previously made output-reload whose result we now believe
8797    is not needed.  First we double-check.
8798 
8799    INSN is the insn now being processed.
8800    LAST_RELOAD_REG is the hard register number for which we want to delete
8801    the last output reload.
8802    J is the reload-number that originally used REG.  The caller has made
8803    certain that reload J doesn't use REG any longer for input.
8804    NEW_RELOAD_REG is reload register that reload J is using for REG.  */
8805 
static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  Count how many of this insn's reloads
     account for a use of REG (inherited, overridden, or reload J).
     Any other reload of REG means the value is needed; bail out.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;

      if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);

      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count every place the current insn mentions REG: the pattern itself,
     a call's FUNCTION_USAGE list, and any equivalent memory locations
     (the primary one and alternate-mode copies).  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    nregs = 1;
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* The pseudo still needs its stack slot; only delete the store
	 that we proved dead above.  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8977 
8978 /* We are going to delete DEAD_INSN.  Recursively delete loads of
8979    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8980    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8981 static void
delete_address_reloads(rtx_insn * dead_insn,rtx_insn * current_insn)8982 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8983 {
8984   rtx set = single_set (dead_insn);
8985   rtx set2, dst;
8986   rtx_insn *prev, *next;
8987   if (set)
8988     {
8989       rtx dst = SET_DEST (set);
8990       if (MEM_P (dst))
8991 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8992     }
8993   /* If we deleted the store from a reloaded post_{in,de}c expression,
8994      we can delete the matching adds.  */
8995   prev = PREV_INSN (dead_insn);
8996   next = NEXT_INSN (dead_insn);
8997   if (! prev || ! next)
8998     return;
8999   set = single_set (next);
9000   set2 = single_set (prev);
9001   if (! set || ! set2
9002       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9003       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9004       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9005     return;
9006   dst = SET_DEST (set);
9007   if (! rtx_equal_p (dst, SET_DEST (set2))
9008       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9009       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9010       || (INTVAL (XEXP (SET_SRC (set), 1))
9011 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
9012     return;
9013   delete_related_insns (prev);
9014   delete_related_insns (next);
9015 }
9016 
9017 /* Subfunction of delete_address_reloads: process registers found in X.  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For anything other than a bare REG, recurse into all rtx
     sub-expressions looking for registers.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only registers used as spill regs are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  Give up at a label or jump, or if X is referenced
     before it is set (the value is live across those uses).  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must be an insn this reload pass generated (its UID is at
     least reload_first_uid); otherwise it is not ours to delete.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead: first recurse into the address it loaded from, then
     invalidate the cached contents of DST and delete PREV itself.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9125 
9126 /* Output reload-insns to reload VALUE into RELOADREG.
9127    VALUE is an autoincrement or autodecrement RTX whose operand
9128    is a register or memory location;
9129    so reloading involves incrementing that location.
9130    IN is either identical to VALUE, or some cheaper place to reload from.
9131 
9132    INC_AMOUNT is the number to increment or decrement by (always positive).
9133    This cannot be deduced from VALUE.  */
9134 
static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  /* The actual place to copy from: INCLOC itself when IN == VALUE,
     else the cheaper location the caller provided.  */
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Determine the increment: for {PRE,POST}_MODIFY it is the second
     operand of the embedded PLUS; otherwise build it from INC_AMOUNT,
     negated for decrements.  */
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      /* Accept the add only if it both recognizes and strictly satisfies
	 its constraints (see emit_insn_if_valid_for_reload).  */
      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* The direct add was invalid; discard it and fall through to the
	 increment-in-RELOADREG strategy below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9234 
9235 static void
add_auto_inc_notes(rtx_insn * insn,rtx x)9236 add_auto_inc_notes (rtx_insn *insn, rtx x)
9237 {
9238   enum rtx_code code = GET_CODE (x);
9239   const char *fmt;
9240   int i, j;
9241 
9242   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9243     {
9244       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9245       return;
9246     }
9247 
9248   /* Scan all the operand sub-expressions.  */
9249   fmt = GET_RTX_FORMAT (code);
9250   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9251     {
9252       if (fmt[i] == 'e')
9253 	add_auto_inc_notes (insn, XEXP (x, i));
9254       else if (fmt[i] == 'E')
9255 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9256 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9257     }
9258 }
9259