1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains subroutines used only from the file reload1.cc.
21    It knows how to scan one insn for operands and values
22    that need to be copied into registers to make valid code.
23    It also finds other operands and values which are valid
24    but for which equivalent values in registers exist and
25    ought to be used instead.
26 
27    Before processing the first insn of the function, call `init_reload'.
28    init_reload actually has to be called earlier anyway.
29 
30    To scan an insn, call `find_reloads'.  This does two things:
31    1. sets up tables describing which values must be reloaded
32    for this insn, and what kind of hard regs they must be reloaded into;
33    2. optionally records the locations where those values appear in
34    the data, so they can be replaced properly later.
35    This is done only if the second arg to `find_reloads' is nonzero.
36 
37    The third arg to `find_reloads' specifies the number of levels
38    of indirect addressing supported by the machine.  If it is zero,
39    indirect addressing is not valid.  If it is one, (MEM (REG n))
40    is valid even if (REG n) did not get a hard register; if it is two,
41    (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42    hard register, and similarly for higher values.
43 
44    Then you must choose the hard regs to reload those pseudo regs into,
45    and generate appropriate load insns before this insn and perhaps
46    also store insns after this insn.  Set up the array `reload_reg_rtx'
47    to contain the REG rtx's for the registers you used.  In some
48    cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49    for certain reloads.  Then that tells you which register to use,
50    so you do not need to allocate one.  But you still do need to add extra
51    instructions to copy the value into and out of that register.
52 
53    Finally you must call `subst_reloads' to substitute the reload reg rtx's
54    into the locations already recorded.
55 
56 NOTE SIDE EFFECTS:
57 
58    find_reloads can alter the operands of the instruction it is called on.
59 
60    1. Two operands of any sort may be interchanged, if they are in a
61    commutative instruction.
62    This happens only if find_reloads thinks the instruction will compile
63    better that way.
64 
65    2. Pseudo-registers that are equivalent to constants are replaced
66    with those constants if they are not in hard registers.
67 
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71 
72 Using a reload register for several reloads in one insn:
73 
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77 
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81 
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload.  */
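/* As a rough illustration of the protocol described above, a caller in
   reload1.cc conceptually drives this file along the following lines.
   This is a simplified, hypothetical sketch, not the actual driver code;
   the identifiers used are the ones documented above and in reload.h.

	init_reload ();
	for each insn that needs reloading:
	  {
	    find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	    for (int i = 0; i < n_reloads; i++)
	      if (rld[i].reg_rtx == 0)
		rld[i].reg_rtx = <some suitable hard REG rtx>;
	    ... emit load insns before the insn and store insns after it ...
	    subst_reloads (insn);
	  }

   The real loop in reload1.cc is considerably more involved, since it
   iterates until the set of spilled registers stabilizes.  */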
85 
86 #define REG_OK_STRICT
87 
88 /* We do not enable this with CHECKING_P, since it is awfully slow.  */
89 #undef DEBUG_RELOAD
90 
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "function-abi.h"
109 
110 /* True if X is a constant that can be forced into the constant pool.
111    MODE is the mode of the operand, or VOIDmode if not known.  */
112 #define CONST_POOL_OK_P(MODE, X)		\
113   ((MODE) != VOIDmode				\
114    && CONSTANT_P (X)				\
115    && GET_CODE (X) != HIGH			\
116    && !targetm.cannot_force_const_mem (MODE, X))
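/* For example (illustrative only): a (const_double:DF ...) operand
   normally satisfies CONST_POOL_OK_P (DFmode, x) and can be spilled to
   the literal pool, while a (high:SI (symbol_ref ...)) never does, and
   anything rejected by targetm.cannot_force_const_mem is excluded as
   well.  */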
117 
118 /* True if RCLASS is a non-empty register class that has too few registers
119    to be safely used as a reload target class.  */
120 
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124   return (reg_class_size [(int) rclass] == 1
125 	  || (reg_class_size [(int) rclass] >= 1
126 	      && targetm.class_likely_spilled_p (rclass)));
127 }
128 
129 
130 /* All reloads of the current insn are recorded here.  See reload.h for
131    comments.  */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134 
135 /* All the "earlyclobber" operands of the current insn
136    are recorded here.  */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 
140 int reload_n_operands;
141 
142 /* Replacing reloads.
143 
144    If `replace_reloads' is nonzero, then as each reload is recorded
145    an entry is made for it in the table `replacements'.
146    Then later `subst_reloads' can look through that table and
147    perform all the replacements needed.  */
148 
149 /* Nonzero means record the places to replace.  */
150 static int replace_reloads;
151 
152 /* Each replacement is recorded with a structure like this.  */
153 struct replacement
154 {
155   rtx *where;			/* Location to store in */
156   int what;			/* which reload this is for */
157   machine_mode mode;	/* mode it must have */
158 };
159 
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161 
162 /* Number of replacements currently recorded.  */
163 static int n_replacements;
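/* Schematically, once reload registers have been chosen, subst_reloads
   walks this table and does something like the following for each entry
   (a simplified sketch; the real code also adjusts the register's mode):

	struct replacement *r = &replacements[i];
	*r->where = gen_rtx_REG (r->mode, REGNO (rld[r->what].reg_rtx));

   so every recorded location ends up referring to the chosen reload
   register in the mode that its context requires.  */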
164 
165 /* Used to track what is modified by an operand.  */
166 struct decomposition
167 {
168   int reg_flag;		/* Nonzero if referencing a register.  */
169   int safe;		/* Nonzero if this can't conflict with anything.  */
170   rtx base;		/* Base address for MEM.  */
171   poly_int64_pod start;	/* Starting offset or register number.  */
172   poly_int64_pod end;	/* Ending offset or register number.  */
173 };
174 
175 /* Save MEMs needed to copy from one class of registers to another.  One MEM
176    is used per mode, but normally only one or two modes are ever used.
177 
178    We keep two versions, before and after register elimination.  The one
179    after register elimination is recorded separately for each operand.  This
180    is done in case the address is not valid, to be sure that we reload each
181    one separately.  */
182 
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
186 
187 /* The instruction we are doing reloads for;
188    so we can test whether a register dies in it.  */
189 static rtx_insn *this_insn;
190 
191 /* Nonzero if this instruction is a user-specified asm with operands.  */
192 static int this_insn_is_asm;
193 
194 /* If hard_regs_live_known is nonzero,
195    we can tell which hard regs are currently live,
196    at least enough to succeed in choosing dummy reloads.  */
197 static int hard_regs_live_known;
198 
199 /* Indexed by hard reg number,
200    element is nonnegative if hard reg has been spilled.
201    This vector is passed to `find_reloads' as an argument
202    and is not changed here.  */
203 static short *static_reload_reg_p;
204 
205 /* Set to 1 in subst_reg_equivs if it changes anything.  */
206 static int subst_reg_equivs_changed;
207 
208 /* On return from push_reload, holds the reload-number for the OUT
209    operand, which can be different from the reload-number for the input operand.  */
210 static int output_reloadnum;
211 
212   /* Compare two RTX's.  */
213 #define MATCHES(x, y) \
214  (x == y || (x != 0 && (REG_P (x)				\
215 			? REG_P (y) && REGNO (x) == REGNO (y)	\
216 			: rtx_equal_p (x, y) && ! side_effects_p (x))))
217 
218   /* Indicates if two reload purposes are for similar enough things that we
219      can merge their reloads.  */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221   ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
222    || ((when1) == (when2) && (op1) == (op2))		\
223    || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224    || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
225        && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
226    || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
227        && (when2) == RELOAD_FOR_OTHER_ADDRESS))
228 
229   /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231   ((when1) != (when2)					\
232    || ! ((op1) == (op2)					\
233 	 || (when1) == RELOAD_FOR_INPUT			\
234 	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
235 	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
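  /* A few worked examples of the two macros above (illustrative only):

     - Two RELOAD_FOR_INPUT reloads for different operands:
       MERGABLE_RELOADS is true (both are RELOAD_FOR_INPUT) and
       MERGE_TO_OTHER is false, so they may share a register and keep
       RELOAD_FOR_INPUT.

     - Two RELOAD_FOR_INPUT_ADDRESS reloads for different operands:
       no clause of MERGABLE_RELOADS matches, so they are not merged.

     - RELOAD_OTHER merged with any other type: MERGABLE_RELOADS is true
       and MERGE_TO_OTHER is true, so the merged reload is classified
       RELOAD_OTHER.  */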
236 
237   /* If we are going to reload an address, compute the reload type to
238      use.  */
239 #define ADDR_TYPE(type)					\
240   ((type) == RELOAD_FOR_INPUT_ADDRESS			\
241    ? RELOAD_FOR_INPADDR_ADDRESS				\
242    : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
243       ? RELOAD_FOR_OUTADDR_ADDRESS			\
244       : (type)))
245 
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 				  machine_mode, enum reload_type,
248 				  enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 					int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 				 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 			      machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 				rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static bool maybe_memory_address_addr_space_p (machine_mode, rtx,
266 					       addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 				 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 				   enum rtx_code, enum rtx_code, rtx *,
274 				   int, enum reload_type,int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 				       machine_mode, int,
277 				       enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 					int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 					 rtx, rtx *);
285 
286 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287    list yet.  */
288 
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
291 {
292   rtx it;
293 
294   for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295     if (rtx_equal_p (XEXP (it, 0), mem))
296       return;
297 
298   reg_equiv_alt_mem_list (regno)
299     = alloc_EXPR_LIST (REG_EQUIV, mem,
300 		       reg_equiv_alt_mem_list (regno));
301 }
302 
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304    nonzero) or storing (if IN_P is zero) X to or from a reload register of
305    register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
306    are needed, push them.
307 
308    Return the reload number of the secondary reload we made, or -1 if
309    we didn't need one.  *PICODE is set to the insn_code to use if we do
310    need a secondary reload.  */
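/* A typical (hypothetical, target-dependent) scenario: loading a MEM into
   a floating-point register class is only possible via a general-purpose
   register.  targetm.secondary_reload then returns GENERAL_REGS (or sets
   sri->icode to a reload_in pattern that needs a scratch register), and
   the function below pushes the corresponding secondary reload, recursing
   in case that reload needs a secondary reload of its own.  */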
311 
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 		       enum reg_class reload_class,
315 		       machine_mode reload_mode, enum reload_type type,
316 		       enum insn_code *picode, secondary_reload_info *prev_sri)
317 {
318   enum reg_class rclass = NO_REGS;
319   enum reg_class scratch_class;
320   machine_mode mode = reload_mode;
321   enum insn_code icode = CODE_FOR_nothing;
322   enum insn_code t_icode = CODE_FOR_nothing;
323   enum reload_type secondary_type;
324   int s_reload, t_reload = -1;
325   const char *scratch_constraint;
326   secondary_reload_info sri;
327 
328   if (type == RELOAD_FOR_INPUT_ADDRESS
329       || type == RELOAD_FOR_OUTPUT_ADDRESS
330       || type == RELOAD_FOR_INPADDR_ADDRESS
331       || type == RELOAD_FOR_OUTADDR_ADDRESS)
332     secondary_type = type;
333   else
334     secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335 
336   *picode = CODE_FOR_nothing;
337 
338   /* If X is a paradoxical SUBREG, use the inner value to determine both the
339      mode and object being reloaded.  */
340   if (paradoxical_subreg_p (x))
341     {
342       x = SUBREG_REG (x);
343       reload_mode = GET_MODE (x);
344     }
345 
346   /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347      is still a pseudo-register by now, it *must* have an equivalent MEM
348      but we don't want to assume that), use that equivalent when seeing if
349      a secondary reload is needed since whether or not a reload is needed
350      might be sensitive to the form of the MEM.  */
351 
352   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353       && reg_equiv_mem (REGNO (x)))
354     x = reg_equiv_mem (REGNO (x));
355 
356   sri.icode = CODE_FOR_nothing;
357   sri.prev_sri = prev_sri;
358   rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 						      reload_mode, &sri);
360   icode = (enum insn_code) sri.icode;
361 
362   /* If we don't need any secondary registers, done.  */
363   if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364     return -1;
365 
366   if (rclass != NO_REGS)
367     t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 				      reload_mode, type, &t_icode, &sri);
369 
370   /* If we will be using an insn, the secondary reload is for a
371      scratch register.  */
372 
373   if (icode != CODE_FOR_nothing)
374     {
375       /* If IN_P is nonzero, the reload register will be the output in
376 	 operand 0.  If IN_P is zero, the reload register will be the input
377 	 in operand 1.  Outputs should have an initial "=", which we must
378 	 skip.  */
379 
380       /* ??? It would be useful to be able to handle only two, or more than
381 	 three, operands, but for now we can only handle the case of having
382 	 exactly three: output, input and one temp/scratch.  */
383       gcc_assert (insn_data[(int) icode].n_operands == 3);
384 
385       /* ??? We currently have no way to represent a reload that needs
386 	 an icode to reload from an intermediate tertiary reload register.
387 	 We should probably have a new field in struct reload to tag a
388 	 chain of scratch operand reloads onto.   */
389       gcc_assert (rclass == NO_REGS);
390 
391       scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392       gcc_assert (*scratch_constraint == '=');
393       scratch_constraint++;
394       if (*scratch_constraint == '&')
395 	scratch_constraint++;
396       scratch_class = (reg_class_for_constraint
397 		       (lookup_constraint (scratch_constraint)));
398 
399       rclass = scratch_class;
400       mode = insn_data[(int) icode].operand[2].mode;
401     }
402 
403   /* This case isn't valid, so fail.  Reload is allowed to use the same
404      register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405      in the case of a secondary register, we actually need two different
406      registers for correct code.  We fail here to prevent the possibility of
407      silently generating incorrect code later.
408 
409      The convention is that secondary input reloads are valid only if the
410      secondary_class is different from class.  If you have such a case, you
411      cannot use secondary reloads, you must work around the problem some
412      other way.
413 
414      Allow this when a reload_in/out pattern is being used.  I.e. assume
415      that the generated code handles this case.  */
416 
417   gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 	      || t_icode != CODE_FOR_nothing);
419 
420   /* See if we can reuse an existing secondary reload.  */
421   for (s_reload = 0; s_reload < n_reloads; s_reload++)
422     if (rld[s_reload].secondary_p
423 	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 	&& ((in_p && rld[s_reload].inmode == mode)
426 	    || (! in_p && rld[s_reload].outmode == mode))
427 	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 	&& (small_register_class_p (rclass)
432 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
433 	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 			     opnum, rld[s_reload].opnum))
435       {
436 	if (in_p)
437 	  rld[s_reload].inmode = mode;
438 	if (! in_p)
439 	  rld[s_reload].outmode = mode;
440 
441 	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 	  rld[s_reload].rclass = rclass;
443 
444 	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 	rld[s_reload].optional &= optional;
446 	rld[s_reload].secondary_p = 1;
447 	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 			    opnum, rld[s_reload].opnum))
449 	  rld[s_reload].when_needed = RELOAD_OTHER;
450 
451 	break;
452       }
453 
454   if (s_reload == n_reloads)
455     {
456       /* If we need a memory location to copy between the two reload regs,
457 	 set it up now.  Note that we do the input case before making
458 	 the reload and the output case after.  This is due to the
459 	 way reloads are output.  */
460 
461       if (in_p && icode == CODE_FOR_nothing
462 	  && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 	{
464 	  get_secondary_mem (x, reload_mode, opnum, type);
465 
466 	  /* We may have just added new reloads.  Make sure we add
467 	     the new reload at the end.  */
468 	  s_reload = n_reloads;
469 	}
470 
471       /* We need to make a new secondary reload for this register class.  */
472       rld[s_reload].in = rld[s_reload].out = 0;
473       rld[s_reload].rclass = rclass;
474 
475       rld[s_reload].inmode = in_p ? mode : VOIDmode;
476       rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477       rld[s_reload].reg_rtx = 0;
478       rld[s_reload].optional = optional;
479       rld[s_reload].inc = 0;
480       /* Maybe we could combine these, but it seems too tricky.  */
481       rld[s_reload].nocombine = 1;
482       rld[s_reload].in_reg = 0;
483       rld[s_reload].out_reg = 0;
484       rld[s_reload].opnum = opnum;
485       rld[s_reload].when_needed = secondary_type;
486       rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487       rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488       rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489       rld[s_reload].secondary_out_icode
490 	= ! in_p ? t_icode : CODE_FOR_nothing;
491       rld[s_reload].secondary_p = 1;
492 
493       n_reloads++;
494 
495       if (! in_p && icode == CODE_FOR_nothing
496 	  && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 	get_secondary_mem (x, mode, opnum, type);
498     }
499 
500   *picode = icode;
501   return s_reload;
502 }
503 
504 /* If a secondary reload is needed, return its class.  If both an intermediate
505    register and a scratch register are needed, we return the class of the
506    intermediate register.  */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 			rtx x)
510 {
511   enum insn_code icode;
512   secondary_reload_info sri;
513 
514   sri.icode = CODE_FOR_nothing;
515   sri.prev_sri = NULL;
516   rclass
517     = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518   icode = (enum insn_code) sri.icode;
519 
520   /* If there are no secondary reloads at all, we return NO_REGS.
521      If an intermediate register is needed, we return its class.  */
522   if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523     return rclass;
524 
525   /* No intermediate register is needed, but we have a special reload
526      pattern, which we assume for now needs a scratch register.  */
527   return scratch_reload_class (icode);
528 }
529 
530 /* ICODE is the insn_code of a reload pattern.  Check that it has exactly
531    three operands, verify that operand 2 is an output operand, and return
532    its register class.
533    ??? We'd like to be able to handle any pattern with at least 2 operands,
534    for zero or more scratch registers, but that needs more infrastructure.  */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
537 {
538   const char *scratch_constraint;
539   enum reg_class rclass;
540 
541   gcc_assert (insn_data[(int) icode].n_operands == 3);
542   scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543   gcc_assert (*scratch_constraint == '=');
544   scratch_constraint++;
545   if (*scratch_constraint == '&')
546     scratch_constraint++;
547   rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548   gcc_assert (rclass != NO_REGS);
549   return rclass;
550 }
551 
552 /* Return a memory location that will be used to copy X in mode MODE.
553    If we haven't already made a location for this mode in this insn,
554    call find_reloads_address on the location being returned.  */
555 
556 rtx
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 		   int opnum, enum reload_type type)
559 {
560   rtx loc;
561   int mem_valid;
562 
563   /* By default, if MODE is narrower than a word, widen it to a word.
564      This is required because most machines that require these memory
565      locations do not support short loads and stores from all registers
566      (e.g., FP registers).  */
567 
568   mode = targetm.secondary_memory_needed_mode (mode);
569 
570   /* If we already have made a MEM for this operand in MODE, return it.  */
571   if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572     return secondary_memlocs_elim[(int) mode][opnum];
573 
574   /* If this is the first time we've tried to get a MEM for this mode,
575      allocate a new one.  `something_changed' in reload will get set
576      by noticing that the frame size has changed.  */
577 
578   if (secondary_memlocs[(int) mode] == 0)
579     {
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581       secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583       secondary_memlocs[(int) mode]
584 	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
586     }
587 
588   /* Get a version of the address doing any eliminations needed.  If that
589      didn't give us a new MEM, make a new one if it isn't valid.  */
590 
591   loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592   mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 						  MEM_ADDR_SPACE (loc));
594 
595   if (! mem_valid && loc == secondary_memlocs[(int) mode])
596     loc = copy_rtx (loc);
597 
598   /* The only time the call below will do anything is if the stack
599      offset is too large.  In that case IND_LEVELS doesn't matter, so we
600      can just pass a zero.  Adjust the type to be the address of the
601      corresponding object.  If the address was valid, save the eliminated
602      address.  If it wasn't valid, we need to make a reload each time, so
603      don't save it.  */
604 
605   if (! mem_valid)
606     {
607       type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 	       : RELOAD_OTHER);
610 
611       find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 			    opnum, type, 0, 0);
613     }
614 
615   secondary_memlocs_elim[(int) mode][opnum] = loc;
616   if (secondary_memlocs_elim_used <= (int)mode)
617     secondary_memlocs_elim_used = (int)mode + 1;
618   return loc;
619 }
620 
621 /* Clear any secondary memory locations we've made.  */
622 
623 void
624 clear_secondary_mem (void)
625 {
626   memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627 }
628 
629 
630 /* Find the largest class which has at least one register valid in
631    mode INNER, and which for every such register, that register number
632    plus N is also valid in OUTER (if in range) and is cheap to move
633    into REGNO.  Such a class must exist.  */
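/* For example, on a hypothetical target with 32-bit words, reloading
   (subreg:SI (reg:DI R) 4) calls this with INNER = DImode, OUTER = SImode
   and N = 1: we want the largest, cheapest class that has registers valid
   in DImode, where each such register's successor (register + 1, the word
   the subreg actually addresses) is also valid in SImode.  */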
634 
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 		  machine_mode inner ATTRIBUTE_UNUSED, int n,
638 		  unsigned int dest_regno ATTRIBUTE_UNUSED)
639 {
640   int best_cost = -1;
641   int rclass;
642   int regno;
643   enum reg_class best_class = NO_REGS;
644   enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645   unsigned int best_size = 0;
646   int cost;
647 
648   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649     {
650       int bad = 0;
651       int good = 0;
652       for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 	  {
655 	    if (targetm.hard_regno_mode_ok (regno, inner))
656 	      {
657 		good = 1;
658 		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 		    && !targetm.hard_regno_mode_ok (regno + n, outer))
660 		  bad = 1;
661 	      }
662 	  }
663 
664       if (bad || !good)
665 	continue;
666       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667 
668       if ((reg_class_size[rclass] > best_size
669 	   && (best_cost < 0 || best_cost >= cost))
670 	  || best_cost > cost)
671 	{
672 	  best_class = (enum reg_class) rclass;
673 	  best_size = reg_class_size[rclass];
674 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 					  dest_class);
676 	}
677     }
678 
679   gcc_assert (best_size != 0);
680 
681   return best_class;
682 }
683 
684 /* We are trying to reload a subreg of something that is not a register.
685    Find the largest class which contains only registers valid in
686    mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
687    which we would eventually like to obtain the object.  */
688 
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 		    machine_mode mode ATTRIBUTE_UNUSED,
692 		    enum reg_class dest_class ATTRIBUTE_UNUSED)
693 {
694   int best_cost = -1;
695   int rclass;
696   int regno;
697   enum reg_class best_class = NO_REGS;
698   unsigned int best_size = 0;
699   int cost;
700 
701   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702     {
703       unsigned int computed_rclass_size = 0;
704 
705       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706         {
707           if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 	      && targetm.hard_regno_mode_ok (regno, mode))
709             computed_rclass_size++;
710         }
711 
712       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713 
714       if ((computed_rclass_size > best_size
715 	   && (best_cost < 0 || best_cost >= cost))
716 	  || best_cost > cost)
717 	{
718 	  best_class = (enum reg_class) rclass;
719 	  best_size = computed_rclass_size;
720 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 					  dest_class);
722 	}
723     }
724 
725   gcc_assert (best_size != 0);
726 
727 #ifdef LIMIT_RELOAD_CLASS
728   best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730   return best_class;
731 }
732 
733 /* Return the number of a previously made reload that can be combined with
734    a new one, or n_reloads if none of the existing reloads can be used.
735    OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736    push_reload, they determine the kind of the new reload that we try to
737    combine.  P_IN points to the corresponding value of IN, which can be
738    modified by this function.
739    DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
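/* For example, if two operands of an insn reference the same pseudo and
   both need it reloaded into compatible classes, the second push_reload
   call can reuse the reload created for the first (provided neither
   output is earlyclobbered), so only one reload register is consumed.
   Likewise, an input reload of (reg N) can reuse an existing reload of
   (post_inc (reg N)); both cases are handled below.  */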
740 
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 		      enum reload_type type, int opnum, int dont_share)
744 {
745   rtx in = *p_in;
746   int i;
747   /* We can't merge two reloads if the output of either one is
748      earlyclobbered.  */
749 
750   if (earlyclobber_operand_p (out))
751     return n_reloads;
752 
753   /* We can use an existing reload if the class is right
754      and at least one of IN and OUT is a match
755      and the other is at worst neutral.
756      (A zero compared against anything is neutral.)
757 
758      For targets with small register classes, don't use existing reloads
759      unless they are for the same thing since that can cause us to need
760      more reload registers than we otherwise would.  */
761 
762   for (i = 0; i < n_reloads; i++)
763     if ((reg_class_subset_p (rclass, rld[i].rclass)
764 	 || reg_class_subset_p (rld[i].rclass, rclass))
765 	/* If the existing reload has a register, it must fit our class.  */
766 	&& (rld[i].reg_rtx == 0
767 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 				  true_regnum (rld[i].reg_rtx)))
769 	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 	    || (out != 0 && MATCHES (rld[i].out, out)
772 		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 	&& (small_register_class_p (rclass)
775 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
776 	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777       return i;
778 
779   /* Reloading a plain reg for input can match a reload to postincrement
780      that reg, since the postincrement's value is the right value.
781      Likewise, it can match a preincrement reload, since we regard
782      the preincrementation as happening before any ref in this insn
783      to that register.  */
784   for (i = 0; i < n_reloads; i++)
785     if ((reg_class_subset_p (rclass, rld[i].rclass)
786 	 || reg_class_subset_p (rld[i].rclass, rclass))
787 	/* If the existing reload has a register, it must fit our
788 	   class.  */
789 	&& (rld[i].reg_rtx == 0
790 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 				  true_regnum (rld[i].reg_rtx)))
792 	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
793 	&& ((REG_P (in)
794 	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 	     && MATCHES (XEXP (rld[i].in, 0), in))
796 	    || (REG_P (rld[i].in)
797 		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 		&& MATCHES (XEXP (in, 0), rld[i].in)))
799 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 	&& (small_register_class_p (rclass)
801 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
802 	&& MERGABLE_RELOADS (type, rld[i].when_needed,
803 			     opnum, rld[i].opnum))
804       {
805 	/* Make sure reload_in ultimately has the increment,
806 	   not the plain register.  */
807 	if (REG_P (in))
808 	  *p_in = rld[i].in;
809 	return i;
810       }
811   return n_reloads;
812 }
813 
814 /* Return true if:
815 
816    (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817        of a multiword value; and
818 
819    (b) the number of *words* in REG does not match the number of *registers*
820        in REG.  */
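/* Worked example (hypothetical target with 32-bit words and 64-bit
   floating-point registers): a DFmode value occupies two words but only
   one hard register, so REG_NREGS (reg) * UNITS_PER_WORD is 4 while
   GET_MODE_SIZE (DFmode) is 8, and (subreg:SI (reg:DF f0) 0) is
   therefore "complex" in the sense above.  */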
821 
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824 {
825   machine_mode inner_mode = GET_MODE (reg);
826   poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827   return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 	  && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 	  && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
830 					  reg_words, UNITS_PER_WORD));
831 }
832 
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834    expression.  MODE is the mode that X will be used in.  OUTPUT is true if
835    the function is invoked for the output part of an enclosing reload.  */
836 
837 static bool
838 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839 {
840   rtx inner;
841 
842   /* Only SUBREGs are problematical.  */
843   if (GET_CODE (x) != SUBREG)
844     return false;
845 
846   inner = SUBREG_REG (x);
847 
848   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
849   if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850     return true;
851 
852   /* If INNER is not a hard register, then INNER will not need reloading.  */
853   if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854     return false;
855 
856   /* If INNER is not ok for MODE, then INNER will need reloading.  */
857   if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
858     return true;
859 
860   /* If this is for an output, and the outer part is a word or smaller,
861      INNER is larger than a word and the number of registers in INNER is
862      not the same as the number of words in INNER, then INNER will need
863      reloading (with an in-out reload).  */
864   return output && complex_word_subreg_p (mode, inner);
865 }
866 
867 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868    requiring an extra reload register.  The caller has already found that
869    IN contains some reference to REGNO, so check that we can produce the
870    new value in a single step.  E.g. if we have
871    (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872    instruction that adds one to a register, this should succeed.
873    However, if we have something like
874    (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875    needs to be loaded into a register first, we need a separate reload
876    register.
877    Such PLUS reloads are generated by find_reloads_address_part.
878    The out-of-range PLUS expressions are usually introduced in the instruction
879    patterns by register elimination and substituting pseudos without a home
880    by their function-invariant equivalences.  */
881 static int
882 can_reload_into (rtx in, int regno, machine_mode mode)
883 {
884   rtx dst;
885   rtx_insn *test_insn;
886   int r = 0;
887   struct recog_data_d save_recog_data;
888 
889   /* For matching constraints, we often get notional input reloads where
890      we want to use the original register as the reload register.  I.e.
891      technically this is a non-optional input-output reload, but IN is
892      already a valid register, and has been chosen as the reload register.
893      Speed this up, since it trivially works.  */
894   if (REG_P (in))
895     return 1;
896 
897   /* To test MEMs properly, we'd have to take into account all the reloads
898      that are already scheduled, which can become quite complicated.
899      And since we've already handled address reloads for this MEM, it
900      should always succeed anyway.  */
901   if (MEM_P (in))
902     return 1;
903 
904   /* If we can make a simple SET insn that does the job, everything should
905      be fine.  */
906   dst =  gen_rtx_REG (mode, regno);
907   test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908   save_recog_data = recog_data;
909   if (recog_memoized (test_insn) >= 0)
910     {
911       extract_insn (test_insn);
912       r = constrain_operands (1, get_enabled_alternatives (test_insn));
913     }
914   recog_data = save_recog_data;
915   return r;
916 }
917 
918 /* Record one reload that needs to be performed.
919    IN is an rtx saying where the data are to be found before this instruction.
920    OUT says where they must be stored after the instruction.
921    (IN is zero for data not read, and OUT is zero for data not written.)
922    INLOC and OUTLOC point to the places in the instructions where
923    IN and OUT were found.
924    If IN and OUT are both nonzero, it means the same register must be used
925    to reload both IN and OUT.
926 
927    RCLASS is a register class required for the reloaded data.
928    INMODE is the machine mode that the instruction requires
929    for the reg that replaces IN and OUTMODE is likewise for OUT.
930 
931    If IN is zero, then OUT's location and mode should be passed as
932    INLOC and INMODE.
933 
934    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
935 
936    OPTIONAL nonzero means this reload does not need to be performed:
937    it can be discarded if that is more convenient.
938 
939    OPNUM and TYPE say what the purpose of this reload is.
940 
941    The return value is the reload-number for this reload.
942 
943    If both IN and OUT are nonzero, in some rare cases we might
944    want to make two separate reloads.  (Actually we never do this now.)
945    Therefore, the reload-number for OUT is stored in
946    output_reloadnum when we return; the return value applies to IN.
947    Usually (presently always), when IN and OUT are nonzero,
948    the two reload-numbers are equal, but the caller should be careful to
949    distinguish them.  */
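/* Schematically, find_reloads records an ordinary input reload for
   operand I along these lines (a simplified, hypothetical call; the real
   call sites pass more precise classes, modes and reload types):

	push_reload (recog_data.operand[i], NULL_RTX,
		     recog_data.operand_loc[i], (rtx *) 0,
		     rclass, GET_MODE (recog_data.operand[i]), VOIDmode,
		     0, 0, i, RELOAD_FOR_INPUT);

   The reload number returned indexes the rld[] array described above.  */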
950 
951 int
952 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 	     enum reg_class rclass, machine_mode inmode,
954 	     machine_mode outmode, int strict_low, int optional,
955 	     int opnum, enum reload_type type)
956 {
957   int i;
958   int dont_share = 0;
959   int dont_remove_subreg = 0;
960 #ifdef LIMIT_RELOAD_CLASS
961   rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962 #endif
963   int secondary_in_reload = -1, secondary_out_reload = -1;
964   enum insn_code secondary_in_icode = CODE_FOR_nothing;
965   enum insn_code secondary_out_icode = CODE_FOR_nothing;
966   enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967   subreg_in_class = NO_REGS;
968 
969   /* INMODE and/or OUTMODE could be VOIDmode if no mode
970      has been specified for the operand.  In that case,
971      use the operand's mode as the mode to reload.  */
972   if (inmode == VOIDmode && in != 0)
973     inmode = GET_MODE (in);
974   if (outmode == VOIDmode && out != 0)
975     outmode = GET_MODE (out);
976 
977   /* If find_reloads and friends have so far failed to replace a pseudo
978      with its reg_equiv_constant, something went wrong
979      beforehand.
980      Note that it can't simply be done here if we missed it earlier
981      since the constant might need to be pushed into the literal pool
982      and the resulting memref would probably need further
983      reloading.  */
984   if (in != 0 && REG_P (in))
985     {
986       int regno = REGNO (in);
987 
988       gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 		  || reg_renumber[regno] >= 0
990 		  || reg_equiv_constant (regno) == NULL_RTX);
991     }
992 
993   /* reg_equiv_constant only contains constants which are obviously
994      not appropriate as destination.  So if we would need to replace
995      not appropriate as a destination.  So if we would need to replace
996      trouble.  */
997   if (out != 0 && REG_P (out))
998     {
999       int regno = REGNO (out);
1000 
1001       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 		  || reg_renumber[regno] >= 0
1003 		  || reg_equiv_constant (regno) == NULL_RTX);
1004     }
1005 
1006   /* If we have a read-write operand with an address side-effect,
1007      change either IN or OUT so the side-effect happens only once.  */
1008   if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009     switch (GET_CODE (XEXP (in, 0)))
1010       {
1011       case POST_INC: case POST_DEC:   case POST_MODIFY:
1012 	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 	break;
1014 
1015       case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 	break;
1018 
1019       default:
1020 	break;
1021       }
1022 
1023   /* If we are reloading a (SUBREG constant ...), really reload just the
1024      inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1025      If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026      a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027      register is a pseudo, also reload the inside expression.
1028      For machines that extend byte loads, do this for any SUBREG of a pseudo
1029      where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030      M2 is an integral mode that gets extended when loaded.
1031      Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032      where either M1 is not valid for R or M2 is wider than a word but we
1033      only need one register to store an M2-sized quantity in R.
1034      (However, if OUT is nonzero, we need to reload the reg *and*
1035      the subreg, so do nothing here, and let following statement handle it.)
1036 
1037      Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038      we can't handle it here because CONST_INT does not indicate a mode.
1039 
1040      Similarly, we must reload the inside expression if we have a
1041      STRICT_LOW_PART (presumably, in == out in this case).
1042 
1043      Also reload the inner expression if it does not require a secondary
1044      reload but the SUBREG does.
1045 
1046      Also reload the inner expression if it is a register that is in
1047      the class whose registers cannot be referenced in a different size
1048      and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1049      cannot reload just the inside since we might end up with the wrong
1050      register class.  But if it is inside a STRICT_LOW_PART, we have
1051      no choice, so we hope we do get the right register class there.
1052 
1053      Finally, reload the inner expression if it is a pseudo that will
1054      become a MEM and the MEM has a mode-dependent address, as in that
1055      case we obviously cannot change the mode of the MEM to that of the
1056      containing SUBREG as that would change the interpretation of the
1057      address.  */
1058 
1059   scalar_int_mode inner_mode;
1060   if (in != 0 && GET_CODE (in) == SUBREG
1061       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1062 					inmode, rclass)
1063       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1064       && (strict_low
1065 	  || (subreg_lowpart_p (in)
1066 	      && (CONSTANT_P (SUBREG_REG (in))
1067 		  || GET_CODE (SUBREG_REG (in)) == PLUS
1068 		  || (((REG_P (SUBREG_REG (in))
1069 			&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1070 		       || MEM_P (SUBREG_REG (in)))
1071 		      && (paradoxical_subreg_p (inmode,
1072 						GET_MODE (SUBREG_REG (in)))
1073 			  || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1074 			      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG
1075 								   (in)),
1076 							 &inner_mode)
1077 			      && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1078 			      && paradoxical_subreg_p (inmode, inner_mode)
1079 			      && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1080 			  || (WORD_REGISTER_OPERATIONS
1081 			      && partial_subreg_p (inmode,
1082 						   GET_MODE (SUBREG_REG (in)))
1083 			      && (known_equal_after_align_down
1084 				  (GET_MODE_SIZE (inmode) - 1,
1085 				   GET_MODE_SIZE (GET_MODE (SUBREG_REG
1086 							    (in))) - 1,
1087 				   UNITS_PER_WORD)))))
1088 		  || (REG_P (SUBREG_REG (in))
1089 		      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1090 		      /* The case where out is nonzero
1091 			 is handled differently in the following statement.  */
1092 		      && (out == 0 || subreg_lowpart_p (in))
1093 		      && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1094 			  || !targetm.hard_regno_mode_ok (subreg_regno (in),
1095 							  inmode)))
1096 		  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1097 		      && (secondary_reload_class (1, rclass,
1098 						  GET_MODE (SUBREG_REG (in)),
1099 						  SUBREG_REG (in))
1100 			  == NO_REGS))
1101 		  || (REG_P (SUBREG_REG (in))
1102 		      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1103 		      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1104 						 GET_MODE (SUBREG_REG (in)),
1105 						 inmode))))
1106 	  || (REG_P (SUBREG_REG (in))
1107 	      && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER
1108 	      && reg_equiv_mem (REGNO (SUBREG_REG (in)))
1109 	      && (mode_dependent_address_p
1110 		  (XEXP (reg_equiv_mem (REGNO (SUBREG_REG (in))), 0),
1111 		   MEM_ADDR_SPACE (reg_equiv_mem (REGNO (SUBREG_REG (in)))))))))
1112     {
1113 #ifdef LIMIT_RELOAD_CLASS
1114       in_subreg_loc = inloc;
1115 #endif
1116       inloc = &SUBREG_REG (in);
1117       in = *inloc;
1118 
1119       if (!WORD_REGISTER_OPERATIONS
1120 	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1121 	  && MEM_P (in))
1122 	/* This is supposed to happen only for paradoxical subregs made by
1123 	   combine.cc.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
1124 	gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1125 			      GET_MODE_SIZE (inmode)));
1126 
1127       inmode = GET_MODE (in);
1128     }
1129 
1130   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1131      where M1 is not valid for R if it was not handled by the code above.
1132 
1133      Similar issue for (SUBREG constant ...) if it was not handled by the
1134      code above.  This can happen if SUBREG_BYTE != 0.
1135 
1136      However, we must reload the inner reg *as well as* the subreg in
1137      that case.  */
1138 
1139   if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1140     {
1141       if (REG_P (SUBREG_REG (in)))
1142 	subreg_in_class
1143 	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1144 			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1145 						   GET_MODE (SUBREG_REG (in)),
1146 						   SUBREG_BYTE (in),
1147 						   GET_MODE (in)),
1148 			      REGNO (SUBREG_REG (in)));
1149       else if (CONSTANT_P (SUBREG_REG (in))
1150                || GET_CODE (SUBREG_REG (in)) == PLUS)
1151 	subreg_in_class = find_valid_class_1 (inmode,
1152 					      GET_MODE (SUBREG_REG (in)),
1153 					      rclass);
1154 
1155       /* This relies on the fact that emit_reload_insns outputs the
1156 	 instructions for input reloads of type RELOAD_OTHER in the same
1157 	 order as the reloads.  Thus if the outer reload is also of type
1158 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1159 	 output before the outer reload.  */
1160       push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1161 		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1162       dont_remove_subreg = 1;
1163     }
1164 
1165   /* Similarly for paradoxical and problematical SUBREGs on the output.
1166      Note that there is no reason we need worry about the previous value
1167      of SUBREG_REG (out); even if wider than out, storing in a subreg is
1168      entitled to clobber it all (except in the case of a word mode subreg
1169      or of a STRICT_LOW_PART, in that latter case the constraint should
1170      label it input-output.)  */
1171   if (out != 0 && GET_CODE (out) == SUBREG
1172       && (subreg_lowpart_p (out) || strict_low)
1173       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1174 					outmode, rclass)
1175       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1176       && (CONSTANT_P (SUBREG_REG (out))
1177 	  || strict_low
1178 	  || (((REG_P (SUBREG_REG (out))
1179 		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1180 	       || MEM_P (SUBREG_REG (out)))
1181 	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1182 		  || (WORD_REGISTER_OPERATIONS
1183 		      && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1184 		      && (known_equal_after_align_down
1185 			  (GET_MODE_SIZE (outmode) - 1,
1186 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1187 			   UNITS_PER_WORD)))))
1188 	  || (REG_P (SUBREG_REG (out))
1189 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1190 	      /* The case of a word mode subreg
1191 		 is handled differently in the following statement.  */
1192 	      && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1193 		    && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1194 				 UNITS_PER_WORD))
1195 	      && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1196 	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1197 	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1198 					  SUBREG_REG (out))
1199 		  == NO_REGS))
1200 	  || (REG_P (SUBREG_REG (out))
1201 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1202 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1203 					 GET_MODE (SUBREG_REG (out)),
1204 					 outmode))))
1205     {
1206 #ifdef LIMIT_RELOAD_CLASS
1207       out_subreg_loc = outloc;
1208 #endif
1209       outloc = &SUBREG_REG (out);
1210       out = *outloc;
1211       gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1212 		  || known_le (GET_MODE_SIZE (GET_MODE (out)),
1213 			       GET_MODE_SIZE (outmode)));
1214       outmode = GET_MODE (out);
1215     }
1216 
1217   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1218      where either M1 is not valid for R or M2 is wider than a word but we
1219      only need one register to store an M2-sized quantity in R.
1220 
1221      However, we must reload the inner reg *as well as* the subreg in
1222      that case and the inner reg is an in-out reload.  */
1223 
1224   if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1225     {
1226       enum reg_class in_out_class
1227 	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1228 			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1229 						 GET_MODE (SUBREG_REG (out)),
1230 						 SUBREG_BYTE (out),
1231 						 GET_MODE (out)),
1232 			    REGNO (SUBREG_REG (out)));
1233 
1234       /* This relies on the fact that emit_reload_insns outputs the
1235 	 instructions for output reloads of type RELOAD_OTHER in reverse
1236 	 order of the reloads.  Thus if the outer reload is also of type
1237 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1238 	 output after the outer reload.  */
1239       push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1240 		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1241 		   0, 0, opnum, RELOAD_OTHER);
1242       dont_remove_subreg = 1;
1243     }
1244 
1245   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1246   if (in != 0 && out != 0 && MEM_P (out)
1247       && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1248       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1249     dont_share = 1;
1250 
1251   /* If IN is a SUBREG of a hard register, make a new REG.  This
1252      simplifies some of the cases below.  */
1253 
1254   if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1255       && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1256       && ! dont_remove_subreg)
1257     in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1258 
1259   /* Similarly for OUT.  */
1260   if (out != 0 && GET_CODE (out) == SUBREG
1261       && REG_P (SUBREG_REG (out))
1262       && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1263       && ! dont_remove_subreg)
1264     out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1265 
1266   /* Narrow down the class of register wanted if that is
1267      desirable on this machine for efficiency.  */
1268   {
1269     reg_class_t preferred_class = rclass;
1270 
1271     if (in != 0)
1272       preferred_class = targetm.preferred_reload_class (in, rclass);
1273 
1274     /* Output reloads may need analogous treatment, different in detail.  */
1275     if (out != 0)
1276       preferred_class
1277 	= targetm.preferred_output_reload_class (out, preferred_class);
1278 
1279     /* Discard what the target said if we cannot do it.  */
1280     if (preferred_class != NO_REGS
1281 	|| (optional && type == RELOAD_FOR_OUTPUT))
1282       rclass = (enum reg_class) preferred_class;
1283   }
1284 
1285   /* Make sure we use a class that can handle the actual pseudo
1286      inside any subreg.  For example, on the 386, QImode regs
1287      can appear within SImode subregs.  Although GENERAL_REGS
1288      can handle SImode, QImode needs a smaller class.  */
1289 #ifdef LIMIT_RELOAD_CLASS
1290   if (in_subreg_loc)
1291     rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1292   else if (in != 0 && GET_CODE (in) == SUBREG)
1293     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1294 
1295   if (out_subreg_loc)
1296     rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1297   if (out != 0 && GET_CODE (out) == SUBREG)
1298     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1299 #endif
1300 
1301   /* Verify that this class is at least possible for the mode that
1302      is specified.  */
1303   if (this_insn_is_asm)
1304     {
1305       machine_mode mode;
1306       if (paradoxical_subreg_p (inmode, outmode))
1307 	mode = inmode;
1308       else
1309 	mode = outmode;
1310       if (mode == VOIDmode)
1311 	{
1312 	  error_for_asm (this_insn, "cannot reload integer constant "
1313 			 "operand in %<asm%>");
1314 	  mode = word_mode;
1315 	  if (in != 0)
1316 	    inmode = word_mode;
1317 	  if (out != 0)
1318 	    outmode = word_mode;
1319 	}
1320       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1321 	if (targetm.hard_regno_mode_ok (i, mode)
1322 	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1323 	  break;
1324       if (i == FIRST_PSEUDO_REGISTER)
1325 	{
1326 	  error_for_asm (this_insn, "impossible register constraint "
1327 			 "in %<asm%>");
1328 	  /* Avoid further trouble with this insn.  */
1329 	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1330 	  /* We used to continue here setting class to ALL_REGS, but it triggers
1331 	     a sanity check on i386 for:
1332 	     void foo(long double d)
1333 	     {
1334 	       asm("" :: "a" (d));
1335 	     }
1336 	     Returning zero here ought to be safe as we take care in
1337 	     find_reloads to not process the reloads when the instruction was
1338 	     replaced by a USE.  */
1339 
1340 	  return 0;
1341 	}
1342     }
1343 
1344   /* Optional output reloads are always OK even if we have no register class,
1345      since the function of these reloads is only to have spill_reg_store etc.
1346      set, so that the storing insn can be deleted later.  */
1347   gcc_assert (rclass != NO_REGS
1348 	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1349 
1350   i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1351 
1352   if (i == n_reloads)
1353     {
1354       /* See if we need a secondary reload register to move between CLASS
1355 	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1356 	 needed for each of them if so.  */
1357 
1358       if (in != 0)
1359 	secondary_in_reload
1360 	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1361 				   &secondary_in_icode, NULL);
1362       if (out != 0 && GET_CODE (out) != SCRATCH)
1363 	secondary_out_reload
1364 	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1365 				   type, &secondary_out_icode, NULL);
1366 
1367       /* We found no existing reload suitable for re-use.
1368 	 So add an additional reload.  */
1369 
1370       if (subreg_in_class == NO_REGS
1371 	  && in != 0
1372 	  && (REG_P (in)
1373 	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1374 	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1375 	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1376       /* If a memory location is needed for the copy, make one.  */
1377       if (subreg_in_class != NO_REGS
1378 	  && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1379 	get_secondary_mem (in, inmode, opnum, type);
1380 
1381       i = n_reloads;
1382       rld[i].in = in;
1383       rld[i].out = out;
1384       rld[i].rclass = rclass;
1385       rld[i].inmode = inmode;
1386       rld[i].outmode = outmode;
1387       rld[i].reg_rtx = 0;
1388       rld[i].optional = optional;
1389       rld[i].inc = 0;
1390       rld[i].nocombine = 0;
1391       rld[i].in_reg = inloc ? *inloc : 0;
1392       rld[i].out_reg = outloc ? *outloc : 0;
1393       rld[i].opnum = opnum;
1394       rld[i].when_needed = type;
1395       rld[i].secondary_in_reload = secondary_in_reload;
1396       rld[i].secondary_out_reload = secondary_out_reload;
1397       rld[i].secondary_in_icode = secondary_in_icode;
1398       rld[i].secondary_out_icode = secondary_out_icode;
1399       rld[i].secondary_p = 0;
1400 
1401       n_reloads++;
1402 
1403       if (out != 0
1404           && (REG_P (out)
1405 	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1406 	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1407 	  && (targetm.secondary_memory_needed
1408 	      (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1409 	get_secondary_mem (out, outmode, opnum, type);
1410     }
1411   else
1412     {
1413       /* We are reusing an existing reload,
1414 	 but we may have additional information for it.
1415 	 For example, we may now have both IN and OUT
1416 	 while the old one may have just one of them.  */
1417 
1418       /* The modes can be different.  If they are, we want to reload in
1419 	 the larger mode, so that the value is valid for both modes.  */
1420       if (inmode != VOIDmode
1421 	  && partial_subreg_p (rld[i].inmode, inmode))
1422 	rld[i].inmode = inmode;
1423       if (outmode != VOIDmode
1424 	  && partial_subreg_p (rld[i].outmode, outmode))
1425 	rld[i].outmode = outmode;
1426       if (in != 0)
1427 	{
1428 	  rtx in_reg = inloc ? *inloc : 0;
1429 	  /* If we merge reloads for two distinct rtl expressions that
1430 	     are identical in content, there might be duplicate address
1431 	     reloads.  Remove the extra set now, so that if we later find
1432 	     that we can inherit this reload, we can get rid of the
1433 	     address reloads altogether.
1434 
1435 	     Do not do this if both reloads are optional since the result
1436 	     would be an optional reload which could potentially leave
1437 	     unresolved address replacements.
1438 
1439 	     It is not sufficient to call transfer_replacements since
1440 	     choose_reload_regs will remove the replacements for address
1441 	     reloads of inherited reloads which results in the same
1442 	     problem.  */
1443 	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1444 	      && ! (rld[i].optional && optional))
1445 	    {
1446 	      /* We must keep the address reload with the lower operand
1447 		 number alive.  */
1448 	      if (opnum > rld[i].opnum)
1449 		{
1450 		  remove_address_replacements (in);
1451 		  in = rld[i].in;
1452 		  in_reg = rld[i].in_reg;
1453 		}
1454 	      else
1455 		remove_address_replacements (rld[i].in);
1456 	    }
1457 	  /* When emitting reloads we don't look only at the in-
1458 	     and outmode, but also directly at the operands (in and out).
1459 	     So we can't simply overwrite them with whatever we have found
1460 	     for this (to-be-merged) reload, we have to "merge" that too.
1461 	     Reusing another reload already verified that we deal with the
1462 	     same operands, just possibly in different modes.  So we
1463 	     overwrite the operands only when the new mode is larger.
1464 	     See also PR33613.  */
1465 	  if (!rld[i].in
1466 	      || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1467 	    rld[i].in = in;
1468 	  if (!rld[i].in_reg
1469 	      || (in_reg
1470 		  && partial_subreg_p (GET_MODE (rld[i].in_reg),
1471 				       GET_MODE (in_reg))))
1472 	    rld[i].in_reg = in_reg;
1473 	}
1474       if (out != 0)
1475 	{
1476 	  if (!rld[i].out
1477 	      || (out
1478 		  && partial_subreg_p (GET_MODE (rld[i].out),
1479 				       GET_MODE (out))))
1480 	    rld[i].out = out;
1481 	  if (outloc
1482 	      && (!rld[i].out_reg
1483 		  || partial_subreg_p (GET_MODE (rld[i].out_reg),
1484 				       GET_MODE (*outloc))))
1485 	    rld[i].out_reg = *outloc;
1486 	}
1487       if (reg_class_subset_p (rclass, rld[i].rclass))
1488 	rld[i].rclass = rclass;
1489       rld[i].optional &= optional;
1490       if (MERGE_TO_OTHER (type, rld[i].when_needed,
1491 			  opnum, rld[i].opnum))
1492 	rld[i].when_needed = RELOAD_OTHER;
1493       rld[i].opnum = MIN (rld[i].opnum, opnum);
1494     }
1495 
1496   /* If the ostensible rtx being reloaded differs from the rtx found
1497      in the location to substitute, this reload is not safe to combine
1498      because we cannot reliably tell whether it appears in the insn.  */
1499 
1500   if (in != 0 && in != *inloc)
1501     rld[i].nocombine = 1;
1502 
1503   /* If we will replace IN and OUT with the reload-reg,
1504      record where they are located so that substitution need
1505      not do a tree walk.  */
1506 
1507   if (replace_reloads)
1508     {
1509       if (inloc != 0)
1510 	{
1511 	  struct replacement *r = &replacements[n_replacements++];
1512 	  r->what = i;
1513 	  r->where = inloc;
1514 	  r->mode = inmode;
1515 	}
1516       if (outloc != 0 && outloc != inloc)
1517 	{
1518 	  struct replacement *r = &replacements[n_replacements++];
1519 	  r->what = i;
1520 	  r->where = outloc;
1521 	  r->mode = outmode;
1522 	}
1523     }
1524 
1525   /* If this reload is just being introduced and it has both
1526      an incoming quantity and an outgoing quantity that are
1527      supposed to be made to match, see if either one of the two
1528      can serve as the place to reload into.
1529 
1530      If one of them is acceptable, set rld[i].reg_rtx
1531      to that one.  */
1532 
1533   if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1534     {
1535       rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1536 					  inmode, outmode,
1537 					  rld[i].rclass, i,
1538 					  earlyclobber_operand_p (out));
1539 
1540       /* If the outgoing register already contains the same value
1541 	 as the incoming one, we can dispense with loading it.
1542 	 The easiest way to tell the caller that is to give a phony
1543 	 value for the incoming operand (same as outgoing one).  */
1544       if (rld[i].reg_rtx == out
1545 	  && (REG_P (in) || CONSTANT_P (in))
1546 	  && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1547 			     static_reload_reg_p, i, inmode) != 0)
1548 	rld[i].in = out;
1549     }
1550 
1551   /* If this is an input reload and the operand contains a register that
1552      dies in this insn and is used nowhere else, see if it is the right class
1553      to be used for this reload.  Use it if so.  (This occurs most commonly
1554      in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1555      this if it is also an output reload that mentions the register unless
1556      the output is a SUBREG that clobbers an entire register.
1557 
1558      Note that the operand might be one of the spill regs, if it is a
1559      pseudo reg and we are in a block where spilling has not taken place.
1560      But if there is no spilling in this block, that is OK.
1561      An explicitly used hard reg cannot be a spill reg.  */
1562 
1563   if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1564     {
1565       rtx note;
1566       int regno;
1567       machine_mode rel_mode = inmode;
1568 
1569       if (out && partial_subreg_p (rel_mode, outmode))
1570 	rel_mode = outmode;
1571 
1572       for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1573 	if (REG_NOTE_KIND (note) == REG_DEAD
1574 	    && REG_P (XEXP (note, 0))
1575 	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1576 	    && reg_mentioned_p (XEXP (note, 0), in)
1577 	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1578 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1579 		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1580 				    ORIGINAL_REGNO (XEXP (note, 0)))
1581 		    && REG_NREGS (XEXP (note, 0)) == 1))
1582 	    && ! refers_to_regno_for_reload_p (regno,
1583 					       end_hard_regno (rel_mode,
1584 							       regno),
1585 					       PATTERN (this_insn), inloc)
1586 	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1587 	    /* If this is also an output reload, IN cannot be used as
1588 	       the reload register if it is set in this insn unless IN
1589 	       is also OUT.  */
1590 	    && (out == 0 || in == out
1591 		|| ! hard_reg_set_here_p (regno,
1592 					  end_hard_regno (rel_mode, regno),
1593 					  PATTERN (this_insn)))
1594 	    /* ??? Why is this code so different from the previous?
1595 	       Is there any simple coherent way to describe the two together?
1596 	       What's going on here?  */
1597 	    && (in != out
1598 		|| (GET_CODE (in) == SUBREG
1599 		    && (known_equal_after_align_up
1600 			(GET_MODE_SIZE (GET_MODE (in)),
1601 			 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1602 			 UNITS_PER_WORD))))
1603 	    /* Make sure the operand fits in the reg that dies.  */
1604 	    && known_le (GET_MODE_SIZE (rel_mode),
1605 			 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1606 	    && targetm.hard_regno_mode_ok (regno, inmode)
1607 	    && targetm.hard_regno_mode_ok (regno, outmode))
1608 	  {
1609 	    unsigned int offs;
1610 	    unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1611 				      hard_regno_nregs (regno, outmode));
1612 
1613 	    for (offs = 0; offs < nregs; offs++)
1614 	      if (fixed_regs[regno + offs]
1615 		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1616 					  regno + offs))
1617 		break;
1618 
1619 	    if (offs == nregs
1620 		&& (! (refers_to_regno_for_reload_p
1621 		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1622 		    || can_reload_into (in, regno, inmode)))
1623 	      {
1624 		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1625 		break;
1626 	      }
1627 	  }
1628     }
1629 
1630   if (out)
1631     output_reloadnum = i;
1632 
1633   return i;
1634 }
1635 
1636 /* Record an additional place we must replace a value
1637    for which we have already recorded a reload.
1638    RELOADNUM is the value returned by push_reload
1639    when the reload was recorded.
1640    This is used in insn patterns that use match_dup.  */
1641 
1642 static void
1643 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1644 {
1645   if (replace_reloads)
1646     {
1647       struct replacement *r = &replacements[n_replacements++];
1648       r->what = reloadnum;
1649       r->where = loc;
1650       r->mode = mode;
1651     }
1652 }
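
/* Illustrative sketch (a hypothetical machine-description pattern, not
   taken from any real back end):

     (define_insn "..."
       [(set (match_operand:SI 0 "register_operand" "=r")
             (plus:SI (match_operand:SI 1 "register_operand" "r")
                      (match_dup 1)))]
       ...)

   If operand 1 needs a reload, push_reload records it once for the first
   occurrence; the (match_dup 1) location can then be registered with
   push_replacement (or dup_replacements below) so that subst_reloads
   later rewrites both occurrences with the same reload register.  */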
1653 
1654 /* Duplicate any replacement we have recorded to apply at
1655    location ORIG_LOC to also be performed at DUP_LOC.
1656    This is used in insn patterns that use match_dup.  */
1657 
1658 static void
1659 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1660 {
1661   int i, n = n_replacements;
1662 
1663   for (i = 0; i < n; i++)
1664     {
1665       struct replacement *r = &replacements[i];
1666       if (r->where == orig_loc)
1667 	push_replacement (dup_loc, r->what, r->mode);
1668     }
1669 }
1670 
1671 /* Transfer all replacements that used to be in reload FROM to be in
1672    reload TO.  */
1673 
1674 void
1675 transfer_replacements (int to, int from)
1676 {
1677   int i;
1678 
1679   for (i = 0; i < n_replacements; i++)
1680     if (replacements[i].what == from)
1681       replacements[i].what = to;
1682 }
1683 
1684 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1685    or a subpart of it.  If we have any replacements registered for IN_RTX,
1686    cancel the reloads that were supposed to load them.
1687    Return nonzero if we canceled any reloads.  */
1688 int
1689 remove_address_replacements (rtx in_rtx)
1690 {
1691   int i, j;
1692   char reload_flags[MAX_RELOADS];
1693   int something_changed = 0;
1694 
1695   memset (reload_flags, 0, sizeof reload_flags);
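  /* For illustration of the flag bits used below: bit 1 marks a reload
     with at least one replacement inside IN_RTX, bit 2 a reload with at
     least one replacement elsewhere.  Only reloads whose flags end up as
     exactly 1 (every recorded replacement lies inside IN_RTX) are
     cancelled; a reload still referenced elsewhere is kept.  */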
1696   for (i = 0, j = 0; i < n_replacements; i++)
1697     {
1698       if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1699 	reload_flags[replacements[i].what] |= 1;
1700       else
1701 	{
1702 	  replacements[j++] = replacements[i];
1703 	  reload_flags[replacements[i].what] |= 2;
1704 	}
1705     }
1706   /* Note that the following store must be done before the recursive calls.  */
1707   n_replacements = j;
1708 
1709   for (i = n_reloads - 1; i >= 0; i--)
1710     {
1711       if (reload_flags[i] == 1)
1712 	{
1713 	  deallocate_reload_reg (i);
1714 	  remove_address_replacements (rld[i].in);
1715 	  rld[i].in = 0;
1716 	  something_changed = 1;
1717 	}
1718     }
1719   return something_changed;
1720 }
1721 
1722 /* If there is only one output reload, and it is not for an earlyclobber
1723    operand, try to combine it with a (logically unrelated) input reload
1724    to reduce the number of reload registers needed.
1725 
1726    This is safe if the input reload does not appear in
1727    the value being output-reloaded, because this implies
1728    it is not needed any more once the original insn completes.
1729 
1730    If that doesn't work, see if we can use any of the registers that
1731    die in this insn as a reload register.  We can if it is of the right
1732    class and does not appear in the value being output-reloaded.  */
1733 
1734 static void
1735 combine_reloads (void)
1736 {
1737   int i, regno;
1738   int output_reload = -1;
1739   int secondary_out = -1;
1740   rtx note;
1741 
1742   /* Find the output reload; return unless there is exactly one
1743      and that one is mandatory.  */
1744 
1745   for (i = 0; i < n_reloads; i++)
1746     if (rld[i].out != 0)
1747       {
1748 	if (output_reload >= 0)
1749 	  return;
1750 	output_reload = i;
1751       }
1752 
1753   if (output_reload < 0 || rld[output_reload].optional)
1754     return;
1755 
1756   /* An input-output reload isn't combinable.  */
1757 
1758   if (rld[output_reload].in != 0)
1759     return;
1760 
1761   /* If this reload is for an earlyclobber operand, we can't do anything.  */
1762   if (earlyclobber_operand_p (rld[output_reload].out))
1763     return;
1764 
1765   /* If there is a reload for part of the address of this operand, we would
1766      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1767      its life to the point where doing this combine would not lower the
1768      number of spill registers needed.  */
1769   for (i = 0; i < n_reloads; i++)
1770     if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1771 	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1772 	&& rld[i].opnum == rld[output_reload].opnum)
1773       return;
1774 
1775   /* Check each input reload; can we combine it?  */
1776 
1777   for (i = 0; i < n_reloads; i++)
1778     if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1779 	/* Life span of this reload must not extend past main insn.  */
1780 	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1781 	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1782 	&& rld[i].when_needed != RELOAD_OTHER
1783 	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1784 	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1785 				       [(int) rld[output_reload].outmode])
1786 	&& known_eq (rld[i].inc, 0)
1787 	&& rld[i].reg_rtx == 0
1788 	/* Don't combine two reloads with different secondary
1789 	   memory locations.  */
1790 	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1791 	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1792 	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1793 			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1794 	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1795 	    ? (rld[i].rclass == rld[output_reload].rclass)
1796 	    : (reg_class_subset_p (rld[i].rclass,
1797 				   rld[output_reload].rclass)
1798 	       || reg_class_subset_p (rld[output_reload].rclass,
1799 				      rld[i].rclass)))
1800 	&& (MATCHES (rld[i].in, rld[output_reload].out)
1801 	    /* Args reversed because the first arg seems to be
1802 	       the one that we imagine being modified
1803 	       while the second is the one that might be affected.  */
1804 	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1805 						      rld[i].in)
1806 		/* However, if the input is a register that appears inside
1807 		   the output, then we also can't share.
1808 		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1809 		   If the same reload reg is used for both reg 69 and the
1810 		   result to be stored in memory, then that result
1811 		   will clobber the address of the memory ref.  */
1812 		&& ! (REG_P (rld[i].in)
1813 		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1814 							     rld[output_reload].out))))
1815 	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1816 					 rld[i].when_needed != RELOAD_FOR_INPUT)
1817 	&& (reg_class_size[(int) rld[i].rclass]
1818 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1819 	/* We will allow making things slightly worse by combining an
1820 	   input and an output, but no worse than that.  */
1821 	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1822 	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1823       {
1824 	int j;
1825 
1826 	/* We have found a reload to combine with!  */
1827 	rld[i].out = rld[output_reload].out;
1828 	rld[i].out_reg = rld[output_reload].out_reg;
1829 	rld[i].outmode = rld[output_reload].outmode;
1830 	/* Mark the old output reload as inoperative.  */
1831 	rld[output_reload].out = 0;
1832 	/* The combined reload is needed for the entire insn.  */
1833 	rld[i].when_needed = RELOAD_OTHER;
1834 	/* If the output reload had a secondary reload, copy it.  */
1835 	if (rld[output_reload].secondary_out_reload != -1)
1836 	  {
1837 	    rld[i].secondary_out_reload
1838 	      = rld[output_reload].secondary_out_reload;
1839 	    rld[i].secondary_out_icode
1840 	      = rld[output_reload].secondary_out_icode;
1841 	  }
1842 
1843 	/* Copy any secondary MEM.  */
1844 	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1845 	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1846 	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1847 	/* If required, minimize the register class.  */
1848 	if (reg_class_subset_p (rld[output_reload].rclass,
1849 				rld[i].rclass))
1850 	  rld[i].rclass = rld[output_reload].rclass;
1851 
1852 	/* Transfer all replacements from the old reload to the combined.  */
1853 	for (j = 0; j < n_replacements; j++)
1854 	  if (replacements[j].what == output_reload)
1855 	    replacements[j].what = i;
1856 
1857 	return;
1858       }
1859 
1860   /* If this insn has only one operand that is modified or written (assumed
1861      to be the first), it must be the one corresponding to this reload.  It
1862      is safe to use anything that dies in this insn for that output provided
1863      that it does not occur in the output (we already know it isn't an
1864      earlyclobber).  If this is an asm insn, give up.  */
1865 
1866   if (INSN_CODE (this_insn) == -1)
1867     return;
1868 
1869   for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1870     if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1871 	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1872       return;
1873 
1874   /* See if some hard register that dies in this insn and is not used in
1875      the output is of the right class.  Only works if the register we pick
1876      up can fully hold our output reload.  */
1877   for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1878     if (REG_NOTE_KIND (note) == REG_DEAD
1879 	&& REG_P (XEXP (note, 0))
1880 	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1881 						rld[output_reload].out)
1882 	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1883 	&& targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1884 	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1885 			      regno)
1886 	&& (hard_regno_nregs (regno, rld[output_reload].outmode)
1887 	    <= REG_NREGS (XEXP (note, 0)))
1888 	/* Ensure that a secondary or tertiary reload for this output
1889 	   won't want this register.  */
1890 	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1891 	    || (!(TEST_HARD_REG_BIT
1892 		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1893 		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1894 		    || !(TEST_HARD_REG_BIT
1895 			 (reg_class_contents[(int) rld[secondary_out].rclass],
1896 			  regno)))))
1897 	&& !fixed_regs[regno]
1898 	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1899 	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1900 	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1901 			       ORIGINAL_REGNO (XEXP (note, 0)))
1902 		&& REG_NREGS (XEXP (note, 0)) == 1)))
1903       {
1904 	rld[output_reload].reg_rtx
1905 	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1906 	return;
1907       }
1908 }
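
/* Illustrative sketch of the combination above (hypothetical insn and
   pseudo numbers): in

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   suppose pseudo 101 needs an input reload, pseudo 100 needs the single
   mandatory output reload, and 101 does not appear in the value being
   output-reloaded.  Provided the class, mode and secondary-memory checks
   above also pass, the two reloads are merged into one RELOAD_OTHER
   reload: the same hard register first receives the value of 101 before
   the insn and then carries the result to be stored back for 100
   afterwards, so a single spill register serves both.  */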
1909 
1910 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1911    See if one of IN and OUT is a register that may be used;
1912    this is desirable since a spill-register won't be needed.
1913    If so, return the register rtx that proves acceptable.
1914 
1915    INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1916    RCLASS is the register class required for the reload.
1917 
1918    If FOR_REAL is >= 0, it is the number of the reload,
1919    and in some cases when it can be discovered that OUT doesn't need
1920    to be computed, clear out rld[FOR_REAL].out.
1921 
1922    If FOR_REAL is -1, this should not be done, because this call
1923    is just to see if a register can be found, not to find and install it.
1924 
1925    EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
1926    puts an additional constraint on being able to use IN for OUT since
1927    IN must not appear elsewhere in the insn (it is assumed that IN itself
1928    is safe from the earlyclobber).  */
1929 
1930 static rtx
1931 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1932 		   machine_mode inmode, machine_mode outmode,
1933 		   reg_class_t rclass, int for_real, int earlyclobber)
1934 {
1935   rtx in = real_in;
1936   rtx out = real_out;
1937   int in_offset = 0;
1938   int out_offset = 0;
1939   rtx value = 0;
1940 
1941   /* If operands exceed a word, we can't use either of them
1942      unless they have the same size.  */
1943   if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1944       && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1945 	  || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1946     return 0;
1947 
1948   /* Note that {in,out}_offset are needed only when 'in' or 'out'
1949      respectively refers to a hard register.  */
1950 
1951   /* Find the inside of any subregs.  */
1952   while (GET_CODE (out) == SUBREG)
1953     {
1954       if (REG_P (SUBREG_REG (out))
1955 	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1956 	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1957 					   GET_MODE (SUBREG_REG (out)),
1958 					   SUBREG_BYTE (out),
1959 					   GET_MODE (out));
1960       out = SUBREG_REG (out);
1961     }
1962   while (GET_CODE (in) == SUBREG)
1963     {
1964       if (REG_P (SUBREG_REG (in))
1965 	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1966 	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1967 					  GET_MODE (SUBREG_REG (in)),
1968 					  SUBREG_BYTE (in),
1969 					  GET_MODE (in));
1970       in = SUBREG_REG (in);
1971     }
1972 
1973   /* Narrow down the reg class, the same way push_reload will;
1974      otherwise we might find a dummy now, but push_reload won't.  */
1975   {
1976     reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1977     if (preferred_class != NO_REGS)
1978       rclass = (enum reg_class) preferred_class;
1979   }
1980 
1981   /* See if OUT will do.  */
1982   if (REG_P (out)
1983       && REGNO (out) < FIRST_PSEUDO_REGISTER)
1984     {
1985       unsigned int regno = REGNO (out) + out_offset;
1986       unsigned int nwords = hard_regno_nregs (regno, outmode);
1987       rtx saved_rtx;
1988 
1989       /* When we consider whether the insn uses OUT,
1990 	 ignore references within IN.  They don't prevent us
1991 	 from copying IN into OUT, because those refs would
1992 	 move into the insn that reloads IN.
1993 
1994 	 However, we only ignore IN in its role as this reload.
1995 	 If the insn uses IN elsewhere and it contains OUT,
1996 	 that counts.  We can't be sure it's the "same" operand
1997 	 so it might not go through this reload.
1998 
1999          We also need to avoid using OUT if it, or part of it, is a
2000          fixed register.  Modifying such registers, even transiently,
2001          may have undefined effects on the machine, such as modifying
2002          the stack pointer.  */
2003       saved_rtx = *inloc;
2004       *inloc = const0_rtx;
2005 
2006       if (regno < FIRST_PSEUDO_REGISTER
2007 	  && targetm.hard_regno_mode_ok (regno, outmode)
2008 	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2009 					     PATTERN (this_insn), outloc))
2010 	{
2011 	  unsigned int i;
2012 
2013 	  for (i = 0; i < nwords; i++)
2014 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2015 				     regno + i)
2016 		|| fixed_regs[regno + i])
2017 	      break;
2018 
2019 	  if (i == nwords)
2020 	    {
2021 	      if (REG_P (real_out))
2022 		value = real_out;
2023 	      else
2024 		value = gen_rtx_REG (outmode, regno);
2025 	    }
2026 	}
2027 
2028       *inloc = saved_rtx;
2029     }
2030 
2031   /* Consider using IN if OUT was not acceptable
2032      or if OUT dies in this insn (like the quotient in a divmod insn).
2033      We can't use IN unless it dies in this insn,
2034      which means we must know accurately which hard regs are live.
2035      Also, the result can't go in IN if IN is used within OUT,
2036      or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2037   if (hard_regs_live_known
2038       && REG_P (in)
2039       && REGNO (in) < FIRST_PSEUDO_REGISTER
2040       && (value == 0
2041 	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2042       && find_reg_note (this_insn, REG_DEAD, real_in)
2043       && !fixed_regs[REGNO (in)]
2044       && targetm.hard_regno_mode_ok (REGNO (in),
2045 				     /* The only case where out and real_out
2046 					might have different modes is where
2047 					real_out is a subreg, and in that
2048 					case, out has a real mode.  */
2049 				     (GET_MODE (out) != VOIDmode
2050 				      ? GET_MODE (out) : outmode))
2051       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2052 	  /* However only do this if we can be sure that this input
2053 	     operand doesn't correspond with an uninitialized pseudo.
2054 	     global can assign some hardreg to it that is the same as
2055 	     the one assigned to a different, also live pseudo (as it
2056 	     can ignore the conflict).  We must never introduce writes
2057 	     to such hardregs, as they would clobber the other live
2058 	     pseudo.  See PR 20973.  */
2059 	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2060 			     ORIGINAL_REGNO (in))
2061 	      /* Similarly, only do this if we can be sure that the death
2062 		 note is still valid.  global can assign some hardreg to
2063 		 the pseudo referenced in the note and simultaneously a
2064 		 subword of this hardreg to a different, also live pseudo,
2065 		 because only another subword of the hardreg is actually
2066 		 used in the insn.  This cannot happen if the pseudo has
2067 		 been assigned exactly one hardreg.  See PR 33732.  */
2068 	      && REG_NREGS (in) == 1)))
2069     {
2070       unsigned int regno = REGNO (in) + in_offset;
2071       unsigned int nwords = hard_regno_nregs (regno, inmode);
2072 
2073       if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2074 	  && ! hard_reg_set_here_p (regno, regno + nwords,
2075 				    PATTERN (this_insn))
2076 	  && (! earlyclobber
2077 	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2078 						 PATTERN (this_insn), inloc)))
2079 	{
2080 	  unsigned int i;
2081 
2082 	  for (i = 0; i < nwords; i++)
2083 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2084 				     regno + i))
2085 	      break;
2086 
2087 	  if (i == nwords)
2088 	    {
2089 	      /* If we were going to use OUT as the reload reg
2090 		 and changed our mind, it means OUT is a dummy that
2091 		 dies here.  So don't bother copying value to it.  */
2092 	      if (for_real >= 0 && value == real_out)
2093 		rld[for_real].out = 0;
2094 	      if (REG_P (real_in))
2095 		value = real_in;
2096 	      else
2097 		value = gen_rtx_REG (inmode, regno);
2098 	    }
2099 	}
2100     }
2101 
2102   return value;
2103 }
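
/* Illustrative sketch (hypothetical insn and register numbers): for an
   in-out reload whose IN is pseudo 100 (not assigned a hard register)
   and whose OUT is hard register 2, as in

     (set (reg:SI 2) (plus:SI (reg:SI 100) (const_int 1)))

   with operands 0 and 1 constrained to match, find_dummy_reload can
   return (reg:SI 2): the output register itself serves as the reload
   register, the value of pseudo 100 is copied into it before the insn,
   and no separate spill register is needed.  This requires that hard
   register 2 is in RCLASS and is not referenced elsewhere in the insn,
   as checked above.  */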
2104 
2105 /* This page contains subroutines used mainly for determining
2106    whether the IN or an OUT of a reload can serve as the
2107    reload register.  */
2108 
2109 /* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2110 
2111 int
2112 earlyclobber_operand_p (rtx x)
2113 {
2114   int i;
2115 
2116   for (i = 0; i < n_earlyclobbers; i++)
2117     if (reload_earlyclobbers[i] == x)
2118       return 1;
2119 
2120   return 0;
2121 }
2122 
2123 /* Return 1 if expression X alters a hard reg in the range
2124    from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2125    either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2126    X should be the body of an instruction.  */
2127 
2128 static int
2129 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2130 {
2131   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2132     {
2133       rtx op0 = SET_DEST (x);
2134 
2135       while (GET_CODE (op0) == SUBREG)
2136 	op0 = SUBREG_REG (op0);
2137       if (REG_P (op0))
2138 	{
2139 	  unsigned int r = REGNO (op0);
2140 
2141 	  /* See if this reg overlaps range under consideration.  */
2142 	  if (r < end_regno
2143 	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2144 	    return 1;
2145 	}
2146     }
2147   else if (GET_CODE (x) == PARALLEL)
2148     {
2149       int i = XVECLEN (x, 0) - 1;
2150 
2151       for (; i >= 0; i--)
2152 	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2153 	  return 1;
2154     }
2155 
2156   return 0;
2157 }
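
/* For illustration (assumed register numbering, and a DImode value that
   needs two hard registers): with X = (set (reg:DI 3) (...)), the call
   hard_reg_set_here_p (2, 4, X) returns 1, because the stored range
   [3, 5) overlaps the queried range [2, 4); with X = (set (reg:SI 5)
   (...)) the same call returns 0.  */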
2158 
2159 /* Return true if ADDR is a valid memory address for mode MODE
2160    in address space AS, and check that each pseudo reg has the
2161    proper kind of hard reg.  */
2162 
2163 bool
2164 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2165 				    rtx addr, addr_space_t as)
2166 {
2167 #ifdef GO_IF_LEGITIMATE_ADDRESS
2168   gcc_assert (ADDR_SPACE_GENERIC_P (as));
2169   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2170   return false;
2171 
2172  win:
2173   return true;
2174 #else
2175   return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2176 #endif
2177 }
2178 
2179 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2180    if they are the same hard reg, and has special hacks for
2181    autoincrement and autodecrement.
2182    This is specifically intended for find_reloads to use
2183    in determining whether two operands match.
2184    X is the operand whose number is the lower of the two.
2185 
2186    The value is 2 if Y contains a pre-increment that matches
2187    a non-incrementing address in X.  */
2188 
2189 /* ??? To be completely correct, we should arrange to pass
2190    for X the output operand and for Y the input operand.
2191    For now, we assume that the output operand has the lower number
2192    because that is natural in (SET output (... input ...)).  */
2193 
2194 int
2195 operands_match_p (rtx x, rtx y)
2196 {
2197   int i;
2198   RTX_CODE code = GET_CODE (x);
2199   const char *fmt;
2200   int success_2;
2201 
2202   if (x == y)
2203     return 1;
2204   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2205       && (REG_P (y) || (GET_CODE (y) == SUBREG
2206 				  && REG_P (SUBREG_REG (y)))))
2207     {
2208       int j;
2209 
2210       if (code == SUBREG)
2211 	{
2212 	  i = REGNO (SUBREG_REG (x));
2213 	  if (i >= FIRST_PSEUDO_REGISTER)
2214 	    goto slow;
2215 	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2216 				    GET_MODE (SUBREG_REG (x)),
2217 				    SUBREG_BYTE (x),
2218 				    GET_MODE (x));
2219 	}
2220       else
2221 	i = REGNO (x);
2222 
2223       if (GET_CODE (y) == SUBREG)
2224 	{
2225 	  j = REGNO (SUBREG_REG (y));
2226 	  if (j >= FIRST_PSEUDO_REGISTER)
2227 	    goto slow;
2228 	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2229 				    GET_MODE (SUBREG_REG (y)),
2230 				    SUBREG_BYTE (y),
2231 				    GET_MODE (y));
2232 	}
2233       else
2234 	j = REGNO (y);
2235 
2236       /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2237 	 multiple hard register group of scalar integer registers, so that
2238 	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2239 	 register.  */
2240       scalar_int_mode xmode;
2241       if (REG_WORDS_BIG_ENDIAN
2242 	  && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2243 	  && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2244 	  && i < FIRST_PSEUDO_REGISTER)
2245 	i += hard_regno_nregs (i, xmode) - 1;
2246       scalar_int_mode ymode;
2247       if (REG_WORDS_BIG_ENDIAN
2248 	  && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2249 	  && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2250 	  && j < FIRST_PSEUDO_REGISTER)
2251 	j += hard_regno_nregs (j, ymode) - 1;
2252 
2253       return i == j;
2254     }
2255   /* If two operands must match, because they are really a single
2256      operand of an assembler insn, then two postincrements are invalid
2257      because the assembler insn would increment only once.
2258      On the other hand, a postincrement matches ordinary indexing
2259      if the postincrement is the output operand.  */
2260   if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2261     return operands_match_p (XEXP (x, 0), y);
2262   /* Two preincrements are invalid
2263      because the assembler insn would increment only once.
2264      On the other hand, a preincrement matches ordinary indexing
2265      if the preincrement is the input operand.
2266      In this case, return 2, since some callers need to do special
2267      things when this happens.  */
2268   if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2269       || GET_CODE (y) == PRE_MODIFY)
2270     return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2271 
2272  slow:
2273 
2274   /* Now we have disposed of all the cases in which different rtx codes
2275      can match.  */
2276   if (code != GET_CODE (y))
2277     return 0;
2278 
2279   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2280   if (GET_MODE (x) != GET_MODE (y))
2281     return 0;
2282 
2283   /* MEMs referring to different address space are not equivalent.  */
2284   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2285     return 0;
2286 
2287   switch (code)
2288     {
2289     CASE_CONST_UNIQUE:
2290       return 0;
2291 
2292     case CONST_VECTOR:
2293       if (!same_vector_encodings_p (x, y))
2294 	return false;
2295       break;
2296 
2297     case LABEL_REF:
2298       return label_ref_label (x) == label_ref_label (y);
2299     case SYMBOL_REF:
2300       return XSTR (x, 0) == XSTR (y, 0);
2301 
2302     default:
2303       break;
2304     }
2305 
2306   /* Compare the elements.  If any pair of corresponding elements
2307      fails to match, return 0 for the whole thing.  */
2308 
2309   success_2 = 0;
2310   fmt = GET_RTX_FORMAT (code);
2311   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2312     {
2313       int val, j;
2314       switch (fmt[i])
2315 	{
2316 	case 'w':
2317 	  if (XWINT (x, i) != XWINT (y, i))
2318 	    return 0;
2319 	  break;
2320 
2321 	case 'i':
2322 	  if (XINT (x, i) != XINT (y, i))
2323 	    return 0;
2324 	  break;
2325 
2326 	case 'p':
2327 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2328 	    return 0;
2329 	  break;
2330 
2331 	case 'e':
2332 	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2333 	  if (val == 0)
2334 	    return 0;
2335 	  /* If any subexpression returns 2,
2336 	     we should return 2 if we are successful.  */
2337 	  if (val == 2)
2338 	    success_2 = 1;
2339 	  break;
2340 
2341 	case '0':
2342 	  break;
2343 
2344 	case 'E':
2345 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2346 	    return 0;
2347 	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2348 	    {
2349 	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2350 	      if (val == 0)
2351 		return 0;
2352 	      if (val == 2)
2353 		success_2 = 1;
2354 	    }
2355 	  break;
2356 
2357 	  /* It is believed that rtx's at this level will never
2358 	     contain anything but integers and other rtx's,
2359 	     except for within LABEL_REFs and SYMBOL_REFs.  */
2360 	default:
2361 	  gcc_unreachable ();
2362 	}
2363     }
2364   return 1 + success_2;
2365 }
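
/* Illustrative examples (assumed modes and register numbers):

     operands_match_p ((reg:SI 3), (reg:SI 3)) returns 1.
     operands_match_p ((mem:SI (reg:SI 4)),
                       (mem:SI (pre_inc:SI (reg:SI 4)))) returns 2.
     operands_match_p ((mem:SI (post_inc:SI (reg:SI 4))),
                       (mem:SI (reg:SI 4))) returns 1.

   The value 2 in the second case tells callers that Y uses a
   pre-increment form of an address that X uses without incrementing,
   which needs special handling as described above.  */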
2366 
2367 /* Describe the range of registers or memory referenced by X.
2368    If X is a register, set REG_FLAG and put the first register
2369    number into START and the last plus one into END.
2370    If X is a memory reference, put a base address into BASE
2371    and a range of integer offsets into START and END.
2372    If X is pushing on the stack, we can assume it causes no trouble,
2373    so we set the SAFE field.  */
2374 
2375 static struct decomposition
2376 decompose (rtx x)
2377 {
2378   struct decomposition val;
2379   int all_const = 0, regno;
2380 
2381   memset (&val, 0, sizeof (val));
2382 
2383   switch (GET_CODE (x))
2384     {
2385     case MEM:
2386       {
2387 	rtx base = NULL_RTX, offset = 0;
2388 	rtx addr = XEXP (x, 0);
2389 
2390 	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2391 	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2392 	  {
2393 	    val.base = XEXP (addr, 0);
2394 	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2395 	    val.end = GET_MODE_SIZE (GET_MODE (x));
2396 	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2397 	    return val;
2398 	  }
2399 
2400 	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2401 	  {
2402 	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2403 		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2404 		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2405 	      {
2406 		val.base  = XEXP (addr, 0);
2407 		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2408 		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2409 		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2410 		return val;
2411 	      }
2412 	  }
2413 
2414 	if (GET_CODE (addr) == CONST)
2415 	  {
2416 	    addr = XEXP (addr, 0);
2417 	    all_const = 1;
2418 	  }
2419 	if (GET_CODE (addr) == PLUS)
2420 	  {
2421 	    if (CONSTANT_P (XEXP (addr, 0)))
2422 	      {
2423 		base = XEXP (addr, 1);
2424 		offset = XEXP (addr, 0);
2425 	      }
2426 	    else if (CONSTANT_P (XEXP (addr, 1)))
2427 	      {
2428 		base = XEXP (addr, 0);
2429 		offset = XEXP (addr, 1);
2430 	      }
2431 	  }
2432 
2433 	if (offset == 0)
2434 	  {
2435 	    base = addr;
2436 	    offset = const0_rtx;
2437 	  }
2438 	if (GET_CODE (offset) == CONST)
2439 	  offset = XEXP (offset, 0);
2440 	if (GET_CODE (offset) == PLUS)
2441 	  {
2442 	    if (CONST_INT_P (XEXP (offset, 0)))
2443 	      {
2444 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2445 		offset = XEXP (offset, 0);
2446 	      }
2447 	    else if (CONST_INT_P (XEXP (offset, 1)))
2448 	      {
2449 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2450 		offset = XEXP (offset, 1);
2451 	      }
2452 	    else
2453 	      {
2454 		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2455 		offset = const0_rtx;
2456 	      }
2457 	  }
2458 	else if (!CONST_INT_P (offset))
2459 	  {
2460 	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2461 	    offset = const0_rtx;
2462 	  }
2463 
2464 	if (all_const && GET_CODE (base) == PLUS)
2465 	  base = gen_rtx_CONST (GET_MODE (base), base);
2466 
2467 	gcc_assert (CONST_INT_P (offset));
2468 
2469 	val.start = INTVAL (offset);
2470 	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2471 	val.base = base;
2472       }
2473       break;
2474 
2475     case REG:
2476       val.reg_flag = 1;
2477       regno = true_regnum (x);
2478       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2479 	{
2480 	  /* A pseudo with no hard reg.  */
2481 	  val.start = REGNO (x);
2482 	  val.end = val.start + 1;
2483 	}
2484       else
2485 	{
2486 	  /* A hard reg.  */
2487 	  val.start = regno;
2488 	  val.end = end_hard_regno (GET_MODE (x), regno);
2489 	}
2490       break;
2491 
2492     case SUBREG:
2493       if (!REG_P (SUBREG_REG (x)))
2494 	/* This could be more precise, but it's good enough.  */
2495 	return decompose (SUBREG_REG (x));
2496       regno = true_regnum (x);
2497       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2498 	return decompose (SUBREG_REG (x));
2499 
2500       /* A hard reg.  */
2501       val.reg_flag = 1;
2502       val.start = regno;
2503       val.end = regno + subreg_nregs (x);
2504       break;
2505 
2506     case SCRATCH:
2507       /* This hasn't been assigned yet, so it can't conflict yet.  */
2508       val.safe = 1;
2509       break;
2510 
2511     default:
2512       gcc_assert (CONSTANT_P (x));
2513       val.safe = 1;
2514       break;
2515     }
2516   return val;
2517 }
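
/* Illustrative results (assuming 4-byte SImode and a pseudo 100 that has
   not been assigned a hard register):

     decompose ((mem:SI (plus (reg fp) (const_int 8))))
       => reg_flag 0, base (reg fp), start 8, end 12
     decompose ((reg:SI 100))
       => reg_flag 1, start 100, end 101
     decompose ((mem:SI (pre_dec (reg sp))))
       => base (reg sp), start -4, end 4, safe 1

   The SAFE flag in the last case records that a push onto the stack can
   be assumed to cause no conflict, as noted above.  */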
2518 
2519 /* Return 1 if altering Y will not modify the value of X.
2520    Y is also described by YDATA, which should be decompose (Y).  */
2521 
2522 static int
2523 immune_p (rtx x, rtx y, struct decomposition ydata)
2524 {
2525   struct decomposition xdata;
2526 
2527   if (ydata.reg_flag)
2528     /* In this case the decomposition structure contains register
2529        numbers rather than byte offsets.  */
2530     return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2531 					  ydata.end.to_constant (),
2532 					  x, (rtx *) 0);
2533   if (ydata.safe)
2534     return 1;
2535 
2536   gcc_assert (MEM_P (y));
2537   /* If Y is memory and X is not, Y can't affect X.  */
2538   if (!MEM_P (x))
2539     return 1;
2540 
2541   xdata = decompose (x);
2542 
2543   if (! rtx_equal_p (xdata.base, ydata.base))
2544     {
2545       /* If bases are distinct symbolic constants, there is no overlap.  */
2546       if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2547 	return 1;
2548       /* Constants and stack slots never overlap.  */
2549       if (CONSTANT_P (xdata.base)
2550 	  && (ydata.base == frame_pointer_rtx
2551 	      || ydata.base == hard_frame_pointer_rtx
2552 	      || ydata.base == stack_pointer_rtx))
2553 	return 1;
2554       if (CONSTANT_P (ydata.base)
2555 	  && (xdata.base == frame_pointer_rtx
2556 	      || xdata.base == hard_frame_pointer_rtx
2557 	      || xdata.base == stack_pointer_rtx))
2558 	return 1;
2559       /* If either base is variable, we don't know anything.  */
2560       return 0;
2561     }
2562 
2563   return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2564 }
2565 
2566 /* Similar, but calls decompose.  */
2567 
2568 int
2569 safe_from_earlyclobber (rtx op, rtx clobber)
2570 {
2571   struct decomposition early_data;
2572 
2573   early_data = decompose (clobber);
2574   return immune_p (op, clobber, early_data);
2575 }
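
/* Illustrative use (assuming two frame-pointer-relative SImode stack
   slots): for OP = (mem:SI (plus (reg fp) (const_int 8))) and
   CLOBBER = (mem:SI (plus (reg fp) (const_int 16))), the decomposed
   ranges [8, 12) and [16, 20) do not overlap, so the function returns 1;
   with CLOBBER at offset 10 the ranges [8, 12) and [10, 14) overlap and
   it returns 0.  */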
2576 
2577 /* Main entry point of this file: search the body of INSN
2578    for values that need reloading and record them with push_reload.
2579    REPLACE nonzero means record also where the values occur
2580    so that subst_reloads can be used.
2581 
2582    IND_LEVELS says how many levels of indirection are supported by this
2583    machine; a value of zero means that a memory reference is not a valid
2584    memory address.
2585 
2586    LIVE_KNOWN says we have valid information about which hard
2587    regs are live at each point in the program; this is true when
2588    we are called from global_alloc but false when stupid register
2589    allocation has been done.
2590 
2591    RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number whose
2592    element is nonnegative if that reg has been commandeered for reloading into.
2593    It is copied into STATIC_RELOAD_REG_P and referenced from there
2594    by various subroutines.
2595 
2596    Return TRUE if some operands need to be changed, because of swapping
2597    commutative operands, reg_equiv_address substitution, or whatever.  */
2598 
2599 int
2600 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2601 	      short *reload_reg_p)
2602 {
2603   int insn_code_number;
2604   int i, j;
2605   int noperands;
2606   /* These start out as the constraints for the insn
2607      and they are chewed up as we consider alternatives.  */
2608   const char *constraints[MAX_RECOG_OPERANDS];
2609   /* These are the preferred classes for an operand, or NO_REGS if it isn't
2610      a register.  */
2611   enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2612   char pref_or_nothing[MAX_RECOG_OPERANDS];
2613   /* Nonzero for a MEM operand whose entire address needs a reload.
2614      May be -1 to indicate the entire address may or may not need a reload.  */
2615   int address_reloaded[MAX_RECOG_OPERANDS];
2616   /* Nonzero for an address operand that needs to be completely reloaded.
2617      May be -1 to indicate the entire operand may or may not need a reload.  */
2618   int address_operand_reloaded[MAX_RECOG_OPERANDS];
2619   /* Value of enum reload_type to use for operand.  */
2620   enum reload_type operand_type[MAX_RECOG_OPERANDS];
2621   /* Value of enum reload_type to use within address of operand.  */
2622   enum reload_type address_type[MAX_RECOG_OPERANDS];
2623   /* Save the usage of each operand.  */
2624   enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2625   int no_input_reloads = 0, no_output_reloads = 0;
2626   int n_alternatives;
2627   reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2628   char this_alternative_match_win[MAX_RECOG_OPERANDS];
2629   char this_alternative_win[MAX_RECOG_OPERANDS];
2630   char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2631   char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2632   int this_alternative_matches[MAX_RECOG_OPERANDS];
2633   reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2634   int this_alternative_number;
2635   int goal_alternative_number = 0;
2636   int operand_reloadnum[MAX_RECOG_OPERANDS];
2637   int goal_alternative_matches[MAX_RECOG_OPERANDS];
2638   int goal_alternative_matched[MAX_RECOG_OPERANDS];
2639   char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2640   char goal_alternative_win[MAX_RECOG_OPERANDS];
2641   char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2642   char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2643   int goal_alternative_swapped;
2644   int best;
2645   int commutative;
2646   char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2647   rtx substed_operand[MAX_RECOG_OPERANDS];
2648   rtx body = PATTERN (insn);
2649   rtx set = single_set (insn);
2650   int goal_earlyclobber = 0, this_earlyclobber;
2651   machine_mode operand_mode[MAX_RECOG_OPERANDS];
2652   int retval = 0;
2653 
2654   this_insn = insn;
2655   n_reloads = 0;
2656   n_replacements = 0;
2657   n_earlyclobbers = 0;
2658   replace_reloads = replace;
2659   hard_regs_live_known = live_known;
2660   static_reload_reg_p = reload_reg_p;
2661 
2662   if (JUMP_P (insn) && INSN_CODE (insn) < 0)
2663     {
2664       extract_insn (insn);
2665       for (i = 0; i < recog_data.n_operands; i++)
2666 	if (recog_data.operand_type[i] != OP_IN)
2667 	  break;
2668       if (i < recog_data.n_operands)
2669 	{
2670 	  error_for_asm (insn,
2671 			 "the target does not support %<asm goto%> "
2672 			 "with outputs in %<asm%>");
2673 	  ira_nullify_asm_goto (insn);
2674 	  return 0;
2675 	}
2676     }
2677 
2678   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads.  */
2679   if (JUMP_P (insn) || CALL_P (insn))
2680     no_output_reloads = 1;
2681 
2682   /* The eliminated forms of any secondary memory locations are per-insn, so
2683      clear them out here.  */
2684 
2685   if (secondary_memlocs_elim_used)
2686     {
2687       memset (secondary_memlocs_elim, 0,
2688 	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2689       secondary_memlocs_elim_used = 0;
2690     }
2691 
2692   /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2693      is cheap to move between them.  If it is not, there may not be an insn
2694      to do the copy, so we may need a reload.  */
2695   if (GET_CODE (body) == SET
2696       && REG_P (SET_DEST (body))
2697       && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2698       && REG_P (SET_SRC (body))
2699       && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2700       && register_move_cost (GET_MODE (SET_SRC (body)),
2701 			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2702 			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2703     return 0;
2704 
2705   extract_insn (insn);
2706 
2707   noperands = reload_n_operands = recog_data.n_operands;
2708   n_alternatives = recog_data.n_alternatives;
2709 
2710   /* Just return "no reloads" if insn has no operands with constraints.  */
2711   if (noperands == 0 || n_alternatives == 0)
2712     return 0;
2713 
2714   insn_code_number = INSN_CODE (insn);
2715   this_insn_is_asm = insn_code_number < 0;
2716 
2717   memcpy (operand_mode, recog_data.operand_mode,
2718 	  noperands * sizeof (machine_mode));
2719   memcpy (constraints, recog_data.constraints,
2720 	  noperands * sizeof (const char *));
2721 
2722   commutative = -1;
2723 
2724   /* If we will need to know, later, whether some pair of operands
2725      are the same, we must compare them now and save the result.
2726      Reloading the base and index registers will clobber them
2727      and afterward they will fail to match.  */
2728 
2729   for (i = 0; i < noperands; i++)
2730     {
2731       const char *p;
2732       int c;
2733       char *end;
2734 
2735       substed_operand[i] = recog_data.operand[i];
2736       p = constraints[i];
2737 
2738       modified[i] = RELOAD_READ;
2739 
2740       /* Scan this operand's constraint to see if it is an output operand,
2741 	 an in-out operand, is commutative, or should match another.  */
2742 
2743       while ((c = *p))
2744 	{
2745 	  p += CONSTRAINT_LEN (c, p);
2746 	  switch (c)
2747 	    {
2748 	    case '=':
2749 	      modified[i] = RELOAD_WRITE;
2750 	      break;
2751 	    case '+':
2752 	      modified[i] = RELOAD_READ_WRITE;
2753 	      break;
2754 	    case '%':
2755 	      {
2756 		/* The last operand should not be marked commutative.  */
2757 		gcc_assert (i != noperands - 1);
2758 
2759 		/* We currently only support one commutative pair of
2760 		   operands.  Some existing asm code currently uses more
2761 		   than one pair.  Previously, that would usually work,
2762 		   but sometimes it would crash the compiler.  We
2763 		   continue supporting that case as well as we can by
2764 		   silently ignoring all but the first pair.  In the
2765 		   future we may handle it correctly.  */
2766 		if (commutative < 0)
2767 		  commutative = i;
2768 		else
2769 		  gcc_assert (this_insn_is_asm);
2770 	      }
2771 	      break;
2772 	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2773 	       of locale support we don't want.  */
2774 	    case '0': case '1': case '2': case '3': case '4':
2775 	    case '5': case '6': case '7': case '8': case '9':
2776 	      {
2777 		c = strtoul (p - 1, &end, 10);
2778 		p = end;
2779 
2780 		operands_match[c][i]
2781 		  = operands_match_p (recog_data.operand[c],
2782 				      recog_data.operand[i]);
2783 
2784 		/* An operand may not match itself.  */
2785 		gcc_assert (c != i);
2786 
2787 		/* If C can be commuted with C+1, and C might need to match I,
2788 		   then C+1 might also need to match I.  */
2789 		if (commutative >= 0)
2790 		  {
2791 		    if (c == commutative || c == commutative + 1)
2792 		      {
2793 			int other = c + (c == commutative ? 1 : -1);
2794 			operands_match[other][i]
2795 			  = operands_match_p (recog_data.operand[other],
2796 					      recog_data.operand[i]);
2797 		      }
2798 		    if (i == commutative || i == commutative + 1)
2799 		      {
2800 			int other = i + (i == commutative ? 1 : -1);
2801 			operands_match[c][other]
2802 			  = operands_match_p (recog_data.operand[c],
2803 					      recog_data.operand[other]);
2804 		      }
2805 		    /* Note that C is supposed to be less than I.
2806 		       No need to consider altering both C and I because in
2807 		       that case we would alter one into the other.  */
2808 		  }
2809 	      }
2810 	    }
2811 	}
2812     }
2813 
2814   /* Examine each operand that is a memory reference or memory address
2815      and reload parts of the addresses into index registers.
2816      Also here any references to pseudo regs that didn't get hard regs
2817      but are equivalent to constants get replaced in the insn itself
2818      with those constants.  Nobody will ever see them again.
2819 
2820      Finally, set up the preferred classes of each operand.  */
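  /* (In outline: an operand with an address constraint is handed to
     find_reloads_address directly; a MEM has its address reloaded; a
     SUBREG, a PLUS, or a unary operator goes through find_reloads_toplev;
     and a REG that is equivalent to a constant or to a memory slot is
     replaced by that equivalent.)  */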
2821 
2822   for (i = 0; i < noperands; i++)
2823     {
2824       RTX_CODE code = GET_CODE (recog_data.operand[i]);
2825 
2826       address_reloaded[i] = 0;
2827       address_operand_reloaded[i] = 0;
2828       operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2829 			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2830 			 : RELOAD_OTHER);
2831       address_type[i]
2832 	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2833 	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2834 	   : RELOAD_OTHER);
2835 
2836       if (*constraints[i] == 0)
2837 	/* Ignore things like match_operator operands.  */
2838 	;
2839       else if (insn_extra_address_constraint
2840 	       (lookup_constraint (constraints[i])))
2841 	{
2842 	  address_operand_reloaded[i]
2843 	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2844 				    recog_data.operand[i],
2845 				    recog_data.operand_loc[i],
2846 				    i, operand_type[i], ind_levels, insn);
2847 
2848 	  /* If we now have a simple operand where we used to have a
2849 	     PLUS or MULT or ASHIFT, re-recognize and try again.  */
2850 	  if ((OBJECT_P (*recog_data.operand_loc[i])
2851 	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2852 	      && (GET_CODE (recog_data.operand[i]) == MULT
2853 		  || GET_CODE (recog_data.operand[i]) == ASHIFT
2854 		  || GET_CODE (recog_data.operand[i]) == PLUS))
2855 	    {
2856 	      INSN_CODE (insn) = -1;
2857 	      retval = find_reloads (insn, replace, ind_levels, live_known,
2858 				     reload_reg_p);
2859 	      return retval;
2860 	    }
2861 
2862 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2863 	  substed_operand[i] = recog_data.operand[i];
2864 
2865 	  /* Address operands are reloaded in their existing mode,
2866 	     no matter what is specified in the machine description.  */
2867 	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2868 
2869 	  /* If the address is a single CONST_INT, pick the address mode
2870 	     instead; otherwise we will later not know in which mode
2871 	     the reload should be performed.  */
2872 	  if (operand_mode[i] == VOIDmode)
2873 	    operand_mode[i] = Pmode;
2874 
2875 	}
2876       else if (code == MEM)
2877 	{
2878 	  address_reloaded[i]
2879 	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2880 				    recog_data.operand_loc[i],
2881 				    XEXP (recog_data.operand[i], 0),
2882 				    &XEXP (recog_data.operand[i], 0),
2883 				    i, address_type[i], ind_levels, insn);
2884 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2885 	  substed_operand[i] = recog_data.operand[i];
2886 	}
2887       else if (code == SUBREG)
2888 	{
2889 	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2890 	  rtx op
2891 	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2892 				   ind_levels,
2893 				   set != 0
2894 				   && &SET_DEST (set) == recog_data.operand_loc[i],
2895 				   insn,
2896 				   &address_reloaded[i]);
2897 
2898 	  /* If we made a MEM to load (a part of) the stack slot of a pseudo
2899 	     that didn't get a hard register, emit a USE with a REG_EQUAL
2900 	     note in front so that we might inherit a previous, possibly
2901 	     wider reload.  */
2902 
2903 	  if (replace
2904 	      && MEM_P (op)
2905 	      && REG_P (reg)
2906 	      && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2907 			   GET_MODE_SIZE (GET_MODE (op)))
2908 	      && reg_equiv_constant (REGNO (reg)) == 0)
2909 	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2910 						   insn),
2911 				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2912 
2913 	  substed_operand[i] = recog_data.operand[i] = op;
2914 	}
2915       else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2916 	/* We can get a PLUS as an "operand" as a result of register
2917 	   elimination.  See eliminate_regs and gen_reload.  We handle
2918 	   a unary operator by reloading the operand.  */
2919 	substed_operand[i] = recog_data.operand[i]
2920 	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2921 				 ind_levels, 0, insn,
2922 				 &address_reloaded[i]);
2923       else if (code == REG)
2924 	{
2925 	  /* This is equivalent to calling find_reloads_toplev.
2926 	     The code is duplicated for speed.
2927 	     When we find a pseudo always equivalent to a constant,
2928 	     we replace it by the constant.  We must be sure, however,
2929 	     that we don't try to replace it in the insn in which it
2930 	     is being set.  */
2931 	  int regno = REGNO (recog_data.operand[i]);
2932 	  if (reg_equiv_constant (regno) != 0
2933 	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2934 	    {
2935 	      /* Record the existing mode so that the check if constants are
2936 		 allowed will work when operand_mode isn't specified.  */
2937 
2938 	      if (operand_mode[i] == VOIDmode)
2939 		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2940 
2941 	      substed_operand[i] = recog_data.operand[i]
2942 		= reg_equiv_constant (regno);
2943 	    }
2944 	  if (reg_equiv_memory_loc (regno) != 0
2945 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2946 	    /* We need not give a valid is_set_dest argument since the case
2947 	       of a constant equivalence was checked above.  */
2948 	    substed_operand[i] = recog_data.operand[i]
2949 	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2950 				     ind_levels, 0, insn,
2951 				     &address_reloaded[i]);
2952 	}
2953       /* If the operand is still a register (we didn't replace it with an
2954 	 equivalent), get the preferred class to reload it into.  */
2955       code = GET_CODE (recog_data.operand[i]);
2956       preferred_class[i]
2957 	= ((code == REG && REGNO (recog_data.operand[i])
2958 	    >= FIRST_PSEUDO_REGISTER)
2959 	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
2960 	   : NO_REGS);
2961       pref_or_nothing[i]
2962 	= (code == REG
2963 	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2964 	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2965     }
2966 
2967   /* If this is simply a copy from operand 1 to operand 0, merge the
2968      preferred classes for the operands.  */
2969   if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2970       && recog_data.operand[1] == SET_SRC (set))
2971     {
2972       preferred_class[0] = preferred_class[1]
2973 	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2974       pref_or_nothing[0] |= pref_or_nothing[1];
2975       pref_or_nothing[1] |= pref_or_nothing[0];
2976     }
2977 
2978   /* Now see what we need for pseudo-regs that didn't get hard regs
2979      or got the wrong kind of hard reg.  For this, we must consider
2980      all the operands together against the register constraints.  */
2981 
2982   best = MAX_RECOG_OPERANDS * 2 + 600;
2983 
2984   goal_alternative_swapped = 0;
2985 
2986   /* The constraints are made of several alternatives.
2987      Each operand's constraint looks like foo,bar,... with commas
2988      separating the alternatives.  The first alternatives for all
2989      operands go together, the second alternatives go together, etc.
2990 
2991      First loop over alternatives.  */
2992 
2993   alternative_mask enabled = get_enabled_alternatives (insn);
2994   for (this_alternative_number = 0;
2995        this_alternative_number < n_alternatives;
2996        this_alternative_number++)
2997     {
2998       int swapped;
2999 
3000       if (!TEST_BIT (enabled, this_alternative_number))
3001 	{
3002 	  int i;
3003 
3004 	  for (i = 0; i < recog_data.n_operands; i++)
3005 	    constraints[i] = skip_alternative (constraints[i]);
3006 
3007 	  continue;
3008 	}
3009 
3010       /* If insn is commutative (it's safe to exchange a certain pair
3011 	 of operands) then we need to try each alternative twice, the
3012 	 second time matching those two operands as if we had
3013 	 exchanged them.  To do this, really exchange them in
3014 	 operands.  */
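      /* For instance, for a commutative addition whose first input operand
	 carries "%", the second pass evaluates each alternative as if the
	 two commutative operands had been written in the opposite order.  */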
3015       for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3016 	{
3017 	  /* Loop over operands for one constraint alternative.  */
3018 	  /* LOSERS counts those that don't fit this alternative
3019 	     and would require loading.  */
3020 	  int losers = 0;
3021 	  /* BAD is set to 1 if some operand can't fit this alternative
3022 	     even after reloading.  */
3023 	  int bad = 0;
3024 	  /* REJECT is a count of how undesirable this alternative says it is
3025 	     if any reloading is required.  If the alternative matches exactly
3026 	     then REJECT is ignored, but otherwise it gets this much
3027 	     counted against it in addition to the reloading needed.  Each
3028 	     ? counts six here, so that the discouragement caused by a bad
3029 	     register class (which adds only two) counts 1/3 as much as a ?.  */
3030 	  int reject = 0;
3031 
3032 	  if (swapped)
3033 	    {
3034 	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3035 	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3036 	      /* Swap the duplicates too.  */
3037 	      for (i = 0; i < recog_data.n_dups; i++)
3038 		if (recog_data.dup_num[i] == commutative
3039 		    || recog_data.dup_num[i] == commutative + 1)
3040 		  *recog_data.dup_loc[i]
3041 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3042 
3043 	      std::swap (preferred_class[commutative],
3044 			 preferred_class[commutative + 1]);
3045 	      std::swap (pref_or_nothing[commutative],
3046 			 pref_or_nothing[commutative + 1]);
3047 	      std::swap (address_reloaded[commutative],
3048 			 address_reloaded[commutative + 1]);
3049 	    }
3050 
3051 	  this_earlyclobber = 0;
3052 
3053 	  for (i = 0; i < noperands; i++)
3054 	    {
3055 	      const char *p = constraints[i];
3056 	      char *end;
3057 	      int len;
3058 	      int win = 0;
3059 	      int did_match = 0;
3060 	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3061 	      int badop = 1;
3062 	      /* Nonzero => this operand can be reloaded if the alternative allows regs.  */
3063 	      int winreg = 0;
3064 	      int c;
3065 	      int m;
3066 	      rtx operand = recog_data.operand[i];
3067 	      int offset = 0;
3068 	      /* Nonzero means this is a MEM that must be reloaded into a reg
3069 		 regardless of what the constraint says.  */
3070 	      int force_reload = 0;
3071 	      int offmemok = 0;
3072 	      /* Nonzero if a constant forced into memory would be OK for this
3073 		 operand.  */
3074 	      int constmemok = 0;
3075 	      int earlyclobber = 0;
3076 	      enum constraint_num cn;
3077 	      enum reg_class cl;
3078 
3079 	      /* If the predicate accepts a unary operator, it means that
3080 		 we need to reload the operand, but do not do this for
3081 		 match_operator and friends.  */
3082 	      if (UNARY_P (operand) && *p != 0)
3083 		operand = XEXP (operand, 0);
3084 
3085 	      /* If the operand is a SUBREG, extract
3086 		 the REG or MEM (or maybe even a constant) within.
3087 		 (Constants can occur as a result of reg_equiv_constant.)  */
3088 
3089 	      while (GET_CODE (operand) == SUBREG)
3090 		{
3091 		  /* Offset only matters when operand is a REG and
3092 		     it is a hard reg.  This is because it is passed
3093 		     to reg_fits_class_p if it is a REG and all pseudos
3094 		     return 0 from that function.  */
3095 		  if (REG_P (SUBREG_REG (operand))
3096 		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3097 		    {
3098 		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3099 						 GET_MODE (SUBREG_REG (operand)),
3100 						 SUBREG_BYTE (operand),
3101 						 GET_MODE (operand)) < 0)
3102 			force_reload = 1;
3103 		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3104 						     GET_MODE (SUBREG_REG (operand)),
3105 						     SUBREG_BYTE (operand),
3106 						     GET_MODE (operand));
3107 		    }
3108 		  operand = SUBREG_REG (operand);
3109 		  /* Force reload if this is a constant or PLUS or if there may
3110 		     be a problem accessing OPERAND in the outer mode.  */
3111 		  scalar_int_mode inner_mode;
3112 		  if (CONSTANT_P (operand)
3113 		      || GET_CODE (operand) == PLUS
3114 		      /* We must force a reload of paradoxical SUBREGs
3115 			 of a MEM because the alignment of the inner value
3116 			 may not be enough to do the outer reference.  On
3117 			 big-endian machines, it may also reference outside
3118 			 the object.
3119 
3120 			 On machines that extend byte operations, if we have a
3121 			 SUBREG where both the inner and outer modes are no wider
3122 			 than a word and the inner mode is narrower, integral,
3123 			 and gets extended when loaded from memory, combine.cc has
3124 			 made assumptions about how the machine behaves for such
3125 			 register accesses.  If the data is, in fact, in memory we
3126 			 must always load using the size assumed to be in the
3127 			 register and let the insn do the different-sized
3128 			 accesses.
3129 
3130 			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3131 			 this case eliminate_regs has left non-paradoxical
3132 			 subregs for push_reload to see.  Make sure it does
3133 			 by forcing the reload.
3134 
3135 			 ??? When is it right at this stage to have a subreg
3136 			 of a mem that is _not_ to be handled specially?  IMO
3137 			 those should have been reduced to just a mem.  */
3138 		      || ((MEM_P (operand)
3139 			   || (REG_P (operand)
3140 			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3141 			  && (WORD_REGISTER_OPERATIONS
3142 			      || (((maybe_lt
3143 				    (GET_MODE_BITSIZE (GET_MODE (operand)),
3144 				     BIGGEST_ALIGNMENT))
3145 				   && (paradoxical_subreg_p
3146 				       (operand_mode[i], GET_MODE (operand)))))
3147 			      || BYTES_BIG_ENDIAN
3148 			      || (known_le (GET_MODE_SIZE (operand_mode[i]),
3149 					    UNITS_PER_WORD)
3150 				  && (is_a <scalar_int_mode>
3151 				      (GET_MODE (operand), &inner_mode))
3152 				  && (GET_MODE_SIZE (inner_mode)
3153 				      <= UNITS_PER_WORD)
3154 				  && paradoxical_subreg_p (operand_mode[i],
3155 							   inner_mode)
3156 				  && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3157 		      /* We must force a reload of a SUBREG's inner expression
3158 			 if it is a pseudo that will become a MEM and the MEM
3159 			 has a mode-dependent address, as in that case we
3160 			 obviously cannot change the mode of the MEM to that
3161 			 of the containing SUBREG as that would change the
3162 			 interpretation of the address.  */
3163 		      || (REG_P (operand)
3164 			  && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3165 			  && reg_equiv_mem (REGNO (operand))
3166 			  && (mode_dependent_address_p
3167 			      (XEXP (reg_equiv_mem (REGNO (operand)), 0),
3168 			       (MEM_ADDR_SPACE
3169 				(reg_equiv_mem (REGNO (operand)))))))
3170 		      )
3171 		    force_reload = 1;
3172 		}
3173 
3174 	      this_alternative[i] = NO_REGS;
3175 	      this_alternative_win[i] = 0;
3176 	      this_alternative_match_win[i] = 0;
3177 	      this_alternative_offmemok[i] = 0;
3178 	      this_alternative_earlyclobber[i] = 0;
3179 	      this_alternative_matches[i] = -1;
3180 
3181 	      /* An empty constraint or empty alternative
3182 		 allows anything which matched the pattern.  */
3183 	      if (*p == 0 || *p == ',')
3184 		win = 1, badop = 0;
3185 
3186 	      /* Scan this alternative's specs for this operand;
3187 		 set WIN if the operand fits any letter in this alternative.
3188 		 Otherwise, clear BADOP if this operand could
3189 		 fit some letter after reloads,
3190 		 or set WINREG if this operand could fit after reloads
3191 		 provided the constraint allows some registers.  */
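	      /* For example, a pseudo register matched against an "r"
		 constraint never WINs here (reg_fits_class_p returns 0 for
		 all pseudos), but WINREG is set because reloading it into a
		 general register would satisfy the constraint.  */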
3192 
3193 	      do
3194 		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3195 		  {
3196 		  case '\0':
3197 		    len = 0;
3198 		    break;
3199 		  case ',':
3200 		    c = '\0';
3201 		    break;
3202 
3203 		  case '?':
3204 		    reject += 6;
3205 		    break;
3206 
3207 		  case '!':
3208 		    reject = 600;
3209 		    break;
3210 
3211 		  case '#':
3212 		    /* Ignore rest of this alternative as far as
3213 		       reloading is concerned.  */
3214 		    do
3215 		      p++;
3216 		    while (*p && *p != ',');
3217 		    len = 0;
3218 		    break;
3219 
3220 		  case '0':  case '1':  case '2':  case '3':  case '4':
3221 		  case '5':  case '6':  case '7':  case '8':  case '9':
3222 		    m = strtoul (p, &end, 10);
3223 		    p = end;
3224 		    len = 0;
3225 
3226 		    this_alternative_matches[i] = m;
3227 		    /* We are supposed to match a previous operand.
3228 		       If we do, we win if that one did.
3229 		       If we do not, count both of the operands as losers.
3230 		       (This is too conservative, since most of the time
3231 		       only a single reload insn will be needed to make
3232 		       the two operands win.  As a result, this alternative
3233 		       may be rejected when it is actually desirable.)  */
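		    /* (The expression 2 * commutative + 1 - x used below just
		       exchanges COMMUTATIVE and COMMUTATIVE + 1, so the
		       operands_match table, which was filled in for the
		       unswapped operands, is consulted at the right entries.)  */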
3234 		    if ((swapped && (m != commutative || i != commutative + 1))
3235 			/* If we are matching as if two operands were swapped,
3236 			   also pretend that operands_match had been computed
3237 			   with swapped.
3238 			   But if I is the second of those and C is the first,
3239 			   don't exchange them, because operands_match is valid
3240 			   only on one side of its diagonal.  */
3241 			? (operands_match
3242 			   [(m == commutative || m == commutative + 1)
3243 			    ? 2 * commutative + 1 - m : m]
3244 			   [(i == commutative || i == commutative + 1)
3245 			    ? 2 * commutative + 1 - i : i])
3246 			: operands_match[m][i])
3247 		      {
3248 			/* If we are matching a non-offsettable address where an
3249 			   offsettable address was expected, then we must reject
3250 			   this combination, because we can't reload it.  */
3251 			if (this_alternative_offmemok[m]
3252 			    && MEM_P (recog_data.operand[m])
3253 			    && this_alternative[m] == NO_REGS
3254 			    && ! this_alternative_win[m])
3255 			  bad = 1;
3256 
3257 			did_match = this_alternative_win[m];
3258 		      }
3259 		    else
3260 		      {
3261 			/* Operands don't match.  */
3262 			rtx value;
3263 			int loc1, loc2;
3264 			/* Retroactively mark the operand we had to match
3265 			   as a loser, if it wasn't already.  */
3266 			if (this_alternative_win[m])
3267 			  losers++;
3268 			this_alternative_win[m] = 0;
3269 			if (this_alternative[m] == NO_REGS)
3270 			  bad = 1;
3271 			/* But count the pair only once in the total badness of
3272 			   this alternative, if the pair can be a dummy reload.
3273 			   The pointers in operand_loc are not swapped; swap
3274 			   them by hand if necessary.  */
3275 			if (swapped && i == commutative)
3276 			  loc1 = commutative + 1;
3277 			else if (swapped && i == commutative + 1)
3278 			  loc1 = commutative;
3279 			else
3280 			  loc1 = i;
3281 			if (swapped && m == commutative)
3282 			  loc2 = commutative + 1;
3283 			else if (swapped && m == commutative + 1)
3284 			  loc2 = commutative;
3285 			else
3286 			  loc2 = m;
3287 			value
3288 			  = find_dummy_reload (recog_data.operand[i],
3289 					       recog_data.operand[m],
3290 					       recog_data.operand_loc[loc1],
3291 					       recog_data.operand_loc[loc2],
3292 					       operand_mode[i], operand_mode[m],
3293 					       this_alternative[m], -1,
3294 					       this_alternative_earlyclobber[m]);
3295 
3296 			if (value != 0)
3297 			  losers--;
3298 		      }
3299 		    /* This can be fixed with reloads if the operand
3300 		       we are supposed to match can be fixed with reloads.  */
3301 		    badop = 0;
3302 		    this_alternative[i] = this_alternative[m];
3303 
3304 		    /* If we have to reload this operand and some previous
3305 		       operand also had to match the same thing as this
3306 		       operand, we don't know how to do that.  So reject this
3307 		       alternative.  */
3308 		    if (! did_match || force_reload)
3309 		      for (j = 0; j < i; j++)
3310 			if (this_alternative_matches[j]
3311 			    == this_alternative_matches[i])
3312 			  {
3313 			    badop = 1;
3314 			    break;
3315 			  }
3316 		    break;
3317 
3318 		  case 'p':
3319 		    /* All necessary reloads for an address_operand
3320 		       were handled in find_reloads_address.  */
3321 		    this_alternative[i]
3322 		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3323 					ADDRESS, SCRATCH);
3324 		    win = 1;
3325 		    badop = 0;
3326 		    break;
3327 
3328 		  case TARGET_MEM_CONSTRAINT:
3329 		    if (force_reload)
3330 		      break;
3331 		    if (MEM_P (operand)
3332 			|| (REG_P (operand)
3333 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3334 			    && reg_renumber[REGNO (operand)] < 0))
3335 		      win = 1;
3336 		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3337 		      badop = 0;
3338 		    constmemok = 1;
3339 		    break;
3340 
3341 		  case '<':
3342 		    if (MEM_P (operand)
3343 			&& ! address_reloaded[i]
3344 			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3345 			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3346 		      win = 1;
3347 		    break;
3348 
3349 		  case '>':
3350 		    if (MEM_P (operand)
3351 			&& ! address_reloaded[i]
3352 			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3353 			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3354 		      win = 1;
3355 		    break;
3356 
3357 		    /* Memory operand whose address is not offsettable.  */
3358 		  case 'V':
3359 		    if (force_reload)
3360 		      break;
3361 		    if (MEM_P (operand)
3362 			&& ! (ind_levels ? offsettable_memref_p (operand)
3363 			      : offsettable_nonstrict_memref_p (operand))
3364 			/* Certain mem addresses will become offsettable
3365 			   after they themselves are reloaded.  This is important;
3366 			   we don't want our own handling of unoffsettables
3367 			   to override the handling of reg_equiv_address.  */
3368 			&& !(REG_P (XEXP (operand, 0))
3369 			     && (ind_levels == 0
3370 				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3371 		      win = 1;
3372 		    break;
3373 
3374 		    /* Memory operand whose address is offsettable.  */
3375 		  case 'o':
3376 		    if (force_reload)
3377 		      break;
3378 		    if ((MEM_P (operand)
3379 			 /* If IND_LEVELS, find_reloads_address won't reload a
3380 			    pseudo that didn't get a hard reg, so we have to
3381 			    reject that case.  */
3382 			 && ((ind_levels ? offsettable_memref_p (operand)
3383 			      : offsettable_nonstrict_memref_p (operand))
3384 			     /* A reloaded address is offsettable because it is now
3385 				just a simple register indirect.  */
3386 			     || address_reloaded[i] == 1))
3387 			|| (REG_P (operand)
3388 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3389 			    && reg_renumber[REGNO (operand)] < 0
3390 			    /* If reg_equiv_address is nonzero, we will be
3391 			       loading it into a register; hence it will be
3392 			       offsettable, but we cannot say that reg_equiv_mem
3393 			       is offsettable without checking.  */
3394 			    && ((reg_equiv_mem (REGNO (operand)) != 0
3395 				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3396 				|| (reg_equiv_address (REGNO (operand)) != 0))))
3397 		      win = 1;
3398 		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3399 			|| MEM_P (operand))
3400 		      badop = 0;
3401 		    constmemok = 1;
3402 		    offmemok = 1;
3403 		    break;
3404 
3405 		  case '&':
3406 		    /* Output operand that is stored before the need for the
3407 		       input operands (and their index registers) is over.  */
3408 		    earlyclobber = 1, this_earlyclobber = 1;
3409 		    break;
3410 
3411 		  case 'X':
3412 		    force_reload = 0;
3413 		    win = 1;
3414 		    break;
3415 
3416 		  case 'g':
3417 		    if (! force_reload
3418 			/* A PLUS is never a valid operand, but reload can make
3419 			   it from a register when eliminating registers.  */
3420 			&& GET_CODE (operand) != PLUS
3421 			/* A SCRATCH is not a valid operand.  */
3422 			&& GET_CODE (operand) != SCRATCH
3423 			&& (! CONSTANT_P (operand)
3424 			    || ! flag_pic
3425 			    || LEGITIMATE_PIC_OPERAND_P (operand))
3426 			&& (GENERAL_REGS == ALL_REGS
3427 			    || !REG_P (operand)
3428 			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3429 				&& reg_renumber[REGNO (operand)] < 0)))
3430 		      win = 1;
3431 		    cl = GENERAL_REGS;
3432 		    goto reg;
3433 
3434 		  default:
3435 		    cn = lookup_constraint (p);
3436 		    switch (get_constraint_type (cn))
3437 		      {
3438 		      case CT_REGISTER:
3439 			cl = reg_class_for_constraint (cn);
3440 			if (cl != NO_REGS)
3441 			  goto reg;
3442 			break;
3443 
3444 		      case CT_CONST_INT:
3445 			if (CONST_INT_P (operand)
3446 			    && (insn_const_int_ok_for_constraint
3447 				(INTVAL (operand), cn)))
3448 			  win = true;
3449 			break;
3450 
3451 		      case CT_MEMORY:
3452 		      case CT_RELAXED_MEMORY:
3453 			if (force_reload)
3454 			  break;
3455 			if (constraint_satisfied_p (operand, cn))
3456 			  win = 1;
3457 			/* If the address was already reloaded,
3458 			   we win as well.  */
3459 			else if (MEM_P (operand) && address_reloaded[i] == 1)
3460 			  win = 1;
3461 			/* Likewise if the address will be reloaded because
3462 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3463 			   we have to check.  */
3464 			else if (REG_P (operand)
3465 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3466 				 && reg_renumber[REGNO (operand)] < 0
3467 				 && ((reg_equiv_mem (REGNO (operand)) != 0
3468 				      && (constraint_satisfied_p
3469 					  (reg_equiv_mem (REGNO (operand)),
3470 					   cn)))
3471 				     || (reg_equiv_address (REGNO (operand))
3472 					 != 0)))
3473 			  win = 1;
3474 
3475 			/* If we didn't already win, we can reload
3476 			   constants via force_const_mem, and other
3477 			   MEMs by reloading the address like for 'o'.  */
3478 			if (CONST_POOL_OK_P (operand_mode[i], operand)
3479 			    || MEM_P (operand))
3480 			  badop = 0;
3481 			constmemok = 1;
3482 			offmemok = 1;
3483 			break;
3484 
3485 		      case CT_SPECIAL_MEMORY:
3486 			if (force_reload)
3487 			  break;
3488 			if (constraint_satisfied_p (operand, cn))
3489 			  win = 1;
3490 			/* Likewise if the address will be reloaded because
3491 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3492 			   we have to check.  */
3493 			else if (REG_P (operand)
3494 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3495 				 && reg_renumber[REGNO (operand)] < 0
3496 				 && reg_equiv_mem (REGNO (operand)) != 0
3497 				 && (constraint_satisfied_p
3498 				     (reg_equiv_mem (REGNO (operand)), cn)))
3499 			  win = 1;
3500 			break;
3501 
3502 		      case CT_ADDRESS:
3503 			if (constraint_satisfied_p (operand, cn))
3504 			  win = 1;
3505 
3506 			/* If we didn't already win, we can reload
3507 			   the address into a base register.  */
3508 			this_alternative[i]
3509 			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3510 					    ADDRESS, SCRATCH);
3511 			badop = 0;
3512 			break;
3513 
3514 		      case CT_FIXED_FORM:
3515 			if (constraint_satisfied_p (operand, cn))
3516 			  win = 1;
3517 			break;
3518 		      }
3519 		    break;
3520 
3521 		  reg:
3522 		    this_alternative[i]
3523 		      = reg_class_subunion[this_alternative[i]][cl];
3524 		    if (GET_MODE (operand) == BLKmode)
3525 		      break;
3526 		    winreg = 1;
3527 		    if (REG_P (operand)
3528 			&& reg_fits_class_p (operand, this_alternative[i],
3529 					 offset, GET_MODE (recog_data.operand[i])))
3530 		      win = 1;
3531 		    break;
3532 		  }
3533 	      while ((p += len), c);
3534 
3535 	      if (swapped == (commutative >= 0 ? 1 : 0))
3536 		constraints[i] = p;
3537 
3538 	      /* If this operand could be handled with a reg,
3539 		 and some reg is allowed, then this operand can be handled.  */
3540 	      if (winreg && this_alternative[i] != NO_REGS
3541 		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3542 		badop = 0;
3543 
3544 	      /* Record which operands fit this alternative.  */
3545 	      this_alternative_earlyclobber[i] = earlyclobber;
3546 	      if (win && ! force_reload)
3547 		this_alternative_win[i] = 1;
3548 	      else if (did_match && ! force_reload)
3549 		this_alternative_match_win[i] = 1;
3550 	      else
3551 		{
3552 		  int const_to_mem = 0;
3553 
3554 		  this_alternative_offmemok[i] = offmemok;
3555 		  losers++;
3556 		  if (badop)
3557 		    bad = 1;
3558 		  /* Alternative loses if it has no regs for a reg operand.  */
3559 		  if (REG_P (operand)
3560 		      && this_alternative[i] == NO_REGS
3561 		      && this_alternative_matches[i] < 0)
3562 		    bad = 1;
3563 
3564 		  /* If this is a constant that is reloaded into the desired
3565 		     class by copying it to memory first, count that as another
3566 		     reload.  This is consistent with other code and is
3567 		     required to avoid choosing another alternative when
3568 		     the constant is moved into memory by this function on
3569 		     an early reload pass.  Note that the test here is
3570 		     precisely the same as in the code below that calls
3571 		     force_const_mem.  */
3572 		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3573 		      && ((targetm.preferred_reload_class (operand,
3574 							   this_alternative[i])
3575 			   == NO_REGS)
3576 			  || no_input_reloads))
3577 		    {
3578 		      const_to_mem = 1;
3579 		      if (this_alternative[i] != NO_REGS)
3580 			losers++;
3581 		    }
3582 
3583 		  /* Alternative loses if it requires a type of reload not
3584 		     permitted for this insn.  We can always reload SCRATCH
3585 		     and objects with a REG_UNUSED note.  */
3586 		  if (GET_CODE (operand) != SCRATCH
3587 		      && modified[i] != RELOAD_READ && no_output_reloads
3588 		      && ! find_reg_note (insn, REG_UNUSED, operand))
3589 		    bad = 1;
3590 		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3591 			   && ! const_to_mem)
3592 		    bad = 1;
3593 
3594 		  /* If we can't reload this value at all, reject this
3595 		     alternative.  Note that we could also lose due to
3596 		     LIMIT_RELOAD_CLASS, but we don't check that
3597 		     here.  */
3598 
3599 		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3600 		    {
3601 		      if (targetm.preferred_reload_class (operand,
3602 							  this_alternative[i])
3603 			  == NO_REGS)
3604 			reject = 600;
3605 
3606 		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3607 			  && (targetm.preferred_output_reload_class (operand,
3608 							    this_alternative[i])
3609 			      == NO_REGS))
3610 			reject = 600;
3611 		    }
3612 
3613 		  /* We prefer to reload pseudos over reloading other things,
3614 		     since such reloads may be able to be eliminated later.
3615 		     If we are reloading a SCRATCH, we won't be generating any
3616 		     insns, just using a register, so it is also preferred.
3617 		     So bump REJECT in other cases.  Don't do this in the
3618 		     case where we are forcing a constant into memory and
3619 		     it will then win, since we don't want a different
3620 		     alternative to match in that case.  */
3621 		  if (! (REG_P (operand)
3622 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3623 		      && GET_CODE (operand) != SCRATCH
3624 		      && ! (const_to_mem && constmemok))
3625 		    reject += 2;
3626 
3627 		  /* Input reloads can be inherited more often than output
3628 		     reloads can be removed, so penalize output reloads.  */
3629 		  if (operand_type[i] != RELOAD_FOR_INPUT
3630 		      && GET_CODE (operand) != SCRATCH)
3631 		    reject++;
3632 		}
3633 
3634 	      /* If this operand is a pseudo register that didn't get
3635 		 a hard reg and this alternative accepts some
3636 		 register, see if the class that we want is a subset
3637 		 of the preferred class for this register.  If not,
3638 		 but it intersects that class, we'd like to use the
3639 		 intersection, but the best we can do is to use the
3640 		 preferred class, if it is instead a subset of the
3641 		 class we want in this alternative.  If we can't use
3642 		 it, show that usage of this alternative should be
3643 		 discouraged; it will be discouraged more still if the
3644 		 register is `preferred or nothing'.  We do this
3645 		 because it increases the chance of reusing our spill
3646 		 register in a later insn and avoiding a pair of
3647 		 memory stores and loads.
3648 
3649 		 Don't bother with this if this alternative will
3650 		 accept this operand.
3651 
3652 		 Don't do this for a multiword operand, since it is
3653 		 only a small win and has the risk of requiring more
3654 		 spill registers, which could cause a large loss.
3655 
3656 		 Don't do this if the preferred class has only one
3657 		 register because we might otherwise exhaust the
3658 		 class.  */
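	      /* Concretely: when the class this alternative wants is not a
		 subset of the pseudo's preferred class, we narrow
		 this_alternative[i] to the preferred class if that is itself
		 a subset of what the alternative wants; otherwise we add 2
		 to REJECT (4 if the register is "preferred or nothing").  */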
3659 
3660 	      if (! win && ! did_match
3661 		  && this_alternative[i] != NO_REGS
3662 		  && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3663 		  && reg_class_size [(int) preferred_class[i]] > 0
3664 		  && ! small_register_class_p (preferred_class[i]))
3665 		{
3666 		  if (! reg_class_subset_p (this_alternative[i],
3667 					    preferred_class[i]))
3668 		    {
3669 		      /* Since we don't have a way of forming a register
3670 			 class for the intersection, we just do
3671 			 something special if the preferred class is a
3672 			 subset of the class we have; that's the most
3673 			 common case anyway.  */
3674 		      if (reg_class_subset_p (preferred_class[i],
3675 					      this_alternative[i]))
3676 			this_alternative[i] = preferred_class[i];
3677 		      else
3678 			reject += (2 + 2 * pref_or_nothing[i]);
3679 		    }
3680 		}
3681 	    }
3682 
3683 	  /* Now see if any output operands that are marked "earlyclobber"
3684 	     in this alternative conflict with any input operands
3685 	     or any memory addresses.  */
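	  /* An earlyclobber output is written before the insn has finished
	     using its inputs, so an input (or a memory address) that might
	     share its register would be clobbered; either that conflicting
	     operand or the earlyclobber output itself must then be reloaded.  */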
3686 
3687 	  for (i = 0; i < noperands; i++)
3688 	    if (this_alternative_earlyclobber[i]
3689 		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3690 	      {
3691 		struct decomposition early_data;
3692 
3693 		early_data = decompose (recog_data.operand[i]);
3694 
3695 		gcc_assert (modified[i] != RELOAD_READ);
3696 
3697 		if (this_alternative[i] == NO_REGS)
3698 		  {
3699 		    this_alternative_earlyclobber[i] = 0;
3700 		    gcc_assert (this_insn_is_asm);
3701 		    error_for_asm (this_insn,
3702 			      "%<&%> constraint used with no register class");
3703 		  }
3704 
3705 		for (j = 0; j < noperands; j++)
3706 		  /* Is this an input operand or a memory ref?  */
3707 		  if ((MEM_P (recog_data.operand[j])
3708 		       || modified[j] != RELOAD_WRITE)
3709 		      && j != i
3710 		      /* Ignore things like match_operator operands.  */
3711 		      && !recog_data.is_operator[j]
3712 		      /* Don't count an input operand that is constrained to match
3713 			 the early clobber operand.  */
3714 		      && ! (this_alternative_matches[j] == i
3715 			    && rtx_equal_p (recog_data.operand[i],
3716 					    recog_data.operand[j]))
3717 		      /* Is it altered by storing the earlyclobber operand?  */
3718 		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3719 				    early_data))
3720 		    {
3721 		      /* If the output is in a non-empty few-regs class,
3722 			 it's costly to reload it, so reload the input instead.  */
3723 		      if (small_register_class_p (this_alternative[i])
3724 			  && (REG_P (recog_data.operand[j])
3725 			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3726 			{
3727 			  losers++;
3728 			  this_alternative_win[j] = 0;
3729 			  this_alternative_match_win[j] = 0;
3730 			}
3731 		      else
3732 			break;
3733 		    }
3734 		/* If an earlyclobber operand conflicts with something,
3735 		   it must be reloaded, so request this and count the cost.  */
3736 		if (j != noperands)
3737 		  {
3738 		    losers++;
3739 		    this_alternative_win[i] = 0;
3740 		    this_alternative_match_win[j] = 0;
3741 		    for (j = 0; j < noperands; j++)
3742 		      if (this_alternative_matches[j] == i
3743 			  && this_alternative_match_win[j])
3744 			{
3745 			  this_alternative_win[j] = 0;
3746 			  this_alternative_match_win[j] = 0;
3747 			  losers++;
3748 			}
3749 		  }
3750 	      }
3751 
3752 	  /* If one alternative accepts all the operands, no reload required,
3753 	     choose that alternative; don't consider the remaining ones.  */
3754 	  if (losers == 0)
3755 	    {
3756 	      /* Unswap these so that they are never swapped at `finish'.  */
3757 	      if (swapped)
3758 		{
3759 		  recog_data.operand[commutative] = substed_operand[commutative];
3760 		  recog_data.operand[commutative + 1]
3761 		    = substed_operand[commutative + 1];
3762 		}
3763 	      for (i = 0; i < noperands; i++)
3764 		{
3765 		  goal_alternative_win[i] = this_alternative_win[i];
3766 		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3767 		  goal_alternative[i] = this_alternative[i];
3768 		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3769 		  goal_alternative_matches[i] = this_alternative_matches[i];
3770 		  goal_alternative_earlyclobber[i]
3771 		    = this_alternative_earlyclobber[i];
3772 		}
3773 	      goal_alternative_number = this_alternative_number;
3774 	      goal_alternative_swapped = swapped;
3775 	      goal_earlyclobber = this_earlyclobber;
3776 	      goto finish;
3777 	    }
3778 
3779 	  /* REJECT, set by the ! and ? constraint characters and when a register
3780 	     would be reloaded into a non-preferred class, discourages the use of
3781 	     this alternative for a reload goal.  REJECT is incremented by six
3782 	     for each ? and two for each non-preferred class.  */
3783 	  losers = losers * 6 + reject;
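	  /* E.g. an alternative needing two reloads plus one "?" scores
	     2 * 6 + 6 = 18; alternatives with lower scores are preferred.  */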
3784 
3785 	  /* If this alternative can be made to work by reloading,
3786 	     and it needs less reloading than the others checked so far,
3787 	     record it as the chosen goal for reloading.  */
3788 	  if (! bad)
3789 	    {
3790 	      if (best > losers)
3791 		{
3792 		  for (i = 0; i < noperands; i++)
3793 		    {
3794 		      goal_alternative[i] = this_alternative[i];
3795 		      goal_alternative_win[i] = this_alternative_win[i];
3796 		      goal_alternative_match_win[i]
3797 			= this_alternative_match_win[i];
3798 		      goal_alternative_offmemok[i]
3799 			= this_alternative_offmemok[i];
3800 		      goal_alternative_matches[i] = this_alternative_matches[i];
3801 		      goal_alternative_earlyclobber[i]
3802 			= this_alternative_earlyclobber[i];
3803 		    }
3804 		  goal_alternative_swapped = swapped;
3805 		  best = losers;
3806 		  goal_alternative_number = this_alternative_number;
3807 		  goal_earlyclobber = this_earlyclobber;
3808 		}
3809 	    }
3810 
3811 	  if (swapped)
3812 	    {
3813 	      /* If the commutative operands have been swapped, swap
3814 		 them back in order to check the next alternative.  */
3815 	      recog_data.operand[commutative] = substed_operand[commutative];
3816 	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3817 	      /* Unswap the duplicates too.  */
3818 	      for (i = 0; i < recog_data.n_dups; i++)
3819 		if (recog_data.dup_num[i] == commutative
3820 		    || recog_data.dup_num[i] == commutative + 1)
3821 		  *recog_data.dup_loc[i]
3822 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3823 
3824 	      /* Unswap the operand related information as well.  */
3825 	      std::swap (preferred_class[commutative],
3826 			 preferred_class[commutative + 1]);
3827 	      std::swap (pref_or_nothing[commutative],
3828 			 pref_or_nothing[commutative + 1]);
3829 	      std::swap (address_reloaded[commutative],
3830 			 address_reloaded[commutative + 1]);
3831 	    }
3832 	}
3833     }
3834 
3835   /* The operands don't meet the constraints.
3836      goal_alternative describes the alternative
3837      that we could reach by reloading the fewest operands.
3838      Reload so as to fit it.  */
3839 
3840   if (best == MAX_RECOG_OPERANDS * 2 + 600)
3841     {
3842       /* No alternative works with reloads??  */
3843       if (insn_code_number >= 0)
3844 	fatal_insn ("unable to generate reloads for:", insn);
3845       error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3846       /* Avoid further trouble with this insn.  */
3847       PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3848       n_reloads = 0;
3849       return 0;
3850     }
3851 
3852   /* Jump to `finish' from above if all operands are valid already.
3853      In that case, goal_alternative_win is all 1.  */
3854  finish:
3855 
3856   /* Right now, for any pair of operands I and J that are required to match,
3857      with I < J,
3858      goal_alternative_matches[J] is I.
3859      Set up goal_alternative_matched as the inverse function:
3860      goal_alternative_matched[I] = J.  */
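  /* For example, if operand 2's constraint was "0", then
     goal_alternative_matches[2] is 0, and (when operand 2 did not win)
     goal_alternative_matched[0] becomes 2.  */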
3861 
3862   for (i = 0; i < noperands; i++)
3863     goal_alternative_matched[i] = -1;
3864 
3865   for (i = 0; i < noperands; i++)
3866     if (! goal_alternative_win[i]
3867 	&& goal_alternative_matches[i] >= 0)
3868       goal_alternative_matched[goal_alternative_matches[i]] = i;
3869 
3870   for (i = 0; i < noperands; i++)
3871     goal_alternative_win[i] |= goal_alternative_match_win[i];
3872 
3873   /* If the best alternative is with operands 1 and 2 swapped,
3874      consider them swapped before reporting the reloads.  Update the
3875      operand numbers of any reloads already pushed.  */
3876 
3877   if (goal_alternative_swapped)
3878     {
3879       std::swap (substed_operand[commutative],
3880 		 substed_operand[commutative + 1]);
3881       std::swap (recog_data.operand[commutative],
3882 		 recog_data.operand[commutative + 1]);
3883       std::swap (*recog_data.operand_loc[commutative],
3884 		 *recog_data.operand_loc[commutative + 1]);
3885 
3886       for (i = 0; i < recog_data.n_dups; i++)
3887 	if (recog_data.dup_num[i] == commutative
3888 	    || recog_data.dup_num[i] == commutative + 1)
3889 	  *recog_data.dup_loc[i]
3890 	    = recog_data.operand[(int) recog_data.dup_num[i]];
3891 
3892       for (i = 0; i < n_reloads; i++)
3893 	{
3894 	  if (rld[i].opnum == commutative)
3895 	    rld[i].opnum = commutative + 1;
3896 	  else if (rld[i].opnum == commutative + 1)
3897 	    rld[i].opnum = commutative;
3898 	}
3899     }
3900 
3901   for (i = 0; i < noperands; i++)
3902     {
3903       operand_reloadnum[i] = -1;
3904 
3905       /* If this is an earlyclobber operand, we need to widen the scope.
3906 	 The reload must remain valid from the start of the insn being
3907 	 reloaded until after the operand is stored into its destination.
3908 	 We approximate this with RELOAD_OTHER even though we know that we
3909 	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3910 
3911 	 One special case that is worth checking is when we have an
3912 	 output that is earlyclobber but isn't used past the insn (typically
3913 	 a SCRATCH).  In this case, we need only keep the reload live
3914 	 through the insn itself, not through any of our input or output
3915 	 reloads.
3916 	 But we must not accidentally narrow the scope of an existing
3917 	 RELOAD_OTHER reload - leave these alone.
3918 
3919 	 In any case, anything needed to address this operand can remain
3920 	 categorized however it was before.  */
3921 
3922       if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3923 	operand_type[i]
3924 	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3925 	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3926     }
3927 
3928   /* Any constants that aren't allowed and can't be reloaded
3929      into registers are here changed into memory references.  */
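  /* For example, a constant that the target will not reload directly into
     any register of the wanted class (its preferred_reload_class hook
     returns NO_REGS), or that may not be reloaded at all because input
     reloads are forbidden, is placed in the constant pool by
     force_const_mem and then handled as a memory operand.  */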
3930   for (i = 0; i < noperands; i++)
3931     if (! goal_alternative_win[i])
3932       {
3933 	rtx op = recog_data.operand[i];
3934 	rtx subreg = NULL_RTX;
3935 	rtx plus = NULL_RTX;
3936 	machine_mode mode = operand_mode[i];
3937 
3938 	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3939 	   push_reload so we have to let them pass here.  */
3940 	if (GET_CODE (op) == SUBREG)
3941 	  {
3942 	    subreg = op;
3943 	    op = SUBREG_REG (op);
3944 	    mode = GET_MODE (op);
3945 	  }
3946 
3947 	if (GET_CODE (op) == PLUS)
3948 	  {
3949 	    plus = op;
3950 	    op = XEXP (op, 1);
3951 	  }
3952 
3953 	if (CONST_POOL_OK_P (mode, op)
3954 	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3955 		 == NO_REGS)
3956 		|| no_input_reloads))
3957 	  {
3958 	    int this_address_reloaded;
3959 	    rtx tem = force_const_mem (mode, op);
3960 
3961 	    /* If we stripped a SUBREG or a PLUS above add it back.  */
3962 	    if (plus != NULL_RTX)
3963 	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3964 
3965 	    if (subreg != NULL_RTX)
3966 	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3967 
3968 	    this_address_reloaded = 0;
3969 	    substed_operand[i] = recog_data.operand[i]
3970 	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3971 				     0, insn, &this_address_reloaded);
3972 
3973 	    /* If the alternative accepts constant pool refs directly
3974 	       there will be no reload needed at all.  */
3975 	    if (plus == NULL_RTX
3976 		&& subreg == NULL_RTX
3977 		&& alternative_allows_const_pool_ref (this_address_reloaded != 1
3978 						      ? substed_operand[i]
3979 						      : NULL,
3980 						      recog_data.constraints[i],
3981 						      goal_alternative_number))
3982 	      goal_alternative_win[i] = 1;
3983 	  }
3984       }
3985 
3986   /* Record the values of the earlyclobber operands for the caller.  */
3987   if (goal_earlyclobber)
3988     for (i = 0; i < noperands; i++)
3989       if (goal_alternative_earlyclobber[i])
3990 	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3991 
3992   /* Now record reloads for all the operands that need them.  */
3993   for (i = 0; i < noperands; i++)
3994     if (! goal_alternative_win[i])
3995       {
3996 	/* Operands that match previous ones have already been handled.  */
3997 	if (goal_alternative_matches[i] >= 0)
3998 	  ;
3999 	/* Handle an operand with a nonoffsettable address
4000 	   appearing where an offsettable address will do
4001 	   by reloading the address into a base register.
4002 
4003 	   ??? We can also do this when the operand is a register and
4004 	   reg_equiv_mem is not offsettable, but this is a bit tricky,
4005 	   so we don't bother with it.  It may not be worth doing.  */
4006 	else if (goal_alternative_matched[i] == -1
4007 		 && goal_alternative_offmemok[i]
4008 		 && MEM_P (recog_data.operand[i]))
4009 	  {
4010 	    /* If the address to be reloaded is a VOIDmode constant,
4011 	       use the default address mode as mode of the reload register,
4012 	       as would have been done by find_reloads_address.  */
4013 	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4014 	    machine_mode address_mode;
4015 
4016 	    address_mode = get_address_mode (recog_data.operand[i]);
4017 	    operand_reloadnum[i]
4018 	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4019 			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4020 			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
4021 			     address_mode,
4022 			     VOIDmode, 0, 0, i, RELOAD_OTHER);
4023 	    rld[operand_reloadnum[i]].inc
4024 	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4025 
4026 	    /* If this operand is an output, we will have made any
4027 	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4028 	       now we are treating part of the operand as an input, so
4029 	       we must change these to RELOAD_FOR_OTHER_ADDRESS.  */
4030 
4031 	    if (modified[i] == RELOAD_WRITE)
4032 	      {
4033 		for (j = 0; j < n_reloads; j++)
4034 		  {
4035 		    if (rld[j].opnum == i)
4036 		      {
4037 			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4038 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4039 			else if (rld[j].when_needed
4040 				 == RELOAD_FOR_OUTADDR_ADDRESS)
4041 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4042 		      }
4043 		  }
4044 	      }
4045 	  }
4046 	else if (goal_alternative_matched[i] == -1)
4047 	  {
4048 	    operand_reloadnum[i]
4049 	      = push_reload ((modified[i] != RELOAD_WRITE
4050 			      ? recog_data.operand[i] : 0),
4051 			     (modified[i] != RELOAD_READ
4052 			      ? recog_data.operand[i] : 0),
4053 			     (modified[i] != RELOAD_WRITE
4054 			      ? recog_data.operand_loc[i] : 0),
4055 			     (modified[i] != RELOAD_READ
4056 			      ? recog_data.operand_loc[i] : 0),
4057 			     (enum reg_class) goal_alternative[i],
4058 			     (modified[i] == RELOAD_WRITE
4059 			      ? VOIDmode : operand_mode[i]),
4060 			     (modified[i] == RELOAD_READ
4061 			      ? VOIDmode : operand_mode[i]),
4062 			     (insn_code_number < 0 ? 0
4063 			      : insn_data[insn_code_number].operand[i].strict_low),
4064 			     0, i, operand_type[i]);
4065 	  }
4066 	/* In a matching pair of operands, one must be input only
4067 	   and the other must be output only.
4068 	   Pass the input operand as IN and the other as OUT.  */
4069 	else if (modified[i] == RELOAD_READ
4070 		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4071 	  {
4072 	    operand_reloadnum[i]
4073 	      = push_reload (recog_data.operand[i],
4074 			     recog_data.operand[goal_alternative_matched[i]],
4075 			     recog_data.operand_loc[i],
4076 			     recog_data.operand_loc[goal_alternative_matched[i]],
4077 			     (enum reg_class) goal_alternative[i],
4078 			     operand_mode[i],
4079 			     operand_mode[goal_alternative_matched[i]],
4080 			     0, 0, i, RELOAD_OTHER);
4081 	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4082 	  }
4083 	else if (modified[i] == RELOAD_WRITE
4084 		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4085 	  {
4086 	    operand_reloadnum[goal_alternative_matched[i]]
4087 	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4088 			     recog_data.operand[i],
4089 			     recog_data.operand_loc[goal_alternative_matched[i]],
4090 			     recog_data.operand_loc[i],
4091 			     (enum reg_class) goal_alternative[i],
4092 			     operand_mode[goal_alternative_matched[i]],
4093 			     operand_mode[i],
4094 			     0, 0, i, RELOAD_OTHER);
4095 	    operand_reloadnum[i] = output_reloadnum;
4096 	  }
4097 	else
4098 	  {
4099 	    gcc_assert (insn_code_number < 0);
4100 	    error_for_asm (insn, "inconsistent operand constraints "
4101 			   "in an %<asm%>");
4102 	    /* Avoid further trouble with this insn.  */
4103 	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4104 	    n_reloads = 0;
4105 	    return 0;
4106 	  }
4107       }
4108     else if (goal_alternative_matched[i] < 0
4109 	     && goal_alternative_matches[i] < 0
4110 	     && address_operand_reloaded[i] != 1
4111 	     && optimize)
4112       {
4113 	/* For each non-matching operand that's a MEM or a pseudo-register
4114 	   that didn't get a hard register, make an optional reload.
4115 	   This may get done even if the insn needs no reloads otherwise.  */
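	/* Unlike the mandatory reloads pushed above, an optional reload
	   (requested by passing 1 for push_reload's OPTIONAL argument below)
	   is simply dropped later if no suitable reload register turns out
	   to be available.  */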
4116 
4117 	rtx operand = recog_data.operand[i];
4118 
4119 	while (GET_CODE (operand) == SUBREG)
4120 	  operand = SUBREG_REG (operand);
4121 	if ((MEM_P (operand)
4122 	     || (REG_P (operand)
4123 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4124 	    /* If this is only for an output, the optional reload would not
4125 	       actually cause us to use a register now, just note that
4126 	       something is stored here.  */
4127 	    && (goal_alternative[i] != NO_REGS
4128 		|| modified[i] == RELOAD_WRITE)
4129 	    && ! no_input_reloads
4130 	    /* An optional output reload might allow INSN to be deleted later.
4131 	       We must not make in-out reloads on insns for which output
4132 	       reloads are not permitted.
4133 	       If this is an asm, we can't delete it; we must not even call
4134 	       push_reload for an optional output reload in this case,
4135 	       because we can't be sure that the constraint allows a register,
4136 	       and push_reload verifies the constraints for asms.  */
4137 	    && (modified[i] == RELOAD_READ
4138 		|| (! no_output_reloads && ! this_insn_is_asm)))
4139 	  operand_reloadnum[i]
4140 	    = push_reload ((modified[i] != RELOAD_WRITE
4141 			    ? recog_data.operand[i] : 0),
4142 			   (modified[i] != RELOAD_READ
4143 			    ? recog_data.operand[i] : 0),
4144 			   (modified[i] != RELOAD_WRITE
4145 			    ? recog_data.operand_loc[i] : 0),
4146 			   (modified[i] != RELOAD_READ
4147 			    ? recog_data.operand_loc[i] : 0),
4148 			   (enum reg_class) goal_alternative[i],
4149 			   (modified[i] == RELOAD_WRITE
4150 			    ? VOIDmode : operand_mode[i]),
4151 			   (modified[i] == RELOAD_READ
4152 			    ? VOIDmode : operand_mode[i]),
4153 			   (insn_code_number < 0 ? 0
4154 			    : insn_data[insn_code_number].operand[i].strict_low),
4155 			   1, i, operand_type[i]);
4156 	/* If a memory reference remains (either as a MEM or a pseudo that
4157 	   did not get a hard register), yet we can't make an optional
4158 	   reload, check if this is actually a pseudo register reference;
4159 	   we then need to emit a USE and/or a CLOBBER so that reload
4160 	   inheritance will do the right thing.  */
4161 	else if (replace
4162 		 && (MEM_P (operand)
4163 		     || (REG_P (operand)
4164 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4165 			 && reg_renumber [REGNO (operand)] < 0)))
4166 	  {
4167 	    operand = *recog_data.operand_loc[i];
4168 
4169 	    while (GET_CODE (operand) == SUBREG)
4170 	      operand = SUBREG_REG (operand);
4171 	    if (REG_P (operand))
4172 	      {
4173 		if (modified[i] != RELOAD_WRITE)
4174 		  /* We mark the USE with QImode so that we recognize
4175 		     it as one that can be safely deleted at the end
4176 		     of reload.  */
4177 		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4178 					      insn), QImode);
4179 		if (modified[i] != RELOAD_READ)
4180 		  emit_insn_after (gen_clobber (operand), insn);
4181 	      }
4182 	  }
4183       }
4184     else if (goal_alternative_matches[i] >= 0
4185 	     && goal_alternative_win[goal_alternative_matches[i]]
4186 	     && modified[i] == RELOAD_READ
4187 	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4188 	     && ! no_input_reloads && ! no_output_reloads
4189 	     && optimize)
4190       {
4191 	/* Similarly, make an optional reload for a pair of matching
4192 	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4193 
4194 	rtx operand = recog_data.operand[i];
4195 
4196 	while (GET_CODE (operand) == SUBREG)
4197 	  operand = SUBREG_REG (operand);
4198 	if ((MEM_P (operand)
4199 	     || (REG_P (operand)
4200 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4201 	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4202 	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4203 	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4204 			   recog_data.operand[i],
4205 			   recog_data.operand_loc[goal_alternative_matches[i]],
4206 			   recog_data.operand_loc[i],
4207 			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4208 			   operand_mode[goal_alternative_matches[i]],
4209 			   operand_mode[i],
4210 			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4211       }
4212 
4213   /* Perform whatever substitutions on the operands we are supposed
4214      to make due to commutativity or replacement of registers
4215      with equivalent constants or memory slots.  */
4216 
4217   for (i = 0; i < noperands; i++)
4218     {
4219       /* We only do this on the last pass through reload, because it is
4220 	 possible for some data (like reg_equiv_address) to be changed during
4221 	 later passes.  Moreover, we lose the opportunity to get a useful
4222 	 reload_{in,out}_reg when we do these replacements.  */
4223 
4224       if (replace)
4225 	{
4226 	  rtx substitution = substed_operand[i];
4227 
4228 	  *recog_data.operand_loc[i] = substitution;
4229 
4230 	  /* If we're replacing an operand with a LABEL_REF, we need to
4231 	     make sure that there's a REG_LABEL_OPERAND note attached to
4232 	     this instruction.  */
4233 	  if (GET_CODE (substitution) == LABEL_REF
4234 	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4235 				 label_ref_label (substitution))
4236 	      /* For a JUMP_P, if it was a branch target it must have
4237 		 already been recorded as such.  */
4238 	      && (!JUMP_P (insn)
4239 		  || !label_is_jump_target_p (label_ref_label (substitution),
4240 					      insn)))
4241 	    {
4242 	      add_reg_note (insn, REG_LABEL_OPERAND,
4243 			    label_ref_label (substitution));
4244 	      if (LABEL_P (label_ref_label (substitution)))
4245 		++LABEL_NUSES (label_ref_label (substitution));
4246 	    }
4247 
4248 	}
4249       else
4250 	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4251     }
4252 
4253   /* If this insn pattern contains any MATCH_DUP's, make sure that
4254      they will be substituted if the operands they match are substituted.
4255      Also do now any substitutions we already did on the operands.
4256 
4257      Don't do this if we aren't making replacements because we might be
4258      propagating things allocated by frame pointer elimination into places
4259      it doesn't expect.  */
4260 
4261   if (insn_code_number >= 0 && replace)
4262     for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4263       {
4264 	int opno = recog_data.dup_num[i];
4265 	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4266 	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4267       }
4268 
4269 #if 0
4270   /* This loses because reloading of prior insns can invalidate the equivalence
4271      (or at least find_equiv_reg isn't smart enough to find it any more),
4272      causing this insn to need more reload regs than it needed before.
4273      It may be too late to make the reload regs available.
4274      Now this optimization is done safely in choose_reload_regs.  */
4275 
4276   /* For each reload of a reg into some other class of reg,
4277      search for an existing equivalent reg (same value now) in the right class.
4278      We can use it as long as we don't need to change its contents.  */
4279   for (i = 0; i < n_reloads; i++)
4280     if (rld[i].reg_rtx == 0
4281 	&& rld[i].in != 0
4282 	&& REG_P (rld[i].in)
4283 	&& rld[i].out == 0)
4284       {
4285 	rld[i].reg_rtx
4286 	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4287 			    static_reload_reg_p, 0, rld[i].inmode);
4288 	/* Prevent generation of insn to load the value
4289 	   because the one we found already has the value.  */
4290 	if (rld[i].reg_rtx)
4291 	  rld[i].in = rld[i].reg_rtx;
4292       }
4293 #endif
4294 
4295   /* If we detected an error and replaced the asm instruction by a USE, forget
4296      about the reloads.  */
4297   if (GET_CODE (PATTERN (insn)) == USE
4298       && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4299     n_reloads = 0;
4300 
4301   /* Perhaps an output reload can be combined with another
4302      to reduce needs by one.  */
4303   if (!goal_earlyclobber)
4304     combine_reloads ();
4305 
4306   /* If we have a pair of reloads for parts of an address, they are reloading
4307      the same object, the operands themselves were not reloaded, and they
4308      are for two operands that are supposed to match, merge the reloads and
4309      change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
4310 
4311   for (i = 0; i < n_reloads; i++)
4312     {
4313       int k;
4314 
4315       for (j = i + 1; j < n_reloads; j++)
4316 	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4317 	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4318 	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4319 	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4320 	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4321 		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4322 		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4323 		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4324 	    && rtx_equal_p (rld[i].in, rld[j].in)
4325 	    && (operand_reloadnum[rld[i].opnum] < 0
4326 		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4327 	    && (operand_reloadnum[rld[j].opnum] < 0
4328 		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4329 	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4330 		|| (goal_alternative_matches[rld[j].opnum]
4331 		    == rld[i].opnum)))
4332 	  {
4333 	    for (k = 0; k < n_replacements; k++)
4334 	      if (replacements[k].what == j)
4335 		replacements[k].what = i;
4336 
4337 	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4338 		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4339 	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4340 	    else
4341 	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4342 	    rld[j].in = 0;
4343 	  }
4344     }
4345 
4346   /* Scan all the reloads and update their type.
4347      If a reload is for the address of an operand and we didn't reload
4348      that operand, change the type.  Similarly, change the operand number
4349      of a reload when two operands match.  If a reload is optional, treat it
4350      as though the operand isn't reloaded.
4351 
4352      ??? This latter case is somewhat odd because if we do the optional
4353      reload, it means the object is hanging around.  Thus we need only
4354      do the address reload if the optional reload was NOT done.
4355 
4356      Change secondary reloads to be the address type of their operand, not
4357      the normal type.
4358 
4359      If an operand's reload is now RELOAD_OTHER, change any
4360      RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4361      RELOAD_FOR_OTHER_ADDRESS.  */
4362 
4363   for (i = 0; i < n_reloads; i++)
4364     {
4365       if (rld[i].secondary_p
4366 	  && rld[i].when_needed == operand_type[rld[i].opnum])
4367 	rld[i].when_needed = address_type[rld[i].opnum];
4368 
4369       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4370 	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4371 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4372 	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4373 	  && (operand_reloadnum[rld[i].opnum] < 0
4374 	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4375 	{
4376 	  /* If we have a secondary reload to go along with this reload,
4377 	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4378 
4379 	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4380 	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4381 	      && rld[i].secondary_in_reload != -1)
4382 	    {
4383 	      int secondary_in_reload = rld[i].secondary_in_reload;
4384 
4385 	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4386 
4387 	      /* If there's a tertiary reload we have to change it also.  */
4388 	      if (secondary_in_reload > 0
4389 		  && rld[secondary_in_reload].secondary_in_reload != -1)
4390 		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4391 		  = RELOAD_FOR_OPADDR_ADDR;
4392 	    }
4393 
4394 	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4395 	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4396 	      && rld[i].secondary_out_reload != -1)
4397 	    {
4398 	      int secondary_out_reload = rld[i].secondary_out_reload;
4399 
4400 	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4401 
4402 	      /* If there's a tertiary reload we have to change it also.  */
4403 	      if (secondary_out_reload
4404 		  && rld[secondary_out_reload].secondary_out_reload != -1)
4405 		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4406 		  = RELOAD_FOR_OPADDR_ADDR;
4407 	    }
4408 
4409 	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4410 	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4411 	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4412 	  else
4413 	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4414 	}
4415 
4416       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4417 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4418 	  && operand_reloadnum[rld[i].opnum] >= 0
4419 	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4420 	      == RELOAD_OTHER))
4421 	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4422 
4423       if (goal_alternative_matches[rld[i].opnum] >= 0)
4424 	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4425     }
4426 
4427   /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4428      If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4429      reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4430 
4431      choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4432      conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4433      single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4434      However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4435      then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4436      RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4437      This is complicated by the fact that a single operand can have more
4438      than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4439      choose_reload_regs without affecting code quality, and cases that
4440      actually fail are extremely rare, so it turns out to be better to fix
4441      the problem here by not generating cases that choose_reload_regs will
4442      fail for.  */
4443   /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4444      RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4445      a single operand.
4446      We can reduce the register pressure by exploiting that a
4447      RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4448      does not conflict with any of them, if it is only used for the first of
4449      the RELOAD_FOR_X_ADDRESS reloads.  */
4450   {
4451     int first_op_addr_num = -2;
4452     int first_inpaddr_num[MAX_RECOG_OPERANDS];
4453     int first_outpaddr_num[MAX_RECOG_OPERANDS];
4454     int need_change = 0;
4455     /* We use first_op_addr_num and the contents of the above arrays
4456        first as flags - -2 means no instance encountered, -1 means exactly
4457        one instance encountered.
4458        If more than one instance has been encountered, we store the reload
4459        number of the first reload of the kind in question; reload numbers
4460        are known to be non-negative.  */
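    /* Illustrative walk-through (not from the original sources): scanning
       downwards, suppose RELOAD_FOR_OPERAND_ADDRESS reloads exist at
       numbers 5 and 2.  Seeing reload 5 bumps first_op_addr_num from -2
       to -1; seeing reload 2 bumps it to 0, so first_op_addr_num is set
       to 2 (the first reload of that kind) and need_change is set.  */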
4461     for (i = 0; i < noperands; i++)
4462       first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4463     for (i = n_reloads - 1; i >= 0; i--)
4464       {
4465 	switch (rld[i].when_needed)
4466 	  {
4467 	  case RELOAD_FOR_OPERAND_ADDRESS:
4468 	    if (++first_op_addr_num >= 0)
4469 	      {
4470 		first_op_addr_num = i;
4471 		need_change = 1;
4472 	      }
4473 	    break;
4474 	  case RELOAD_FOR_INPUT_ADDRESS:
4475 	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4476 	      {
4477 		first_inpaddr_num[rld[i].opnum] = i;
4478 		need_change = 1;
4479 	      }
4480 	    break;
4481 	  case RELOAD_FOR_OUTPUT_ADDRESS:
4482 	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4483 	      {
4484 		first_outpaddr_num[rld[i].opnum] = i;
4485 		need_change = 1;
4486 	      }
4487 	    break;
4488 	  default:
4489 	    break;
4490 	  }
4491       }
4492 
4493     if (need_change)
4494       {
4495 	for (i = 0; i < n_reloads; i++)
4496 	  {
4497 	    int first_num;
4498 	    enum reload_type type;
4499 
4500 	    switch (rld[i].when_needed)
4501 	      {
4502 	      case RELOAD_FOR_OPADDR_ADDR:
4503 		first_num = first_op_addr_num;
4504 		type = RELOAD_FOR_OPERAND_ADDRESS;
4505 		break;
4506 	      case RELOAD_FOR_INPADDR_ADDRESS:
4507 		first_num = first_inpaddr_num[rld[i].opnum];
4508 		type = RELOAD_FOR_INPUT_ADDRESS;
4509 		break;
4510 	      case RELOAD_FOR_OUTADDR_ADDRESS:
4511 		first_num = first_outpaddr_num[rld[i].opnum];
4512 		type = RELOAD_FOR_OUTPUT_ADDRESS;
4513 		break;
4514 	      default:
4515 		continue;
4516 	      }
4517 	    if (first_num < 0)
4518 	      continue;
4519 	    else if (i > first_num)
4520 	      rld[i].when_needed = type;
4521 	    else
4522 	      {
4523 		/* Check if the only TYPE reload that uses reload I is
4524 		   reload FIRST_NUM.  */
4525 		for (j = n_reloads - 1; j > first_num; j--)
4526 		  {
4527 		    if (rld[j].when_needed == type
4528 			&& (rld[i].secondary_p
4529 			    ? rld[j].secondary_in_reload == i
4530 			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4531 		      {
4532 			rld[i].when_needed = type;
4533 			break;
4534 		      }
4535 		  }
4536 	      }
4537 	  }
4538       }
4539   }
4540 
4541   /* See if we have any reloads that are now allowed to be merged
4542      because we've changed when the reload is needed to
4543      RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4544      check for the most common cases.  */
4545 
4546   for (i = 0; i < n_reloads; i++)
4547     if (rld[i].in != 0 && rld[i].out == 0
4548 	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4549 	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4550 	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4551       for (j = 0; j < n_reloads; j++)
4552 	if (i != j && rld[j].in != 0 && rld[j].out == 0
4553 	    && rld[j].when_needed == rld[i].when_needed
4554 	    && MATCHES (rld[i].in, rld[j].in)
4555 	    && rld[i].rclass == rld[j].rclass
4556 	    && !rld[i].nocombine && !rld[j].nocombine
4557 	    && rld[i].reg_rtx == rld[j].reg_rtx)
4558 	  {
4559 	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4560 	    transfer_replacements (i, j);
4561 	    rld[j].in = 0;
4562 	  }
4563 
4564   /* Compute reload_mode and reload_nregs.  */
4565   for (i = 0; i < n_reloads; i++)
4566     {
4567       rld[i].mode = rld[i].inmode;
4568       if (rld[i].mode == VOIDmode
4569 	  || partial_subreg_p (rld[i].mode, rld[i].outmode))
4570 	rld[i].mode = rld[i].outmode;
4571 
4572       rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4573     }
4574 
4575   /* Special case a simple move with an input reload and a
4576      destination of a hard reg: if the hard reg is ok, use it.  */
4577   for (i = 0; i < n_reloads; i++)
4578     if (rld[i].when_needed == RELOAD_FOR_INPUT
4579 	&& GET_CODE (PATTERN (insn)) == SET
4580 	&& REG_P (SET_DEST (PATTERN (insn)))
4581 	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4582 	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4583 	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4584       {
4585 	rtx dest = SET_DEST (PATTERN (insn));
4586 	unsigned int regno = REGNO (dest);
4587 
4588 	if (regno < FIRST_PSEUDO_REGISTER
4589 	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4590 	    && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4591 	  {
4592 	    int nr = hard_regno_nregs (regno, rld[i].mode);
4593 	    int ok = 1, nri;
4594 
4595 	    for (nri = 1; nri < nr; nri ++)
4596 	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4597 		{
4598 		  ok = 0;
4599 		  break;
4600 		}
4601 
4602 	    if (ok)
4603 	      rld[i].reg_rtx = dest;
4604 	  }
4605       }
4606 
4607   return retval;
4608 }
4609 
4610 /* Return true if alternative number ALTNUM in constraint-string
4611    CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4612    MEM gives the reference if its address hasn't been fully reloaded,
4613    otherwise it is NULL.  */
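/* For illustration only (hypothetical constraint string): with CONSTRAINT
   "r,m" and ALTNUM 1, the scan below skips past the first ',' and finds a
   memory constraint in the second alternative, so the function returns
   true as long as MEM, if given, satisfies that constraint.  */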
4614 
4615 static bool
4616 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4617 				   const char *constraint, int altnum)
4618 {
4619   int c;
4620 
4621   /* Skip alternatives before the one requested.  */
4622   while (altnum > 0)
4623     {
4624       while (*constraint++ != ',')
4625 	;
4626       altnum--;
4627     }
4628   /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4629      If one of them is present, this alternative accepts the result of
4630      passing a constant-pool reference through find_reloads_toplev.
4631 
4632      The same is true of extra memory constraints if the address
4633      was reloaded into a register.  However, the target may elect
4634      to disallow the original constant address, forcing it to be
4635      reloaded into a register instead.  */
4636   for (; (c = *constraint) && c != ',' && c != '#';
4637        constraint += CONSTRAINT_LEN (c, constraint))
4638     {
4639       enum constraint_num cn = lookup_constraint (constraint);
4640       if (insn_extra_memory_constraint (cn)
4641 	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4642 	return true;
4643     }
4644   return false;
4645 }
4646 
4647 /* Scan X for memory references and scan the addresses for reloading.
4648    Also checks for references to "constant" regs that we want to eliminate
4649    and replaces them with the values they stand for.
4650    We may alter X destructively if it contains a reference to such.
4651    If X is just a constant reg, we return the equivalent value
4652    instead of X.
4653 
4654    IND_LEVELS says how many levels of indirect addressing this machine
4655    supports.
4656 
4657    OPNUM and TYPE identify the purpose of the reload.
4658 
4659    IS_SET_DEST is true if X is the destination of a SET, which is not
4660    appropriate to be replaced by a constant.
4661 
4662    INSN, if nonzero, is the insn in which we do the reload.  It is used
4663    to determine if we may generate output reloads, and where to put USEs
4664    for pseudos that we have to replace with stack slots.
4665 
4666    ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4667    result of find_reloads_address.  */
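/* Illustrative sketch (hypothetical pseudo number): if (reg:SI 70) did not
   get a hard register but reg_equiv_constant gives (const_int 4), a use of
   the pseudo in X is rewritten to (const_int 4) here, except when it is
   the destination of a SET (IS_SET_DEST).  */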
4668 
4669 static rtx
4670 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4671 		     int ind_levels, int is_set_dest, rtx_insn *insn,
4672 		     int *address_reloaded)
4673 {
4674   RTX_CODE code = GET_CODE (x);
4675 
4676   const char *fmt = GET_RTX_FORMAT (code);
4677   int i;
4678   int copied;
4679 
4680   if (code == REG)
4681     {
4682       /* This code is duplicated for speed in find_reloads.  */
4683       int regno = REGNO (x);
4684       if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4685 	x = reg_equiv_constant (regno);
4686 #if 0
4687       /*  This creates (subreg (mem...)) which would cause an unnecessary
4688 	  reload of the mem.  */
4689       else if (reg_equiv_mem (regno) != 0)
4690 	x = reg_equiv_mem (regno);
4691 #endif
4692       else if (reg_equiv_memory_loc (regno)
4693 	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4694 	{
4695 	  rtx mem = make_memloc (x, regno);
4696 	  if (reg_equiv_address (regno)
4697 	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4698 	    {
4699 	      /* If this is not a toplevel operand, find_reloads doesn't see
4700 		 this substitution.  We have to emit a USE of the pseudo so
4701 		 that delete_output_reload can see it.  */
4702 	      if (replace_reloads && recog_data.operand[opnum] != x)
4703 		/* We mark the USE with QImode so that we recognize it
4704 		   as one that can be safely deleted at the end of
4705 		   reload.  */
4706 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4707 			  QImode);
4708 	      x = mem;
4709 	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4710 					opnum, type, ind_levels, insn);
4711 	      if (!rtx_equal_p (x, mem))
4712 		push_reg_equiv_alt_mem (regno, x);
4713 	      if (address_reloaded)
4714 		*address_reloaded = i;
4715 	    }
4716 	}
4717       return x;
4718     }
4719   if (code == MEM)
4720     {
4721       rtx tem = x;
4722 
4723       i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4724 				opnum, type, ind_levels, insn);
4725       if (address_reloaded)
4726 	*address_reloaded = i;
4727 
4728       return tem;
4729     }
4730 
4731   if (code == SUBREG && REG_P (SUBREG_REG (x)))
4732     {
4733       /* Check for SUBREG containing a REG that's equivalent to a
4734 	 constant.  If the constant has a known value, truncate it
4735 	 right now.  Similarly if we are extracting a single-word of a
4736 	 multi-word constant.  If the constant is symbolic, allow it
4737 	 to be substituted normally.  push_reload will strip the
4738 	 subreg later.  The constant must not be VOIDmode, because we
4739 	 will lose the mode of the register (this should never happen
4740 	 because one of the cases above should handle it).  */
4741 
4742       int regno = REGNO (SUBREG_REG (x));
4743       rtx tem;
4744 
4745       if (regno >= FIRST_PSEUDO_REGISTER
4746 	  && reg_renumber[regno] < 0
4747 	  && reg_equiv_constant (regno) != 0)
4748 	{
4749 	  tem =
4750 	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4751 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4752 	  gcc_assert (tem);
4753 	  if (CONSTANT_P (tem)
4754 	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4755 	    {
4756 	      tem = force_const_mem (GET_MODE (x), tem);
4757 	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4758 					&XEXP (tem, 0), opnum, type,
4759 					ind_levels, insn);
4760 	      if (address_reloaded)
4761 		*address_reloaded = i;
4762 	    }
4763 	  return tem;
4764 	}
4765 
4766       /* If the subreg contains a reg that will be converted to a mem,
4767 	 attempt to convert the whole subreg to a (narrower or wider)
4768 	 memory reference instead.  If this succeeds, we're done --
4769 	 otherwise fall through to check whether the inner reg still
4770 	 needs address reloads anyway.  */
4771 
4772       if (regno >= FIRST_PSEUDO_REGISTER
4773 	  && reg_equiv_memory_loc (regno) != 0)
4774 	{
4775 	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4776 					     insn, address_reloaded);
4777 	  if (tem)
4778 	    return tem;
4779 	}
4780     }
4781 
4782   for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4783     {
4784       if (fmt[i] == 'e')
4785 	{
4786 	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4787 					      ind_levels, is_set_dest, insn,
4788 					      address_reloaded);
4789 	  /* If we have replaced a reg with its equivalent memory loc -
4790 	     that can still be handled here e.g. if it's in a paradoxical
4791 	     subreg - we must make the change in a copy, rather than using
4792 	     a destructive change.  This way, find_reloads can still elect
4793 	     not to do the change.  */
4794 	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4795 	    {
4796 	      x = shallow_copy_rtx (x);
4797 	      copied = 1;
4798 	    }
4799 	  XEXP (x, i) = new_part;
4800 	}
4801     }
4802   return x;
4803 }
4804 
4805 /* Return a mem ref for the memory equivalent of reg REGNO.
4806    This mem ref is not shared with anything.  */
4807 
4808 static rtx
4809 make_memloc (rtx ad, int regno)
4810 {
4811   /* We must rerun eliminate_regs, in case the elimination
4812      offsets have changed.  */
4813   rtx tem
4814     = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4815 	    0);
4816 
4817   /* If TEM might contain a pseudo, we must copy it to avoid
4818      modifying it when we do the substitution for the reload.  */
4819   if (rtx_varies_p (tem, 0))
4820     tem = copy_rtx (tem);
4821 
4822   tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4823   tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4824 
4825   /* Copy the result if it's still the same as the equivalence, to avoid
4826      modifying it when we do the substitution for the reload.  */
4827   if (tem == reg_equiv_memory_loc (regno))
4828     tem = copy_rtx (tem);
4829   return tem;
4830 }
4831 
4832 /* Returns true if AD could be turned into a valid memory reference
4833    to mode MODE in address space AS by reloading the part pointed to
4834    by PART into a register.  */
4835 
4836 static bool
4837 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4838 				   addr_space_t as, rtx *part)
4839 {
4840   bool retv;
4841   rtx tem = *part;
4842   rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4843 
4844   *part = reg;
4845   retv = memory_address_addr_space_p (mode, ad, as);
4846   *part = tem;
4847 
4848   return retv;
4849 }
4850 
4851 /* Record all reloads needed for handling memory address AD
4852    which appears in *LOC in a memory reference to mode MODE
4853    which itself is found in location  *MEMREFLOC.
4854    Note that we take shortcuts assuming that no multi-reg machine mode
4855    occurs as part of an address.
4856 
4857    OPNUM and TYPE specify the purpose of this reload.
4858 
4859    IND_LEVELS says how many levels of indirect addressing this machine
4860    supports.
4861 
4862    INSN, if nonzero, is the insn in which we do the reload.  It is used
4863    to determine if we may generate output reloads, and where to put USEs
4864    for pseudos that we have to replace with stack slots.
4865 
4866    Value is one if this address is reloaded or replaced as a whole; it is
4867    zero if the top level of this address was not reloaded or replaced, and
4868    it is -1 if it may or may not have been reloaded or replaced.
4869 
4870    Note that there is no verification that the address will be valid after
4871    this routine does its work.  Instead, we rely on the fact that the address
4872    was valid when reload started.  So we need only undo things that reload
4873    could have broken.  These are wrong register types, pseudos not allocated
4874    to a hard register, and frame pointer elimination.  */
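/* Illustrative sketch (hypothetical pseudo number): if AD is (reg 70), a
   pseudo with no hard reg and no equivalence, the whole register is
   reloaded into a register of the base class and 1 is returned; if AD is
   already a strictly valid address such as (plus (reg fp) (const_int 8)),
   nothing needs reloading and 0 is returned.  */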
4875 
4876 static int
4877 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4878 		      rtx *loc, int opnum, enum reload_type type,
4879 		      int ind_levels, rtx_insn *insn)
4880 {
4881   addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4882 			     : ADDR_SPACE_GENERIC;
4883   int regno;
4884   int removed_and = 0;
4885   int op_index;
4886   rtx tem;
4887 
4888   /* If the address is a register, see if it is a legitimate address and
4889      reload if not.  We first handle the cases where we need not reload
4890      or where we must reload in a non-standard way.  */
4891 
4892   if (REG_P (ad))
4893     {
4894       regno = REGNO (ad);
4895 
4896       if (reg_equiv_constant (regno) != 0)
4897 	{
4898 	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4899 				     base_reg_class (mode, as, MEM, SCRATCH),
4900 				     GET_MODE (ad), opnum, type, ind_levels);
4901 	  return 1;
4902 	}
4903 
4904       tem = reg_equiv_memory_loc (regno);
4905       if (tem != 0)
4906 	{
4907 	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4908 	    {
4909 	      tem = make_memloc (ad, regno);
4910 	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4911 							XEXP (tem, 0),
4912 							MEM_ADDR_SPACE (tem)))
4913 		{
4914 		  rtx orig = tem;
4915 
4916 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4917 					&XEXP (tem, 0), opnum,
4918 					ADDR_TYPE (type), ind_levels, insn);
4919 		  if (!rtx_equal_p (tem, orig))
4920 		    push_reg_equiv_alt_mem (regno, tem);
4921 		}
4922 	      /* We can avoid a reload if the register's equivalent memory
4923 		 expression is valid as an indirect memory address.
4924 		 But not all addresses are valid in a mem used as an indirect
4925 		 address: only reg or reg+constant.  */
4926 
4927 	      if (ind_levels > 0
4928 		  && strict_memory_address_addr_space_p (mode, tem, as)
4929 		  && (REG_P (XEXP (tem, 0))
4930 		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4931 			  && REG_P (XEXP (XEXP (tem, 0), 0))
4932 			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4933 		{
4934 		  /* If TEM is not the same as what we'll be replacing the
4935 		     pseudo with after reload, put a USE in front of INSN
4936 		     in the final reload pass.  */
4937 		  if (replace_reloads
4938 		      && num_not_at_initial_offset
4939 		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4940 		    {
4941 		      *loc = tem;
4942 		      /* We mark the USE with QImode so that we
4943 			 recognize it as one that can be safely
4944 			 deleted at the end of reload.  */
4945 		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4946 						  insn), QImode);
4947 
4948 		      /* This doesn't really count as replacing the address
4949 			 as a whole, since it is still a memory access.  */
4950 		    }
4951 		  return 0;
4952 		}
4953 	      ad = tem;
4954 	    }
4955 	}
4956 
4957       /* The only remaining case where we can avoid a reload is if this is a
4958 	 hard register that is valid as a base register and which is not the
4959 	 subject of a CLOBBER in this insn.  */
4960 
4961       else if (regno < FIRST_PSEUDO_REGISTER
4962 	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4963 	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
4964 	return 0;
4965 
4966       /* If we do not have one of the cases above, we must do the reload.  */
4967       push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4968 		   base_reg_class (mode, as, MEM, SCRATCH),
4969 		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4970       return 1;
4971     }
4972 
4973   if (strict_memory_address_addr_space_p (mode, ad, as))
4974     {
4975       /* The address appears valid, so reloads are not needed.
4976 	 But the address may contain an eliminable register.
4977 	 This can happen because a machine with indirect addressing
4978 	 may consider a pseudo register by itself a valid address even when
4979 	 it has failed to get a hard reg.
4980 	 So do a tree-walk to find and eliminate all such regs.  */
4981 
4982       /* But first quickly dispose of a common case.  */
4983       if (GET_CODE (ad) == PLUS
4984 	  && CONST_INT_P (XEXP (ad, 1))
4985 	  && REG_P (XEXP (ad, 0))
4986 	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4987 	return 0;
4988 
4989       subst_reg_equivs_changed = 0;
4990       *loc = subst_reg_equivs (ad, insn);
4991 
4992       if (! subst_reg_equivs_changed)
4993 	return 0;
4994 
4995       /* Check result for validity after substitution.  */
4996       if (strict_memory_address_addr_space_p (mode, ad, as))
4997 	return 0;
4998     }
4999 
5000 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5001   do
5002     {
5003       if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5004 	{
5005 	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5006 				     ind_levels, win);
5007 	}
5008       break;
5009     win:
5010       *memrefloc = copy_rtx (*memrefloc);
5011       XEXP (*memrefloc, 0) = ad;
5012       move_replacements (&ad, &XEXP (*memrefloc, 0));
5013       return -1;
5014     }
5015   while (0);
5016 #endif
5017 
5018   /* The address is not valid.  We have to figure out why.  First see if
5019      we have an outer AND and remove it if so.  Then analyze what's inside.  */
5020 
5021   if (GET_CODE (ad) == AND)
5022     {
5023       removed_and = 1;
5024       loc = &XEXP (ad, 0);
5025       ad = *loc;
5026     }
5027 
5028   /* One possibility for why the address is invalid is that it is itself
5029      a MEM.  This can happen when the frame pointer is being eliminated, a
5030      pseudo is not allocated to a hard register, and the offset between the
5031      frame and stack pointers is not its initial value.  In that case the
5032      pseudo will have been replaced by a MEM referring to the
5033      stack pointer.  */
5034   if (MEM_P (ad))
5035     {
5036       /* First ensure that the address in this MEM is valid.  Then, unless
5037 	 indirect addresses are valid, reload the MEM into a register.  */
5038       tem = ad;
5039       find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5040 			    opnum, ADDR_TYPE (type),
5041 			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5042 
5043       /* If tem was changed, then we must create a new memory reference to
5044 	 hold it and store it back into memrefloc.  */
5045       if (tem != ad && memrefloc)
5046 	{
5047 	  *memrefloc = copy_rtx (*memrefloc);
5048 	  copy_replacements (tem, XEXP (*memrefloc, 0));
5049 	  loc = &XEXP (*memrefloc, 0);
5050 	  if (removed_and)
5051 	    loc = &XEXP (*loc, 0);
5052 	}
5053 
5054       /* Check the same cases as for indirect addresses above, except
5055 	 that we can allow pseudos and a MEM since they should have been
5056 	 taken care of above.  */
5057 
5058       if (ind_levels == 0
5059 	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5060 	  || MEM_P (XEXP (tem, 0))
5061 	  || ! (REG_P (XEXP (tem, 0))
5062 		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5063 		    && REG_P (XEXP (XEXP (tem, 0), 0))
5064 		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5065 	{
5066 	  /* Must use TEM here, not AD, since it is the one that will
5067 	     have any subexpressions reloaded, if needed.  */
5068 	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5069 		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5070 		       VOIDmode, 0,
5071 		       0, opnum, type);
5072 	  return ! removed_and;
5073 	}
5074       else
5075 	return 0;
5076     }
5077 
5078   /* If we have address of a stack slot but it's not valid because the
5079      displacement is too large, compute the sum in a register.
5080      Handle all base registers here, not just fp/ap/sp, because on some
5081      targets (namely SH) we can also get too large displacements from
5082      big-endian corrections.  */
5083   else if (GET_CODE (ad) == PLUS
5084 	   && REG_P (XEXP (ad, 0))
5085 	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5086 	   && CONST_INT_P (XEXP (ad, 1))
5087 	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5088 				    CONST_INT)
5089 	       /* Similarly, if we were to reload the base register and the
5090 		  mem+offset address is still invalid, then we want to reload
5091 		  the whole address, not just the base register.  */
5092 	       || ! maybe_memory_address_addr_space_p
5093 		     (mode, ad, as, &(XEXP (ad, 0)))))
5094 
5095     {
5096       /* Unshare the MEM rtx so we can safely alter it.  */
5097       if (memrefloc)
5098 	{
5099 	  *memrefloc = copy_rtx (*memrefloc);
5100 	  loc = &XEXP (*memrefloc, 0);
5101 	  if (removed_and)
5102 	    loc = &XEXP (*loc, 0);
5103 	}
5104 
5105       if (double_reg_address_ok[mode]
5106 	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5107 				  PLUS, CONST_INT))
5108 	{
5109 	  /* Unshare the sum as well.  */
5110 	  *loc = ad = copy_rtx (ad);
5111 
5112 	  /* Reload the displacement into an index reg.
5113 	     We assume the frame pointer or arg pointer is a base reg.  */
5114 	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5115 				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5116 				     type, ind_levels);
5117 	  return 0;
5118 	}
5119       else
5120 	{
5121 	  /* If the sum of two regs is not necessarily valid,
5122 	     reload the sum into a base reg.
5123 	     That will at least work.  */
5124 	  find_reloads_address_part (ad, loc,
5125 				     base_reg_class (mode, as, MEM, SCRATCH),
5126 				     GET_MODE (ad), opnum, type, ind_levels);
5127 	}
5128       return ! removed_and;
5129     }
5130 
5131   /* If we have an indexed stack slot, there are three possible reasons why
5132      it might be invalid: The index might need to be reloaded, the address
5133      might have been made by frame pointer elimination and hence have a
5134      constant out of range, or both reasons might apply.
5135 
5136      We can easily check for an index needing reload, but even if that is the
5137      case, we might also have an invalid constant.  To avoid making the
5138      conservative assumption and requiring two reloads, we see if this address
5139      is valid when not interpreted strictly.  If it is, the only problem is
5140      that the index needs a reload and find_reloads_address_1 will take care
5141      of it.
5142 
5143      Handle all base registers here, not just fp/ap/sp, because on some
5144      targets (namely SPARC) we can also get invalid addresses from preventive
5145      subreg big-endian corrections made by find_reloads_toplev.  We
5146      can also get expressions involving LO_SUM (rather than PLUS) from
5147      find_reloads_subreg_address.
5148 
5149      If we decide to do something, it must be that `double_reg_address_ok'
5150      is true.  We generate a reload of the base register + constant and
5151      rework the sum so that the reload register will be added to the index.
5152      This is safe because we know the address isn't shared.
5153 
5154      We check for the base register as both the first and second operand of
5155      the innermost PLUS and/or LO_SUM.  */
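  /* Illustrative sketch (hypothetical register numbers): frame pointer
     elimination may leave (plus (plus (reg fp) (reg 66)) (const_int 4000))
     with a displacement that is out of range.  The loop below folds the
     constant into the base term, giving
     (plus (plus (reg fp) (const_int 4000)) (reg 66)), reloads that
     base-plus-constant part into a base register, and treats (reg 66)
     as the index.  */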
5156 
5157   for (op_index = 0; op_index < 2; ++op_index)
5158     {
5159       rtx operand, addend;
5160       enum rtx_code inner_code;
5161 
5162       if (GET_CODE (ad) != PLUS)
5163 	  continue;
5164 
5165       inner_code = GET_CODE (XEXP (ad, 0));
5166       if (!(GET_CODE (ad) == PLUS
5167 	    && CONST_INT_P (XEXP (ad, 1))
5168 	    && (inner_code == PLUS || inner_code == LO_SUM)))
5169 	continue;
5170 
5171       operand = XEXP (XEXP (ad, 0), op_index);
5172       if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5173 	continue;
5174 
5175       addend = XEXP (XEXP (ad, 0), 1 - op_index);
5176 
5177       if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5178 				GET_CODE (addend))
5179 	   || operand == frame_pointer_rtx
5180 	   || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5181 	       && operand == hard_frame_pointer_rtx)
5182 	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5183 	       && operand == arg_pointer_rtx)
5184 	   || operand == stack_pointer_rtx)
5185 	  && ! maybe_memory_address_addr_space_p
5186 		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5187 	{
5188 	  rtx offset_reg;
5189 	  enum reg_class cls;
5190 
5191 	  offset_reg = plus_constant (GET_MODE (ad), operand,
5192 				      INTVAL (XEXP (ad, 1)));
5193 
5194 	  /* Form the adjusted address.  */
5195 	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5196 	    ad = gen_rtx_PLUS (GET_MODE (ad),
5197 			       op_index == 0 ? offset_reg : addend,
5198 			       op_index == 0 ? addend : offset_reg);
5199 	  else
5200 	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5201 				 op_index == 0 ? offset_reg : addend,
5202 				 op_index == 0 ? addend : offset_reg);
5203 	  *loc = ad;
5204 
5205 	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5206 	  find_reloads_address_part (XEXP (ad, op_index),
5207 				     &XEXP (ad, op_index), cls,
5208 				     GET_MODE (ad), opnum, type, ind_levels);
5209 	  find_reloads_address_1 (mode, as,
5210 				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5211 				  GET_CODE (XEXP (ad, op_index)),
5212 				  &XEXP (ad, 1 - op_index), opnum,
5213 				  type, 0, insn);
5214 
5215 	  return 0;
5216 	}
5217     }
5218 
5219   /* See if address becomes valid when an eliminable register
5220      in a sum is replaced.  */
5221 
5222   tem = ad;
5223   if (GET_CODE (ad) == PLUS)
5224     tem = subst_indexed_address (ad);
5225   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5226     {
5227       /* Ok, we win that way.  Replace any additional eliminable
5228 	 registers.  */
5229 
5230       subst_reg_equivs_changed = 0;
5231       tem = subst_reg_equivs (tem, insn);
5232 
5233       /* Make sure that didn't make the address invalid again.  */
5234 
5235       if (! subst_reg_equivs_changed
5236 	  || strict_memory_address_addr_space_p (mode, tem, as))
5237 	{
5238 	  *loc = tem;
5239 	  return 0;
5240 	}
5241     }
5242 
5243   /* If constants aren't valid addresses, reload the constant address
5244      into a register.  */
5245   if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5246     {
5247       machine_mode address_mode = GET_MODE (ad);
5248       if (address_mode == VOIDmode)
5249 	address_mode = targetm.addr_space.address_mode (as);
5250 
5251       /* If AD is an address in the constant pool, the MEM rtx may be shared.
5252 	 Unshare it so we can safely alter it.  */
5253       if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5254 	  && CONSTANT_POOL_ADDRESS_P (ad))
5255 	{
5256 	  *memrefloc = copy_rtx (*memrefloc);
5257 	  loc = &XEXP (*memrefloc, 0);
5258 	  if (removed_and)
5259 	    loc = &XEXP (*loc, 0);
5260 	}
5261 
5262       find_reloads_address_part (ad, loc,
5263 				 base_reg_class (mode, as, MEM, SCRATCH),
5264 				 address_mode, opnum, type, ind_levels);
5265       return ! removed_and;
5266     }
5267 
5268   return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5269 				 opnum, type, ind_levels, insn);
5270 }
5271 
5272 /* Find all pseudo regs appearing in AD
5273    that are eliminable in favor of equivalent values
5274    and do not have hard regs; replace them by their equivalents.
5275    INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5276    front of it for pseudos that we have to replace with stack slots.  */
5277 
5278 static rtx
5279 subst_reg_equivs (rtx ad, rtx_insn *insn)
5280 {
5281   RTX_CODE code = GET_CODE (ad);
5282   int i;
5283   const char *fmt;
5284 
5285   switch (code)
5286     {
5287     case HIGH:
5288     case CONST:
5289     CASE_CONST_ANY:
5290     case SYMBOL_REF:
5291     case LABEL_REF:
5292     case PC:
5293       return ad;
5294 
5295     case REG:
5296       {
5297 	int regno = REGNO (ad);
5298 
5299 	if (reg_equiv_constant (regno) != 0)
5300 	  {
5301 	    subst_reg_equivs_changed = 1;
5302 	    return reg_equiv_constant (regno);
5303 	  }
5304 	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5305 	  {
5306 	    rtx mem = make_memloc (ad, regno);
5307 	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5308 	      {
5309 		subst_reg_equivs_changed = 1;
5310 		/* We mark the USE with QImode so that we recognize it
5311 		   as one that can be safely deleted at the end of
5312 		   reload.  */
5313 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5314 			  QImode);
5315 		return mem;
5316 	      }
5317 	  }
5318       }
5319       return ad;
5320 
5321     case PLUS:
5322       /* Quickly dispose of a common case.  */
5323       if (XEXP (ad, 0) == frame_pointer_rtx
5324 	  && CONST_INT_P (XEXP (ad, 1)))
5325 	return ad;
5326       break;
5327 
5328     default:
5329       break;
5330     }
5331 
5332   fmt = GET_RTX_FORMAT (code);
5333   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5334     if (fmt[i] == 'e')
5335       XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5336   return ad;
5337 }
5338 
5339 /* Compute the sum of X and Y, making canonicalizations assumed in an
5340    address, namely: sum constant integers, surround the sum of two
5341    constants with a CONST, put the constant as the second operand, and
5342    group the constant on the outermost sum.
5343 
5344    This routine assumes both inputs are already in canonical form.  */
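/* Illustrative examples (hypothetical register number):

     form_sum (Pmode, (plus (reg 65) (const_int 4)), (const_int 8))
       => (plus (reg 65) (const_int 12))

     form_sum (Pmode, (symbol_ref "x"), (const_int 8))
       => (const (plus (symbol_ref "x") (const_int 8)))  */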
5345 
5346 rtx
5347 form_sum (machine_mode mode, rtx x, rtx y)
5348 {
5349   rtx tem;
5350 
5351   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5352   gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5353 
5354   if (CONST_INT_P (x))
5355     return plus_constant (mode, y, INTVAL (x));
5356   else if (CONST_INT_P (y))
5357     return plus_constant (mode, x, INTVAL (y));
5358   else if (CONSTANT_P (x))
5359     tem = x, x = y, y = tem;
5360 
5361   if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5362     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5363 
5364   /* Note that if the operands of Y are specified in the opposite
5365      order in the recursive calls below, infinite recursion will occur.  */
5366   if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5367     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5368 
5369   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5370      constant will have been placed second.  */
5371   if (CONSTANT_P (x) && CONSTANT_P (y))
5372     {
5373       if (GET_CODE (x) == CONST)
5374 	x = XEXP (x, 0);
5375       if (GET_CODE (y) == CONST)
5376 	y = XEXP (y, 0);
5377 
5378       return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5379     }
5380 
5381   return gen_rtx_PLUS (mode, x, y);
5382 }
5383 
5384 /* If ADDR is a sum containing a pseudo register that should be
5385    replaced with a constant (from reg_equiv_constant),
5386    return the result of doing so, and also apply the associative
5387    law so that the result is more likely to be a valid address.
5388    (But it is not guaranteed to be one.)
5389 
5390    Note that at most one register is replaced, even if more are
5391    replaceable.  Also, we try to put the result into a canonical form
5392    so it is more likely to be a valid address.
5393 
5394    In all other cases, return ADDR.  */
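/* Illustrative sketch (hypothetical pseudo numbers): if (reg 70) is a
   pseudo with no hard reg whose reg_equiv_constant is (symbol_ref "x"),
   then (plus (plus (reg 70) (reg 66)) (const_int 4)) becomes, after the
   substitution and reassociation through form_sum,
   (plus (reg 66) (const (plus (symbol_ref "x") (const_int 4)))).  */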
5395 
5396 static rtx
5397 subst_indexed_address (rtx addr)
5398 {
5399   rtx op0 = 0, op1 = 0, op2 = 0;
5400   rtx tem;
5401   int regno;
5402 
5403   if (GET_CODE (addr) == PLUS)
5404     {
5405       /* Try to find a register to replace.  */
5406       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5407       if (REG_P (op0)
5408 	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5409 	  && reg_renumber[regno] < 0
5410 	  && reg_equiv_constant (regno) != 0)
5411 	op0 = reg_equiv_constant (regno);
5412       else if (REG_P (op1)
5413 	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5414 	       && reg_renumber[regno] < 0
5415 	       && reg_equiv_constant (regno) != 0)
5416 	op1 = reg_equiv_constant (regno);
5417       else if (GET_CODE (op0) == PLUS
5418 	       && (tem = subst_indexed_address (op0)) != op0)
5419 	op0 = tem;
5420       else if (GET_CODE (op1) == PLUS
5421 	       && (tem = subst_indexed_address (op1)) != op1)
5422 	op1 = tem;
5423       else
5424 	return addr;
5425 
5426       /* Pick out up to three things to add.  */
5427       if (GET_CODE (op1) == PLUS)
5428 	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5429       else if (GET_CODE (op0) == PLUS)
5430 	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5431 
5432       /* Compute the sum.  */
5433       if (op2 != 0)
5434 	op1 = form_sum (GET_MODE (addr), op1, op2);
5435       if (op1 != 0)
5436 	op0 = form_sum (GET_MODE (addr), op0, op1);
5437 
5438       return op0;
5439     }
5440   return addr;
5441 }
5442 
5443 /* Update the REG_INC notes for an insn.  It updates all REG_INC
5444    notes for the instruction which refer to REGNO so that they
5445    refer to the reload number.
5446 
5447    INSN is the insn for which any REG_INC notes need updating.
5448 
5449    REGNO is the register number which has been reloaded.
5450 
5451    RELOADNUM is the reload number.  */
5452 
5453 static void
5454 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5455 		       int reloadnum ATTRIBUTE_UNUSED)
5456 {
5457   if (!AUTO_INC_DEC)
5458     return;
5459 
5460   for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5461     if (REG_NOTE_KIND (link) == REG_INC
5462         && (int) REGNO (XEXP (link, 0)) == regno)
5463       push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5464 }
5465 
5466 /* Record the pseudo registers we must reload into hard registers in a
5467    subexpression of a would-be memory address, X referring to a value
5468    in mode MODE.  (This function is not called if the address we find
5469    is strictly valid.)
5470 
5471    CONTEXT = 1 means we are considering regs as index regs,
5472    = 0 means we are considering them as base regs.
5473    OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5474    or an autoinc code.
5475    If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5476    is the code of the index part of the address.  Otherwise, pass SCRATCH
5477    for this argument.
5478    OPNUM and TYPE specify the purpose of any reloads made.
5479 
5480    IND_LEVELS says how many levels of indirect addressing are
5481    supported at this point in the address.
5482 
5483    INSN, if nonzero, is the insn in which we do the reload.  It is used
5484    to determine if we may generate output reloads.
5485 
5486    We return nonzero if X, as a whole, is reloaded or replaced.  */
5487 
5488 /* Note that we take shortcuts assuming that no multi-reg machine mode
5489    occurs as part of an address.
5490    Also, this is not fully machine-customizable; it works for machines
5491    such as VAXen and 68000's and 32000's, but other possible machines
5492    could have addressing modes that this does not handle right.
5493    If you add push_reload calls here, you need to make sure gen_reload
5494    handles those cases gracefully.  */
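/* Illustrative example (hypothetical register numbers): in an address such
   as (plus (mult (reg 66) (const_int 4)) (reg 65)), the MULT subexpression
   is processed as the index part (CONTEXT = 1) and (reg 65) as the base
   part (CONTEXT = 0).  */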
5495 
5496 static int
5497 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5498 			rtx x, int context,
5499 			enum rtx_code outer_code, enum rtx_code index_code,
5500 			rtx *loc, int opnum, enum reload_type type,
5501 			int ind_levels, rtx_insn *insn)
5502 {
5503 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5504   ((CONTEXT) == 0							\
5505    ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5506    : REGNO_OK_FOR_INDEX_P (REGNO))
5507 
5508   enum reg_class context_reg_class;
5509   RTX_CODE code = GET_CODE (x);
5510   bool reloaded_inner_of_autoinc = false;
5511 
5512   if (context == 1)
5513     context_reg_class = INDEX_REG_CLASS;
5514   else
5515     context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5516 
5517   switch (code)
5518     {
5519     case PLUS:
5520       {
5521 	rtx orig_op0 = XEXP (x, 0);
5522 	rtx orig_op1 = XEXP (x, 1);
5523 	RTX_CODE code0 = GET_CODE (orig_op0);
5524 	RTX_CODE code1 = GET_CODE (orig_op1);
5525 	rtx op0 = orig_op0;
5526 	rtx op1 = orig_op1;
5527 
5528 	if (GET_CODE (op0) == SUBREG)
5529 	  {
5530 	    op0 = SUBREG_REG (op0);
5531 	    code0 = GET_CODE (op0);
5532 	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5533 	      op0 = gen_rtx_REG (word_mode,
5534 				 (REGNO (op0) +
5535 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5536 						       GET_MODE (SUBREG_REG (orig_op0)),
5537 						       SUBREG_BYTE (orig_op0),
5538 						       GET_MODE (orig_op0))));
5539 	  }
5540 
5541 	if (GET_CODE (op1) == SUBREG)
5542 	  {
5543 	    op1 = SUBREG_REG (op1);
5544 	    code1 = GET_CODE (op1);
5545 	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5546 	      /* ??? Why is this given op1's mode and above for
5547 		 ??? op0 SUBREGs we use word_mode?  */
5548 	      op1 = gen_rtx_REG (GET_MODE (op1),
5549 				 (REGNO (op1) +
5550 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5551 						       GET_MODE (SUBREG_REG (orig_op1)),
5552 						       SUBREG_BYTE (orig_op1),
5553 						       GET_MODE (orig_op1))));
5554 	  }
5555 	/* A PLUS in the index register may be created only as a result of
5556 	   register rematerialization for an expression like &localvar*4.  Reload it.
5557 	   It may be possible to combine the displacement on the outer level,
5558 	   but it is probably not worthwhile to do so.  */
5559 	if (context == 1)
5560 	  {
5561 	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5562 				  opnum, ADDR_TYPE (type), ind_levels, insn);
5563 	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5564 			 context_reg_class,
5565 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5566 	    return 1;
5567 	  }
5568 
5569 	if (code0 == MULT || code0 == ASHIFT
5570 	    || code0 == SIGN_EXTEND || code0 == TRUNCATE
5571 	    || code0 == ZERO_EXTEND || code1 == MEM)
5572 	  {
5573 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5574 				    &XEXP (x, 0), opnum, type, ind_levels,
5575 				    insn);
5576 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5577 				    &XEXP (x, 1), opnum, type, ind_levels,
5578 				    insn);
5579 	  }
5580 
5581 	else if (code1 == MULT || code1 == ASHIFT
5582 		 || code1 == SIGN_EXTEND || code1 == TRUNCATE
5583 		 || code1 == ZERO_EXTEND || code0 == MEM)
5584 	  {
5585 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5586 				    &XEXP (x, 0), opnum, type, ind_levels,
5587 				    insn);
5588 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5589 				    &XEXP (x, 1), opnum, type, ind_levels,
5590 				    insn);
5591 	  }
5592 
5593 	else if (code0 == CONST_INT || code0 == CONST
5594 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5595 	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5596 				  &XEXP (x, 1), opnum, type, ind_levels,
5597 				  insn);
5598 
5599 	else if (code1 == CONST_INT || code1 == CONST
5600 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5601 	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5602 				  &XEXP (x, 0), opnum, type, ind_levels,
5603 				  insn);
5604 
5605 	else if (code0 == REG && code1 == REG)
5606 	  {
5607 	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5608 		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5609 	      return 0;
5610 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5611 		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5612 	      return 0;
5613 	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5614 	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5615 				      &XEXP (x, 1), opnum, type, ind_levels,
5616 				      insn);
5617 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5618 	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5619 				      &XEXP (x, 0), opnum, type, ind_levels,
5620 				      insn);
5621 	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5622 	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5623 				      &XEXP (x, 0), opnum, type, ind_levels,
5624 				      insn);
5625 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5626 	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5627 				      &XEXP (x, 1), opnum, type, ind_levels,
5628 				      insn);
5629 	    else
5630 	      {
5631 		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5632 					&XEXP (x, 0), opnum, type, ind_levels,
5633 					insn);
5634 		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5635 					&XEXP (x, 1), opnum, type, ind_levels,
5636 					insn);
5637 	      }
5638 	  }
5639 
5640 	else if (code0 == REG)
5641 	  {
5642 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5643 				    &XEXP (x, 0), opnum, type, ind_levels,
5644 				    insn);
5645 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5646 				    &XEXP (x, 1), opnum, type, ind_levels,
5647 				    insn);
5648 	  }
5649 
5650 	else if (code1 == REG)
5651 	  {
5652 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5653 				    &XEXP (x, 1), opnum, type, ind_levels,
5654 				    insn);
5655 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5656 				    &XEXP (x, 0), opnum, type, ind_levels,
5657 				    insn);
5658 	  }
5659       }
5660 
5661       return 0;
5662 
5663     case POST_MODIFY:
5664     case PRE_MODIFY:
5665       {
5666 	rtx op0 = XEXP (x, 0);
5667 	rtx op1 = XEXP (x, 1);
5668 	enum rtx_code index_code;
5669 	int regno;
5670 	int reloadnum;
5671 
5672 	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5673 	  return 0;
5674 
5675 	/* Currently, we only support {PRE,POST}_MODIFY constructs
5676 	   where a base register is {inc,dec}remented by the contents
5677 	   of another register or by a constant value.  Thus, these
5678 	   operands must match.  */
5679 	gcc_assert (op0 == XEXP (op1, 0));
5680 
5681 	/* Require index register (or constant).  Let's just handle the
5682 	   register case in the meantime... If the target allows
5683 	   auto-modify by a constant then we could try replacing a pseudo
5684 	   register with its equivalent constant where applicable.
5685 
5686 	   We also handle the case where the register was eliminated
5687 	   resulting in a PLUS subexpression.
5688 
5689 	   If we later decide to reload the whole PRE_MODIFY or
5690 	   POST_MODIFY, inc_for_reload might clobber the reload register
5691 	   before reading the index.  The index register might therefore
5692 	   need to live longer than a TYPE reload normally would, so be
5693 	   conservative and class it as RELOAD_OTHER.  */
5694 	if ((REG_P (XEXP (op1, 1))
5695 	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5696 	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5697 	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5698 				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5699 				  ind_levels, insn);
5700 
5701 	gcc_assert (REG_P (XEXP (op1, 0)));
5702 
5703 	regno = REGNO (XEXP (op1, 0));
5704 	index_code = GET_CODE (XEXP (op1, 1));
5705 
5706 	/* A register that is incremented cannot be constant!  */
5707 	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5708 		    || reg_equiv_constant (regno) == 0);
5709 
5710 	/* Handle a register that is equivalent to a memory location
5711 	    which cannot be addressed directly.  */
5712 	if (reg_equiv_memory_loc (regno) != 0
5713 	    && (reg_equiv_address (regno) != 0
5714 		|| num_not_at_initial_offset))
5715 	  {
5716 	    rtx tem = make_memloc (XEXP (x, 0), regno);
5717 
5718 	    if (reg_equiv_address (regno)
5719 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5720 	      {
5721 		rtx orig = tem;
5722 
5723 		/* First reload the memory location's address.
5724 		    We can't use ADDR_TYPE (type) here, because we need to
5725 		    write back the value after reading it, hence we actually
5726 		    need two registers.  */
5727 		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5728 				      &XEXP (tem, 0), opnum,
5729 				      RELOAD_OTHER,
5730 				      ind_levels, insn);
5731 
5732 		if (!rtx_equal_p (tem, orig))
5733 		  push_reg_equiv_alt_mem (regno, tem);
5734 
5735 		/* Then reload the memory location into a base
5736 		   register.  */
5737 		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5738 					 &XEXP (op1, 0),
5739 					 base_reg_class (mode, as,
5740 							 code, index_code),
5741 					 GET_MODE (x), GET_MODE (x), 0,
5742 					 0, opnum, RELOAD_OTHER);
5743 
5744 		update_auto_inc_notes (this_insn, regno, reloadnum);
5745 		return 0;
5746 	      }
5747 	  }
5748 
5749 	if (reg_renumber[regno] >= 0)
5750 	  regno = reg_renumber[regno];
5751 
5752 	/* We require a base register here...  */
5753 	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5754 	  {
5755 	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5756 				     &XEXP (op1, 0), &XEXP (x, 0),
5757 				     base_reg_class (mode, as,
5758 						     code, index_code),
5759 				     GET_MODE (x), GET_MODE (x), 0, 0,
5760 				     opnum, RELOAD_OTHER);
5761 
5762 	    update_auto_inc_notes (this_insn, regno, reloadnum);
5763 	    return 0;
5764 	  }
5765       }
5766       return 0;
5767 
5768     case POST_INC:
5769     case POST_DEC:
5770     case PRE_INC:
5771     case PRE_DEC:
5772       if (REG_P (XEXP (x, 0)))
5773 	{
5774 	  int regno = REGNO (XEXP (x, 0));
5775 	  int value = 0;
5776 	  rtx x_orig = x;
5777 
5778 	  /* A register that is incremented cannot be constant!  */
5779 	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5780 		      || reg_equiv_constant (regno) == 0);
5781 
5782 	  /* Handle a register that is equivalent to a memory location
5783 	     which cannot be addressed directly.  */
5784 	  if (reg_equiv_memory_loc (regno) != 0
5785 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5786 	    {
5787 	      rtx tem = make_memloc (XEXP (x, 0), regno);
5788 	      if (reg_equiv_address (regno)
5789 		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5790 		{
5791 		  rtx orig = tem;
5792 
5793 		  /* First reload the memory location's address.
5794 		     We can't use ADDR_TYPE (type) here, because we need to
5795 		     write back the value after reading it, hence we actually
5796 		     need two registers.  */
5797 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5798 					&XEXP (tem, 0), opnum, type,
5799 					ind_levels, insn);
5800 		  reloaded_inner_of_autoinc = true;
5801 		  if (!rtx_equal_p (tem, orig))
5802 		    push_reg_equiv_alt_mem (regno, tem);
5803 		  /* Put this inside a new increment-expression.  */
5804 		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5805 		  /* Proceed to reload that, as if it contained a register.  */
5806 		}
5807 	    }
5808 
5809 	  /* If we have a hard register that is ok in this incdec context,
5810 	     don't make a reload.  If the register isn't suitable for
5811 	     autoincrement/decrement, we can reload it.  But if a register
5812 	     that we verified here as suitable still does not yield a
5813 	     "valid" autoincrement address, then no autoincrement can be
5814 	     "valid"; if one was generated anyway, this must be a special
5815 	     context where one is allowed (for example, a "push"
5816 	     instruction).  We can't improve this address, so leave it
5817 	     alone.  */
5818 
5819 	  /* Otherwise, reload the autoincrement into a suitable hard reg
5820 	     and record how much to increment by.  */
5821 
5822 	  if (reg_renumber[regno] >= 0)
5823 	    regno = reg_renumber[regno];
5824 	  if (regno >= FIRST_PSEUDO_REGISTER
5825 	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5826 				      index_code))
5827 	    {
5828 	      int reloadnum;
5829 
5830 	      /* If we can output the register afterwards, do so; this
5831 		 saves the extra update.
5832 		 We can do so if we have an INSN - i.e. not a JUMP_INSN or
5833 		 CALL_INSN.
5834 		 But don't do this if we cannot directly address the
5835 		 memory location, since that makes it harder to
5836 		 reuse address reloads and increases register pressure.
5837 		 Also don't do this if we can probably update x directly.  */
5838 	      rtx equiv = (MEM_P (XEXP (x, 0))
5839 			   ? XEXP (x, 0)
5840 			   : reg_equiv_mem (regno));
5841 	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5842 	      if (insn && NONJUMP_INSN_P (insn)
5843 		  && (regno < FIRST_PSEUDO_REGISTER
5844 		      || (equiv
5845 			  && memory_operand (equiv, GET_MODE (equiv))
5846 			  && ! (icode != CODE_FOR_nothing
5847 				&& insn_operand_matches (icode, 0, equiv)
5848 				&& insn_operand_matches (icode, 1, equiv))))
5849 		  /* Using RELOAD_OTHER means we emit this and the reload we
5850 		     made earlier in the wrong order.  */
5851 		  && !reloaded_inner_of_autoinc)
5852 		{
5853 		  /* We use the original pseudo for loc, so that
5854 		     emit_reload_insns() knows which pseudo this
5855 		     reload refers to and updates the pseudo rtx, not
5856 		     its equivalent memory location, as well as the
5857 		     corresponding entry in reg_last_reload_reg.  */
5858 		  loc = &XEXP (x_orig, 0);
5859 		  x = XEXP (x, 0);
5860 		  reloadnum
5861 		    = push_reload (x, x, loc, loc,
5862 				   context_reg_class,
5863 				   GET_MODE (x), GET_MODE (x), 0, 0,
5864 				   opnum, RELOAD_OTHER);
5865 		}
5866 	      else
5867 		{
5868 		  reloadnum
5869 		    = push_reload (x, x, loc, (rtx*) 0,
5870 				   context_reg_class,
5871 				   GET_MODE (x), GET_MODE (x), 0, 0,
5872 				   opnum, type);
5873 		  rld[reloadnum].inc
5874 		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5875 
5876 		  value = 1;
5877 		}
5878 
5879 	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5880 				     reloadnum);
5881 	    }
5882 	  return value;
5883 	}
5884       return 0;
5885 
5886     case TRUNCATE:
5887     case SIGN_EXTEND:
5888     case ZERO_EXTEND:
5889       /* Look for parts to reload in the inner expression and reload them
5890 	 too, in addition to this operation.  Reloading all inner parts in
5891 	 addition to this one shouldn't be necessary, but at this point,
5892 	 we don't know if we can possibly omit any part that *can* be
5893 	 reloaded.  Targets that are better off reloading just either part
5894 	 (or perhaps even a different part of an outer expression), should
5895 	 define LEGITIMIZE_RELOAD_ADDRESS.  */
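      /* For example, for an address containing (zero_extend:DI (reg:SI N)),
	 the SImode inner part is processed first and then the whole
	 extension is reloaded into a register of the context's class.  */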
5896       find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5897 			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5898 			      type, ind_levels, insn);
5899       push_reload (x, NULL_RTX, loc, (rtx*) 0,
5900 		   context_reg_class,
5901 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5902       return 1;
5903 
5904     case MEM:
5905       /* This is probably the result of a substitution, by eliminate_regs, of
5906 	 an equivalent address for a pseudo that was not allocated to a hard
5907 	 register.  Verify that the specified address is valid and reload it
5908 	 into a register.
5909 
5910 	 Since we know we are going to reload this item, don't decrement for
5911 	 the indirection level.
5912 
5913 	 Note that this is actually conservative:  it would be slightly more
5914 	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5915 	 reload1.cc here.  */
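      /* For example, if pseudo N in (plus (reg N) (const_int 4)) was
	 replaced by its stack-slot equivalent, the address now contains
	 (mem (plus (reg fp) (const_int -16))); the MEM's own address is
	 verified first and the MEM is then loaded into a register.  */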
5916 
5917       find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5918 			    opnum, ADDR_TYPE (type), ind_levels, insn);
5919       push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5920 		   context_reg_class,
5921 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5922       return 1;
5923 
5924     case REG:
5925       {
5926 	int regno = REGNO (x);
5927 
5928 	if (reg_equiv_constant (regno) != 0)
5929 	  {
5930 	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5931 				       context_reg_class,
5932 				       GET_MODE (x), opnum, type, ind_levels);
5933 	    return 1;
5934 	  }
5935 
5936 #if 0 /* This might break the code in reload1.cc that deletes a prior
5937 	 output-reload feeding this insn.  */
5938 	if (reg_equiv_mem (regno) != 0)
5939 	  {
5940 	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5941 			 context_reg_class,
5942 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5943 	    return 1;
5944 	  }
5945 #endif
5946 
5947 	if (reg_equiv_memory_loc (regno)
5948 	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5949 	  {
5950 	    rtx tem = make_memloc (x, regno);
5951 	    if (reg_equiv_address (regno) != 0
5952 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5953 	      {
5954 		x = tem;
5955 		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5956 				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
5957 				      ind_levels, insn);
5958 		if (!rtx_equal_p (x, tem))
5959 		  push_reg_equiv_alt_mem (regno, x);
5960 	      }
5961 	  }
5962 
5963 	if (reg_renumber[regno] >= 0)
5964 	  regno = reg_renumber[regno];
5965 
5966 	if (regno >= FIRST_PSEUDO_REGISTER
5967 	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5968 				    index_code))
5969 	  {
5970 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5971 			 context_reg_class,
5972 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5973 	    return 1;
5974 	  }
5975 
5976 	/* If a register appearing in an address is the subject of a CLOBBER
5977 	   in this insn, reload it into some other register to be safe.
5978 	   The CLOBBER is supposed to make the register unavailable
5979 	   from before this insn to after it.  */
5980 	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5981 	  {
5982 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5983 			 context_reg_class,
5984 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5985 	    return 1;
5986 	  }
5987       }
5988       return 0;
5989 
5990     case SUBREG:
5991       if (REG_P (SUBREG_REG (x)))
5992 	{
5993 	  /* If this is a SUBREG of a hard register and the resulting register
5994 	     is of the wrong class, reload the whole SUBREG.  This avoids
5995 	     needless copies if SUBREG_REG is multi-word.  */
5996 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5997 	    {
5998 	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5999 
6000 	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6001 				       index_code))
6002 		{
6003 		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
6004 			       context_reg_class,
6005 			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6006 		  return 1;
6007 		}
6008 	    }
6009 	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6010 	     is larger than the class size, then reload the whole SUBREG.  */
6011 	  else
6012 	    {
6013 	      enum reg_class rclass = context_reg_class;
6014 	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6015 		  > reg_class_size[(int) rclass])
6016 		{
6017 		  /* If the inner register will be replaced by a memory
6018 		     reference, we can do this only if we can replace the
6019 		     whole subreg by a (narrower) memory reference.  If
6020 		     this is not possible, fall through and reload just
6021 		     the inner register (including address reloads).  */
6022 		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6023 		    {
6024 		      rtx tem = find_reloads_subreg_address (x, opnum,
6025 							     ADDR_TYPE (type),
6026 							     ind_levels, insn,
6027 							     NULL);
6028 		      if (tem)
6029 			{
6030 			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6031 				       GET_MODE (tem), VOIDmode, 0, 0,
6032 				       opnum, type);
6033 			  return 1;
6034 			}
6035 		    }
6036 		  else
6037 		    {
6038 		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6039 				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6040 		      return 1;
6041 		    }
6042 		}
6043 	    }
6044 	}
6045       break;
6046 
6047     default:
6048       break;
6049     }
6050 
6051   {
6052     const char *fmt = GET_RTX_FORMAT (code);
6053     int i;
6054 
6055     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6056       {
6057 	if (fmt[i] == 'e')
6058 	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6059 	     we get here.  */
6060 	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6061 				  code, SCRATCH, &XEXP (x, i),
6062 				  opnum, type, ind_levels, insn);
6063       }
6064   }
6065 
6066 #undef REG_OK_FOR_CONTEXT
6067   return 0;
6068 }
6069 
6070 /* X, which is found at *LOC, is a part of an address that needs to be
6071    reloaded into a register of class RCLASS.  If X is a constant, or if
6072    X is a PLUS that contains a constant, check that the constant is a
6073    legitimate operand and that we are supposed to be able to load
6074    it into the register.
6075 
6076    If not, force the constant into memory and reload the MEM instead.
6077 
6078    MODE is the mode to use, in case X is an integer constant.
6079 
6080    OPNUM and TYPE describe the purpose of any reloads made.
6081 
6082    IND_LEVELS says how many levels of indirect addressing this machine
6083    supports.  */
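/* For example, if (const_int N) is not a legitimate constant for MODE on
   the target (or cannot be loaded directly into RCLASS), it is forced into
   the constant pool and the resulting (mem (symbol_ref ...)) is reloaded
   instead; for (plus (reg) (const_int N)) only the constant term is
   replaced in this way.  */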
6084 
6085 static void
6086 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6087 			   machine_mode mode, int opnum,
6088 			   enum reload_type type, int ind_levels)
6089 {
6090   if (CONSTANT_P (x)
6091       && (!targetm.legitimate_constant_p (mode, x)
6092 	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6093     {
6094       x = force_const_mem (mode, x);
6095       find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6096 			    opnum, type, ind_levels, 0);
6097     }
6098 
6099   else if (GET_CODE (x) == PLUS
6100 	   && CONSTANT_P (XEXP (x, 1))
6101 	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6102 	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6103 		   == NO_REGS))
6104     {
6105       rtx tem;
6106 
6107       tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6108       x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6109       find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6110 			    opnum, type, ind_levels, 0);
6111     }
6112 
6113   push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6114 	       mode, VOIDmode, 0, 0, opnum, type);
6115 }
6116 
6117 /* X, a subreg of a pseudo, is a part of an address that needs to be
6118    reloaded, and the pseudo is equivalent to a memory location.
6119 
6120    Attempt to replace the whole subreg by a (possibly narrower or wider)
6121    memory reference.  If this is possible, return this new memory
6122    reference, and push all required address reloads.  Otherwise,
6123    return NULL.
6124 
6125    OPNUM and TYPE identify the purpose of the reload.
6126 
6127    IND_LEVELS says how many levels of indirect addressing are
6128    supported at this point in the address.
6129 
6130    INSN, if nonzero, is the insn in which we do the reload.  It is used
6131    to determine where to put USEs for pseudos that we have to replace with
6132    stack slots.  */
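/* For instance, (subreg:QI (reg:SI N) 3), where pseudo N is equivalent to a
   stack slot, can be replaced by a QImode MEM referencing byte 3 of that
   slot; any reloads needed for the narrower address are then pushed.  */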
6133 
6134 static rtx
6135 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6136 			     int ind_levels, rtx_insn *insn,
6137 			     int *address_reloaded)
6138 {
6139   machine_mode outer_mode = GET_MODE (x);
6140   machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6141   int regno = REGNO (SUBREG_REG (x));
6142   int reloaded = 0;
6143   rtx tem, orig;
6144   poly_int64 offset;
6145 
6146   gcc_assert (reg_equiv_memory_loc (regno) != 0);
6147 
6148   /* We cannot replace the subreg with a modified memory reference if:
6149 
6150      - we have a paradoxical subreg that implicitly acts as a zero or
6151        sign extension operation due to LOAD_EXTEND_OP;
6152 
6153      - we have a subreg that is implicitly supposed to act on the full
6154        register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6155 
6156      - the address of the equivalent memory location is mode-dependent;  or
6157 
6158      - we have a paradoxical subreg and the resulting memory is not
6159        sufficiently aligned to allow access in the wider mode.
6160 
6161     In addition, we choose not to perform the replacement for *any*
6162     paradoxical subreg, even if it were possible in principle.  This
6163     is to avoid generating wider memory references than necessary.
6164 
6165     This corresponds to how previous versions of reload used to handle
6166     paradoxical subregs where no address reload was required.  */
6167 
6168   if (paradoxical_subreg_p (x))
6169     return NULL;
6170 
6171   if (WORD_REGISTER_OPERATIONS
6172       && partial_subreg_p (outer_mode, inner_mode)
6173       && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6174 				       GET_MODE_SIZE (inner_mode) - 1,
6175 				       UNITS_PER_WORD))
6176     return NULL;
6177 
6178   /* Since we don't attempt to handle paradoxical subregs, we can just
6179      call into simplify_subreg, which will handle all remaining checks
6180      for us.  */
6181   orig = make_memloc (SUBREG_REG (x), regno);
6182   offset = SUBREG_BYTE (x);
6183   tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6184   if (!tem || !MEM_P (tem))
6185     return NULL;
6186 
6187   /* Now push all required address reloads, if any.  */
6188   reloaded = find_reloads_address (GET_MODE (tem), &tem,
6189 				   XEXP (tem, 0), &XEXP (tem, 0),
6190 				   opnum, type, ind_levels, insn);
6191   /* ??? Do we need to handle nonzero offsets somehow?  */
6192   if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6193     push_reg_equiv_alt_mem (regno, tem);
6194 
6195   /* For some processors an address may be valid in the original mode but
6196      not in a smaller mode.  For example, ARM accepts a scaled index register
6197      in SImode but not in HImode.  Note that this is only a problem if the
6198      address in reg_equiv_mem is already invalid in the new mode; other
6199      cases would be fixed by find_reloads_address as usual.
6200 
6201      ??? We attempt to handle such cases here by doing an additional reload
6202      of the full address after the usual processing by find_reloads_address.
6203      Note that this may not work in the general case, but it seems to cover
6204      the cases where this situation currently occurs.  A more general fix
6205      might be to reload the *value* instead of the address, but this would
6206      not be expected by the callers of this routine as-is.
6207 
6208      If find_reloads_address has already completely replaced the address, there
6209      is nothing further to do.  */
6210   if (reloaded == 0
6211       && reg_equiv_mem (regno) != 0
6212       && !strict_memory_address_addr_space_p
6213 		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6214 		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6215     {
6216       push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6217 		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6218 				   MEM, SCRATCH),
6219 		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6220       reloaded = 1;
6221     }
6222 
6223   /* If this is not a toplevel operand, find_reloads doesn't see this
6224      substitution.  We have to emit a USE of the pseudo so that
6225      delete_output_reload can see it.  */
6226   if (replace_reloads && recog_data.operand[opnum] != x)
6227     /* We mark the USE with QImode so that we recognize it as one that
6228        can be safely deleted at the end of reload.  */
6229     PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6230 	      QImode);
6231 
6232   if (address_reloaded)
6233     *address_reloaded = reloaded;
6234 
6235   return tem;
6236 }
6237 
6238 /* Substitute into the current INSN the registers into which we have reloaded
6239    the things that need reloading.  The array `replacements'
6240    contains the locations of all pointers that must be changed
6241    and says what to replace them with.
6242 
6243    Nothing is returned; the recorded locations are modified in place.  */
6244 
6245 void
6246 subst_reloads (rtx_insn *insn)
6247 {
6248   int i;
6249 
6250   for (i = 0; i < n_replacements; i++)
6251     {
6252       struct replacement *r = &replacements[i];
6253       rtx reloadreg = rld[r->what].reg_rtx;
6254       if (reloadreg)
6255 	{
6256 #ifdef DEBUG_RELOAD
6257 	  /* This checking takes a very long time on some platforms
6258 	     causing the gcc.c-torture/compile/limits-fnargs.c test
6259 	     to time out during testing.  See PR 31850.
6260 
6261 	     Internal consistency test.  Check that we don't modify
6262 	     anything in the equivalence arrays.  Whenever something from
6263 	     those arrays needs to be reloaded, it must be unshared before
6264 	     being substituted into; the equivalence must not be modified.
6265 	     Otherwise, if the equivalence is used after that, it will
6266 	     have been modified, and the thing substituted (probably a
6267 	     register) is likely overwritten and not a usable equivalence.  */
6268 	  int check_regno;
6269 
6270 	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6271 	    {
6272 #define CHECK_MODF(ARRAY)						\
6273 	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6274 			  || !loc_mentioned_in_p (r->where,		\
6275 						  (*reg_equivs)[check_regno].ARRAY))
6276 
6277 	      CHECK_MODF (constant);
6278 	      CHECK_MODF (memory_loc);
6279 	      CHECK_MODF (address);
6280 	      CHECK_MODF (mem);
6281 #undef CHECK_MODF
6282 	    }
6283 #endif /* DEBUG_RELOAD */
6284 
6285 	  /* If we're replacing a LABEL_REF with a register, there must
6286 	     already be an indication (to e.g. flow) which label this
6287 	     register refers to.  */
6288 	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6289 		      || !JUMP_P (insn)
6290 		      || find_reg_note (insn,
6291 					REG_LABEL_OPERAND,
6292 					XEXP (*r->where, 0))
6293 		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6294 
6295 	  /* Encapsulate RELOADREG so its machine mode matches what
6296 	     used to be there.  Note that gen_lowpart_common will
6297 	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6298 	     will always be a REG here.  */
6299 	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6300 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6301 
6302 	  *r->where = reloadreg;
6303 	}
6304       /* If reload got no reg and isn't optional, something's wrong.  */
6305       else
6306 	gcc_assert (rld[r->what].optional);
6307     }
6308 }
6309 
6310 /* Make a copy of any replacements being done into X and move those
6311    copies to locations in Y, a copy of X.  */
6312 
6313 void
6314 copy_replacements (rtx x, rtx y)
6315 {
6316   copy_replacements_1 (&x, &y, n_replacements);
6317 }
6318 
6319 static void
6320 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6321 {
6322   int i, j;
6323   rtx x, y;
6324   struct replacement *r;
6325   enum rtx_code code;
6326   const char *fmt;
6327 
6328   for (j = 0; j < orig_replacements; j++)
6329     if (replacements[j].where == px)
6330       {
6331 	r = &replacements[n_replacements++];
6332 	r->where = py;
6333 	r->what = replacements[j].what;
6334 	r->mode = replacements[j].mode;
6335       }
6336 
6337   x = *px;
6338   y = *py;
6339   code = GET_CODE (x);
6340   fmt = GET_RTX_FORMAT (code);
6341 
6342   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6343     {
6344       if (fmt[i] == 'e')
6345 	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6346       else if (fmt[i] == 'E')
6347 	for (j = XVECLEN (x, i); --j >= 0; )
6348 	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6349 			       orig_replacements);
6350     }
6351 }
6352 
6353 /* Change any replacements being done to *X to be done to *Y.  */
6354 
6355 void
6356 move_replacements (rtx *x, rtx *y)
6357 {
6358   int i;
6359 
6360   for (i = 0; i < n_replacements; i++)
6361     if (replacements[i].where == x)
6362       replacements[i].where = y;
6363 }
6364 
6365 /* If LOC was scheduled to be replaced by something, return the replacement.
6366    Otherwise, return *LOC.  */
6367 
6368 rtx
6369 find_replacement (rtx *loc)
6370 {
6371   struct replacement *r;
6372 
6373   for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6374     {
6375       rtx reloadreg = rld[r->what].reg_rtx;
6376 
6377       if (reloadreg && r->where == loc)
6378 	{
6379 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6380 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6381 
6382 	  return reloadreg;
6383 	}
6384       else if (reloadreg && GET_CODE (*loc) == SUBREG
6385 	       && r->where == &SUBREG_REG (*loc))
6386 	{
6387 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6388 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6389 
6390 	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6391 				      GET_MODE (SUBREG_REG (*loc)),
6392 				      SUBREG_BYTE (*loc));
6393 	}
6394     }
6395 
6396   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6397      what's inside and make a new rtl if so.  */
6398   if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6399       || GET_CODE (*loc) == MULT)
6400     {
6401       rtx x = find_replacement (&XEXP (*loc, 0));
6402       rtx y = find_replacement (&XEXP (*loc, 1));
6403 
6404       if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6405 	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6406     }
6407 
6408   return *loc;
6409 }
6410 
6411 /* Return nonzero if register in range [REGNO, ENDREGNO)
6412    appears either explicitly or implicitly in X
6413    other than being stored into (except for earlyclobber operands).
6414 
6415    References contained within the substructure at LOC do not count.
6416    LOC may be zero, meaning don't ignore anything.
6417 
6418    This is similar to refers_to_regno_p in rtlanal.cc except that we
6419    look at equivalences for pseudos that didn't get hard registers.  */
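/* For example, a use of a pseudo that was spilled to
   (mem (plus (reg fp) (const_int -8))) counts as a reference to the
   frame pointer, since the equivalence is examined recursively.  */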
6420 
6421 static int
6422 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6423 			      rtx x, rtx *loc)
6424 {
6425   int i;
6426   unsigned int r;
6427   RTX_CODE code;
6428   const char *fmt;
6429 
6430   if (x == 0)
6431     return 0;
6432 
6433  repeat:
6434   code = GET_CODE (x);
6435 
6436   switch (code)
6437     {
6438     case REG:
6439       r = REGNO (x);
6440 
6441       /* If this is a pseudo, a hard register must not have been allocated.
6442 	 X must therefore either be a constant or be in memory.  */
6443       if (r >= FIRST_PSEUDO_REGISTER)
6444 	{
6445 	  if (reg_equiv_memory_loc (r))
6446 	    return refers_to_regno_for_reload_p (regno, endregno,
6447 						 reg_equiv_memory_loc (r),
6448 						 (rtx*) 0);
6449 
6450 	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6451 	  return 0;
6452 	}
6453 
6454       return endregno > r && regno < END_REGNO (x);
6455 
6456     case SUBREG:
6457       /* If this is a SUBREG of a hard reg, we can see exactly which
6458 	 registers are being modified.  Otherwise, handle normally.  */
6459       if (REG_P (SUBREG_REG (x))
6460 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6461 	{
6462 	  unsigned int inner_regno = subreg_regno (x);
6463 	  unsigned int inner_endregno
6464 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6465 			     ? subreg_nregs (x) : 1);
6466 
6467 	  return endregno > inner_regno && regno < inner_endregno;
6468 	}
6469       break;
6470 
6471     case CLOBBER:
6472     case SET:
6473       if (&SET_DEST (x) != loc
6474 	  /* Note setting a SUBREG counts as referring to the REG it is in for
6475 	     a pseudo but not for hard registers since we can
6476 	     treat each word individually.  */
6477 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6478 	       && loc != &SUBREG_REG (SET_DEST (x))
6479 	       && REG_P (SUBREG_REG (SET_DEST (x)))
6480 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6481 	       && refers_to_regno_for_reload_p (regno, endregno,
6482 						SUBREG_REG (SET_DEST (x)),
6483 						loc))
6484 	      /* If the output is an earlyclobber operand, this is
6485 		 a conflict.  */
6486 	      || ((!REG_P (SET_DEST (x))
6487 		   || earlyclobber_operand_p (SET_DEST (x)))
6488 		  && refers_to_regno_for_reload_p (regno, endregno,
6489 						   SET_DEST (x), loc))))
6490 	return 1;
6491 
6492       if (code == CLOBBER || loc == &SET_SRC (x))
6493 	return 0;
6494       x = SET_SRC (x);
6495       goto repeat;
6496 
6497     default:
6498       break;
6499     }
6500 
6501   /* X does not match, so try its subexpressions.  */
6502 
6503   fmt = GET_RTX_FORMAT (code);
6504   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6505     {
6506       if (fmt[i] == 'e' && loc != &XEXP (x, i))
6507 	{
6508 	  if (i == 0)
6509 	    {
6510 	      x = XEXP (x, 0);
6511 	      goto repeat;
6512 	    }
6513 	  else
6514 	    if (refers_to_regno_for_reload_p (regno, endregno,
6515 					      XEXP (x, i), loc))
6516 	      return 1;
6517 	}
6518       else if (fmt[i] == 'E')
6519 	{
6520 	  int j;
6521 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6522 	    if (loc != &XVECEXP (x, i, j)
6523 		&& refers_to_regno_for_reload_p (regno, endregno,
6524 						 XVECEXP (x, i, j), loc))
6525 	      return 1;
6526 	}
6527     }
6528   return 0;
6529 }
6530 
6531 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6532    we check if any register number in X conflicts with the relevant register
6533    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6534    contains a MEM (we don't bother checking for memory addresses that can't
6535    conflict because we expect this to be a rare case.
6536    conflict because we expect this to be a rare case).
6537    This function is similar to reg_overlap_mentioned_p in rtlanal.cc except
6538    that we look at equivalences for pseudos that didn't get hard registers.  */
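/* For example, if X is a pseudo that was spilled to memory, modifying X can
   only affect IN if IN itself mentions a MEM (or a spilled pseudo), so that
   is all we check for.  */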
6539 
6540 int
6541 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6542 {
6543   int regno, endregno;
6544 
6545   /* Overly conservative.  */
6546   if (GET_CODE (x) == STRICT_LOW_PART
6547       || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6548     x = XEXP (x, 0);
6549 
6550   /* If either argument is a constant, then modifying X cannot affect IN.  */
6551   if (CONSTANT_P (x) || CONSTANT_P (in))
6552     return 0;
6553   else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6554     return refers_to_mem_for_reload_p (in);
6555   else if (GET_CODE (x) == SUBREG)
6556     {
6557       regno = REGNO (SUBREG_REG (x));
6558       if (regno < FIRST_PSEUDO_REGISTER)
6559 	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6560 				      GET_MODE (SUBREG_REG (x)),
6561 				      SUBREG_BYTE (x),
6562 				      GET_MODE (x));
6563       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6564 			  ? subreg_nregs (x) : 1);
6565 
6566       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6567     }
6568   else if (REG_P (x))
6569     {
6570       regno = REGNO (x);
6571 
6572       /* If this is a pseudo, it must not have been assigned a hard register.
6573 	 Therefore, it must either be in memory or be a constant.  */
6574 
6575       if (regno >= FIRST_PSEUDO_REGISTER)
6576 	{
6577 	  if (reg_equiv_memory_loc (regno))
6578 	    return refers_to_mem_for_reload_p (in);
6579 	  gcc_assert (reg_equiv_constant (regno));
6580 	  return 0;
6581 	}
6582 
6583       endregno = END_REGNO (x);
6584 
6585       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6586     }
6587   else if (MEM_P (x))
6588     return refers_to_mem_for_reload_p (in);
6589   else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC)
6590     return reg_mentioned_p (x, in);
6591   else
6592     {
6593       gcc_assert (GET_CODE (x) == PLUS);
6594 
6595       /* We actually want to know if X is mentioned somewhere inside IN.
6596 	 We must not say that (plus (sp) (const_int 124)) is in
6597 	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6598 	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6599 	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6600       while (MEM_P (in))
6601 	in = XEXP (in, 0);
6602       if (REG_P (in))
6603 	return 0;
6604       else if (GET_CODE (in) == PLUS)
6605 	return (rtx_equal_p (x, in)
6606 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6607 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6608       else
6609 	return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6610 		|| reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6611     }
6612 }
6613 
6614 /* Return nonzero if anything in X contains a MEM.  Look also for pseudo
6615    registers.  */
6616 
6617 static int
6618 refers_to_mem_for_reload_p (rtx x)
6619 {
6620   const char *fmt;
6621   int i;
6622 
6623   if (MEM_P (x))
6624     return 1;
6625 
6626   if (REG_P (x))
6627     return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6628 	    && reg_equiv_memory_loc (REGNO (x)));
6629 
6630   fmt = GET_RTX_FORMAT (GET_CODE (x));
6631   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6632     if (fmt[i] == 'e'
6633 	&& (MEM_P (XEXP (x, i))
6634 	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6635       return 1;
6636 
6637   return 0;
6638 }
6639 
6640 /* Check the insns before INSN to see if there is a suitable register
6641    containing the same value as GOAL.
6642    If OTHER is -1, look for a register in class RCLASS.
6643    Otherwise, just see if register number OTHER shares GOAL's value.
6644 
6645    Return an rtx for the register found, or zero if none is found.
6646 
6647    If RELOAD_REG_P is (short *)1,
6648    we reject any hard reg that appears in reload_reg_rtx
6649    because such a hard reg is also needed coming into this insn.
6650 
6651    If RELOAD_REG_P is any other nonzero value,
6652    it is a vector indexed by hard reg number
6653    and we reject any hard reg whose element in the vector is nonnegative
6654    as well as any that appears in reload_reg_rtx.
6655 
6656    If GOAL is zero, then GOALREG is a register number; we look
6657    for an equivalent for that register.
6658 
6659    MODE is the machine mode of the value we want an equivalence for.
6660    If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6661 
6662    This function is used by jump.cc as well as in the reload pass.
6663 
6664    If GOAL is the sum of the stack pointer and a constant, we treat it
6665    as if it were a constant except that sp is required to be unchanging.  */
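/* For example, if a preceding insn is (set (reg R) (mem X)) and GOAL is an
   equal (mem X), then (reg R) can be returned, provided R is a suitable
   hard register and neither R nor the memory at X is altered between that
   insn and INSN.  */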
6666 
6667 rtx
6668 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6669 		short *reload_reg_p, int goalreg, machine_mode mode)
6670 {
6671   rtx_insn *p = insn;
6672   rtx goaltry, valtry, value;
6673   rtx_insn *where;
6674   rtx pat;
6675   int regno = -1;
6676   int valueno;
6677   int goal_mem = 0;
6678   int goal_const = 0;
6679   int goal_mem_addr_varies = 0;
6680   int need_stable_sp = 0;
6681   int nregs;
6682   int valuenregs;
6683   int num = 0;
6684 
6685   if (goal == 0)
6686     regno = goalreg;
6687   else if (REG_P (goal))
6688     regno = REGNO (goal);
6689   else if (MEM_P (goal))
6690     {
6691       enum rtx_code code = GET_CODE (XEXP (goal, 0));
6692       if (MEM_VOLATILE_P (goal))
6693 	return 0;
6694       if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6695 	return 0;
6696       /* An address with side effects must be reexecuted.  */
6697       switch (code)
6698 	{
6699 	case POST_INC:
6700 	case PRE_INC:
6701 	case POST_DEC:
6702 	case PRE_DEC:
6703 	case POST_MODIFY:
6704 	case PRE_MODIFY:
6705 	  return 0;
6706 	default:
6707 	  break;
6708 	}
6709       goal_mem = 1;
6710     }
6711   else if (CONSTANT_P (goal))
6712     goal_const = 1;
6713   else if (GET_CODE (goal) == PLUS
6714 	   && XEXP (goal, 0) == stack_pointer_rtx
6715 	   && CONSTANT_P (XEXP (goal, 1)))
6716     goal_const = need_stable_sp = 1;
6717   else if (GET_CODE (goal) == PLUS
6718 	   && XEXP (goal, 0) == frame_pointer_rtx
6719 	   && CONSTANT_P (XEXP (goal, 1)))
6720     goal_const = 1;
6721   else
6722     return 0;
6723 
6724   num = 0;
6725   /* Scan insns back from INSN, looking for one that copies
6726      a value into or out of GOAL.
6727      Stop and give up if we reach a label.  */
6728 
6729   while (1)
6730     {
6731       p = PREV_INSN (p);
6732       if (p && DEBUG_INSN_P (p))
6733 	continue;
6734       num++;
6735       if (p == 0 || LABEL_P (p)
6736 	  || num > param_max_reload_search_insns)
6737 	return 0;
6738 
6739       /* Don't reuse register contents from before a setjmp-type
6740 	 function call; on the second return (from the longjmp) it
6741 	 might have been clobbered by a later reuse.  It doesn't
6742 	 seem worthwhile to actually go and see if it is actually
6743 	 reused even if that information would be readily available;
6744 	 just don't reuse it across the setjmp call.  */
6745       if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6746 	return 0;
6747 
6748       if (NONJUMP_INSN_P (p)
6749 	  /* If we don't want spill regs ...  */
6750 	  && (! (reload_reg_p != 0
6751 		 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6752 	      /* ... then ignore insns introduced by reload; they aren't
6753 		 useful and can cause results in reload_as_needed to be
6754 		 different from what they were when calculating the need for
6755 		 spills.  If we notice an input-reload insn here, we will
6756 		 reject it below, but it might hide a usable equivalent.
6757 		 That makes bad code.  It may even fail: perhaps no reg was
6758 		 spilled for this insn because it was assumed we would find
6759 		 that equivalent.  */
6760 	      || INSN_UID (p) < reload_first_uid))
6761 	{
6762 	  rtx tem;
6763 	  pat = single_set (p);
6764 
6765 	  /* First check for something that sets some reg equal to GOAL.  */
6766 	  if (pat != 0
6767 	      && ((regno >= 0
6768 		   && true_regnum (SET_SRC (pat)) == regno
6769 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6770 		  ||
6771 		  (regno >= 0
6772 		   && true_regnum (SET_DEST (pat)) == regno
6773 		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6774 		  ||
6775 		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6776 		   /* When looking for stack pointer + const,
6777 		      make sure we don't use a stack adjust.  */
6778 		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6779 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6780 		  || (goal_mem
6781 		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6782 		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6783 		  || (goal_mem
6784 		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6785 		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6786 		  /* If we are looking for a constant,
6787 		     and something equivalent to that constant was copied
6788 		     into a reg, we can use that reg.  */
6789 		  || (goal_const && REG_NOTES (p) != 0
6790 		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6791 		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6792 			   && (valueno
6793 			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6794 			  || (REG_P (SET_DEST (pat))
6795 			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6796 			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6797 			      && CONST_INT_P (goal)
6798 			      && (goaltry = operand_subword (XEXP (tem, 0), 0,
6799 							     0, VOIDmode)) != 0
6800 			      && rtx_equal_p (goal, goaltry)
6801 			      && (valtry
6802 				  = operand_subword (SET_DEST (pat), 0, 0,
6803 						     VOIDmode))
6804 			      && (valueno = true_regnum (valtry)) >= 0)))
6805 		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6806 							  NULL_RTX))
6807 		      && REG_P (SET_DEST (pat))
6808 		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6809 		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6810 		      && CONST_INT_P (goal)
6811 		      && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6812 						     VOIDmode)) != 0
6813 		      && rtx_equal_p (goal, goaltry)
6814 		      && (valtry
6815 			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6816 		      && (valueno = true_regnum (valtry)) >= 0)))
6817 	    {
6818 	      if (other >= 0)
6819 		{
6820 		  if (valueno != other)
6821 		    continue;
6822 		}
6823 	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6824 		continue;
6825 	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6826 					  mode, valueno))
6827 		continue;
6828 	      value = valtry;
6829 	      where = p;
6830 	      break;
6831 	    }
6832 	}
6833     }
6834 
6835   /* We found a previous insn copying GOAL into a suitable other reg VALUE
6836      (or copying VALUE into GOAL, if GOAL is also a register).
6837      Now verify that VALUE is really valid.  */
6838 
6839   /* VALUENO is the register number of VALUE; a hard register.  */
6840 
6841   /* Don't try to re-use something that is killed in this insn.  We want
6842      to be able to trust REG_UNUSED notes.  */
6843   if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6844     return 0;
6845 
6846   /* If we propose to get the value from the stack pointer or if GOAL is
6847      a MEM based on the stack pointer, we need a stable SP.  */
6848   if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6849       || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6850 							  goal)))
6851     need_stable_sp = 1;
6852 
6853   /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6854   if (GET_MODE (value) != mode)
6855     return 0;
6856 
6857   /* Reject VALUE if it was loaded from GOAL
6858      and is also a register that appears in the address of GOAL.  */
6859 
6860   if (goal_mem && value == SET_DEST (single_set (where))
6861       && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6862 				       goal, (rtx*) 0))
6863     return 0;
6864 
6865   /* Reject registers that overlap GOAL.  */
6866 
6867   if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6868     nregs = hard_regno_nregs (regno, mode);
6869   else
6870     nregs = 1;
6871   valuenregs = hard_regno_nregs (valueno, mode);
6872 
6873   if (!goal_mem && !goal_const
6874       && regno + nregs > valueno && regno < valueno + valuenregs)
6875     return 0;
6876 
6877   /* Reject VALUE if it is one of the regs reserved for reloads.
6878      Reload1 knows how to reuse them anyway, and it would get
6879      confused if we allocated one without its knowledge.
6880      (Now that insns introduced by reload are ignored above,
6881      this case shouldn't happen, but I'm not positive.)  */
6882 
6883   if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6884     {
6885       int i;
6886       for (i = 0; i < valuenregs; ++i)
6887 	if (reload_reg_p[valueno + i] >= 0)
6888 	  return 0;
6889     }
6890 
6891   /* Reject VALUE if it is a register being used for an input reload
6892      even if it is not one of those reserved.  */
6893 
6894   if (reload_reg_p != 0)
6895     {
6896       int i;
6897       for (i = 0; i < n_reloads; i++)
6898 	if (rld[i].reg_rtx != 0
6899 	    && rld[i].in
6900 	    && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6901 	    && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6902 	  return 0;
6903     }
6904 
6905   if (goal_mem)
6906     /* We must treat frame pointer as varying here,
6907        since it can vary--in a nonlocal goto as generated by expand_goto.  */
6908     goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6909 
6910   /* Now verify that the values of GOAL and VALUE remain unaltered
6911      until INSN is reached.  */
6912 
6913   p = insn;
6914   while (1)
6915     {
6916       p = PREV_INSN (p);
6917       if (p == where)
6918 	return value;
6919 
6920       /* Don't trust the conversion past a function call
6921 	 if either of the two is in a call-clobbered register, or memory.  */
6922       if (CALL_P (p))
6923 	{
6924 	  if (goal_mem || need_stable_sp)
6925 	    return 0;
6926 
6927 	  function_abi callee_abi = insn_callee_abi (p);
6928 	  if (regno >= 0
6929 	      && regno < FIRST_PSEUDO_REGISTER
6930 	      && callee_abi.clobbers_reg_p (mode, regno))
6931 	    return 0;
6932 
6933 	  if (valueno >= 0
6934 	      && valueno < FIRST_PSEUDO_REGISTER
6935 	      && callee_abi.clobbers_reg_p (mode, valueno))
6936 	    return 0;
6937 	}
6938 
6939       if (INSN_P (p))
6940 	{
6941 	  pat = PATTERN (p);
6942 
6943 	  /* Watch out for unspec_volatile, and volatile asms.  */
6944 	  if (volatile_insn_p (pat))
6945 	    return 0;
6946 
6947 	  /* If this insn P stores in either GOAL or VALUE, return 0.
6948 	     If GOAL is a memory ref and this insn writes memory, return 0.
6949 	     If GOAL is a memory ref and its address is not constant,
6950 	     and this insn P changes a register used in GOAL, return 0.  */
6951 
6952 	  if (GET_CODE (pat) == COND_EXEC)
6953 	    pat = COND_EXEC_CODE (pat);
6954 	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6955 	    {
6956 	      rtx dest = SET_DEST (pat);
6957 	      while (GET_CODE (dest) == SUBREG
6958 		     || GET_CODE (dest) == ZERO_EXTRACT
6959 		     || GET_CODE (dest) == STRICT_LOW_PART)
6960 		dest = XEXP (dest, 0);
6961 	      if (REG_P (dest))
6962 		{
6963 		  int xregno = REGNO (dest);
6964 		  int end_xregno = END_REGNO (dest);
6965 		  if (xregno < regno + nregs && end_xregno > regno)
6966 		    return 0;
6967 		  if (xregno < valueno + valuenregs
6968 		      && end_xregno > valueno)
6969 		    return 0;
6970 		  if (goal_mem_addr_varies
6971 		      && reg_overlap_mentioned_for_reload_p (dest, goal))
6972 		    return 0;
6973 		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6974 		    return 0;
6975 		}
6976 	      else if (goal_mem && MEM_P (dest)
6977 		       && ! push_operand (dest, GET_MODE (dest)))
6978 		return 0;
6979 	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6980 		       && reg_equiv_memory_loc (regno) != 0)
6981 		return 0;
6982 	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6983 		return 0;
6984 	    }
6985 	  else if (GET_CODE (pat) == PARALLEL)
6986 	    {
6987 	      int i;
6988 	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6989 		{
6990 		  rtx v1 = XVECEXP (pat, 0, i);
6991 		  if (GET_CODE (v1) == COND_EXEC)
6992 		    v1 = COND_EXEC_CODE (v1);
6993 		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6994 		    {
6995 		      rtx dest = SET_DEST (v1);
6996 		      while (GET_CODE (dest) == SUBREG
6997 			     || GET_CODE (dest) == ZERO_EXTRACT
6998 			     || GET_CODE (dest) == STRICT_LOW_PART)
6999 			dest = XEXP (dest, 0);
7000 		      if (REG_P (dest))
7001 			{
7002 			  int xregno = REGNO (dest);
7003 			  int end_xregno = END_REGNO (dest);
7004 			  if (xregno < regno + nregs
7005 			      && end_xregno > regno)
7006 			    return 0;
7007 			  if (xregno < valueno + valuenregs
7008 			      && end_xregno > valueno)
7009 			    return 0;
7010 			  if (goal_mem_addr_varies
7011 			      && reg_overlap_mentioned_for_reload_p (dest,
7012 								     goal))
7013 			    return 0;
7014 			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7015 			    return 0;
7016 			}
7017 		      else if (goal_mem && MEM_P (dest)
7018 			       && ! push_operand (dest, GET_MODE (dest)))
7019 			return 0;
7020 		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7021 			       && reg_equiv_memory_loc (regno) != 0)
7022 			return 0;
7023 		      else if (need_stable_sp
7024 			       && push_operand (dest, GET_MODE (dest)))
7025 			return 0;
7026 		    }
7027 		}
7028 	    }
7029 
7030 	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7031 	    {
7032 	      rtx link;
7033 
7034 	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7035 		   link = XEXP (link, 1))
7036 		{
7037 		  pat = XEXP (link, 0);
7038 		  if (GET_CODE (pat) == CLOBBER)
7039 		    {
7040 		      rtx dest = SET_DEST (pat);
7041 
7042 		      if (REG_P (dest))
7043 			{
7044 			  int xregno = REGNO (dest);
7045 			  int end_xregno = END_REGNO (dest);
7046 
7047 			  if (xregno < regno + nregs
7048 			      && end_xregno > regno)
7049 			    return 0;
7050 			  else if (xregno < valueno + valuenregs
7051 				   && end_xregno > valueno)
7052 			    return 0;
7053 			  else if (goal_mem_addr_varies
7054 				   && reg_overlap_mentioned_for_reload_p (dest,
7055 								     goal))
7056 			    return 0;
7057 			}
7058 
7059 		      else if (goal_mem && MEM_P (dest)
7060 			       && ! push_operand (dest, GET_MODE (dest)))
7061 			return 0;
7062 		      else if (need_stable_sp
7063 			       && push_operand (dest, GET_MODE (dest)))
7064 			return 0;
7065 		    }
7066 		}
7067 	    }
7068 
7069 #if AUTO_INC_DEC
7070 	  /* If this insn auto-increments or auto-decrements
7071 	     either regno or valueno, return 0 now.
7072 	     If GOAL is a memory ref and its address is not constant,
7073 	     and this insn P increments a register used in GOAL, return 0.  */
7074 	  {
7075 	    rtx link;
7076 
7077 	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7078 	      if (REG_NOTE_KIND (link) == REG_INC
7079 		  && REG_P (XEXP (link, 0)))
7080 		{
7081 		  int incno = REGNO (XEXP (link, 0));
7082 		  if (incno < regno + nregs && incno >= regno)
7083 		    return 0;
7084 		  if (incno < valueno + valuenregs && incno >= valueno)
7085 		    return 0;
7086 		  if (goal_mem_addr_varies
7087 		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7088 							     goal))
7089 		    return 0;
7090 		}
7091 	  }
7092 #endif
7093 	}
7094     }
7095 }
7096 
7097 /* Find a place where INCED appears in an increment or decrement operator
7098    within X, and return the amount INCED is incremented or decremented by.
7099    The value is always positive.  */
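/* For example, for (mem:SI (post_inc (reg R))) the amount is
   GET_MODE_SIZE (SImode), i.e. 4 on typical targets, and for an address
   (post_modify (reg R) (plus (reg R) (const_int -12))) the amount is 12,
   since the result is always the absolute value.  */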
7100 
7101 static poly_int64
7102 find_inc_amount (rtx x, rtx inced)
7103 {
7104   enum rtx_code code = GET_CODE (x);
7105   const char *fmt;
7106   int i;
7107 
7108   if (code == MEM)
7109     {
7110       rtx addr = XEXP (x, 0);
7111       if ((GET_CODE (addr) == PRE_DEC
7112 	   || GET_CODE (addr) == POST_DEC
7113 	   || GET_CODE (addr) == PRE_INC
7114 	   || GET_CODE (addr) == POST_INC)
7115 	  && XEXP (addr, 0) == inced)
7116 	return GET_MODE_SIZE (GET_MODE (x));
7117       else if ((GET_CODE (addr) == PRE_MODIFY
7118 		|| GET_CODE (addr) == POST_MODIFY)
7119 	       && GET_CODE (XEXP (addr, 1)) == PLUS
7120 	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7121 	       && XEXP (addr, 0) == inced
7122 	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7123 	{
7124 	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7125 	  return i < 0 ? -i : i;
7126 	}
7127     }
7128 
7129   fmt = GET_RTX_FORMAT (code);
7130   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7131     {
7132       if (fmt[i] == 'e')
7133 	{
7134 	  poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7135 	  if (maybe_ne (tem, 0))
7136 	    return tem;
7137 	}
7138       if (fmt[i] == 'E')
7139 	{
7140 	  int j;
7141 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7142 	    {
7143 	      poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7144 	      if (maybe_ne (tem, 0))
7145 		return tem;
7146 	    }
7147 	}
7148     }
7149 
7150   return 0;
7151 }
7152 
7153 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7154    REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7155 
7156 static int
7157 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7158 			   rtx insn)
7159 {
7160   rtx link;
7161 
7162   if (!AUTO_INC_DEC)
7163     return 0;
7164 
7165   gcc_assert (insn);
7166 
7167   if (! INSN_P (insn))
7168     return 0;
7169 
7170   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7171     if (REG_NOTE_KIND (link) == REG_INC)
7172       {
7173 	unsigned int test = (int) REGNO (XEXP (link, 0));
7174 	if (test >= regno && test < endregno)
7175 	  return 1;
7176       }
7177   return 0;
7178 }
7179 
7180 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7181    If SETS is 1, also consider SETs.  If SETS is 2, enable checking
7182    REG_INC.  REGNO must refer to a hard register.  */
7183 
7184 int
7185 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7186 		   int sets)
7187 {
7188   /* regno must be a hard register.  */
7189   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7190 
7191   unsigned int endregno = end_hard_regno (mode, regno);
7192 
7193   if ((GET_CODE (PATTERN (insn)) == CLOBBER
7194        || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7195       && REG_P (XEXP (PATTERN (insn), 0)))
7196     {
7197       unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7198 
7199       return test >= regno && test < endregno;
7200     }
7201 
7202   if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7203     return 1;
7204 
7205   if (GET_CODE (PATTERN (insn)) == PARALLEL)
7206     {
7207       int i = XVECLEN (PATTERN (insn), 0) - 1;
7208 
7209       for (; i >= 0; i--)
7210 	{
7211 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7212 	  if ((GET_CODE (elt) == CLOBBER
7213 	       || (sets == 1 && GET_CODE (elt) == SET))
7214 	      && REG_P (XEXP (elt, 0)))
7215 	    {
7216 	      unsigned int test = REGNO (XEXP (elt, 0));
7217 
7218 	      if (test >= regno && test < endregno)
7219 		return 1;
7220 	    }
7221 	  if (sets == 2
7222 	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7223 	    return 1;
7224 	}
7225     }
7226 
7227   return 0;
7228 }
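/* Usage sketch (editorial, not part of the upstream source; the insn
   and register number are hypothetical).  The SETS argument selects how
   much of the insn is searched:

     regno_clobbered_p (3, insn, SImode, 0);  // CLOBBERs only
     regno_clobbered_p (3, insn, SImode, 1);  // CLOBBERs and SETs
     regno_clobbered_p (3, insn, SImode, 2);  // CLOBBERs plus REG_INC notes

   MODE widens the check to every hard register that (reg:MODE 3) would
   occupy, via end_hard_regno.  */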
7229 
7230 /* Find the low part, with mode MODE, of the hard register rtx RELOADREG.  */
7231 rtx
7232 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7233 {
7234   int regno;
7235 
7236   if (GET_MODE (reloadreg) == mode)
7237     return reloadreg;
7238 
7239   regno = REGNO (reloadreg);
7240 
7241   if (REG_WORDS_BIG_ENDIAN)
7242     regno += ((int) REG_NREGS (reloadreg)
7243 	      - (int) hard_regno_nregs (regno, mode));
7244 
7245   return gen_rtx_REG (mode, regno);
7246 }
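/* Worked example (editorial sketch; the register numbers are
   hypothetical).  Suppose RELOADREG is (reg:DI 4), occupying hard regs
   4 and 5, and the requested MODE is SImode, occupying a single hard
   reg.  On a target where REG_WORDS_BIG_ENDIAN is set, the low part
   lives in the higher-numbered register, so the regno is adjusted by
   2 - 1 and the result is (reg:SI 5); otherwise the regno is left
   alone and the result is (reg:SI 4).  */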
7247 
7248 static const char *const reload_when_needed_name[] =
7249 {
7250   "RELOAD_FOR_INPUT",
7251   "RELOAD_FOR_OUTPUT",
7252   "RELOAD_FOR_INSN",
7253   "RELOAD_FOR_INPUT_ADDRESS",
7254   "RELOAD_FOR_INPADDR_ADDRESS",
7255   "RELOAD_FOR_OUTPUT_ADDRESS",
7256   "RELOAD_FOR_OUTADDR_ADDRESS",
7257   "RELOAD_FOR_OPERAND_ADDRESS",
7258   "RELOAD_FOR_OPADDR_ADDR",
7259   "RELOAD_OTHER",
7260   "RELOAD_FOR_OTHER_ADDRESS"
7261 };
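/* Editorial note: the table above is indexed by a reload's when_needed
   field, so its entries are expected to appear in the same order as the
   corresponding enum reload_type enumerators declared in reload.h;
   adding or reordering enumerators without updating this table would
   make debug_reload_to_stream print the wrong names.  */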
7262 
7263 /* These functions are used to print the variables set by 'find_reloads' */
7264 
7265 DEBUG_FUNCTION void
7266 debug_reload_to_stream (FILE *f)
7267 {
7268   int r;
7269   const char *prefix;
7270 
7271   if (! f)
7272     f = stderr;
7273   for (r = 0; r < n_reloads; r++)
7274     {
7275       fprintf (f, "Reload %d: ", r);
7276 
7277       if (rld[r].in != 0)
7278 	{
7279 	  fprintf (f, "reload_in (%s) = ",
7280 		   GET_MODE_NAME (rld[r].inmode));
7281 	  print_inline_rtx (f, rld[r].in, 24);
7282 	  fprintf (f, "\n\t");
7283 	}
7284 
7285       if (rld[r].out != 0)
7286 	{
7287 	  fprintf (f, "reload_out (%s) = ",
7288 		   GET_MODE_NAME (rld[r].outmode));
7289 	  print_inline_rtx (f, rld[r].out, 24);
7290 	  fprintf (f, "\n\t");
7291 	}
7292 
7293       fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7294 
7295       fprintf (f, "%s (opnum = %d)",
7296 	       reload_when_needed_name[(int) rld[r].when_needed],
7297 	       rld[r].opnum);
7298 
7299       if (rld[r].optional)
7300 	fprintf (f, ", optional");
7301 
7302       if (rld[r].nongroup)
7303 	fprintf (f, ", nongroup");
7304 
7305       if (maybe_ne (rld[r].inc, 0))
7306 	{
7307 	  fprintf (f, ", inc by ");
7308 	  print_dec (rld[r].inc, f, SIGNED);
7309 	}
7310 
7311       if (rld[r].nocombine)
7312 	fprintf (f, ", can't combine");
7313 
7314       if (rld[r].secondary_p)
7315 	fprintf (f, ", secondary_reload_p");
7316 
7317       if (rld[r].in_reg != 0)
7318 	{
7319 	  fprintf (f, "\n\treload_in_reg: ");
7320 	  print_inline_rtx (f, rld[r].in_reg, 24);
7321 	}
7322 
7323       if (rld[r].out_reg != 0)
7324 	{
7325 	  fprintf (f, "\n\treload_out_reg: ");
7326 	  print_inline_rtx (f, rld[r].out_reg, 24);
7327 	}
7328 
7329       if (rld[r].reg_rtx != 0)
7330 	{
7331 	  fprintf (f, "\n\treload_reg_rtx: ");
7332 	  print_inline_rtx (f, rld[r].reg_rtx, 24);
7333 	}
7334 
7335       prefix = "\n\t";
7336       if (rld[r].secondary_in_reload != -1)
7337 	{
7338 	  fprintf (f, "%ssecondary_in_reload = %d",
7339 		   prefix, rld[r].secondary_in_reload);
7340 	  prefix = ", ";
7341 	}
7342 
7343       if (rld[r].secondary_out_reload != -1)
7344 	fprintf (f, "%ssecondary_out_reload = %d\n",
7345 		 prefix, rld[r].secondary_out_reload);
7346 
7347       prefix = "\n\t";
7348       if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7349 	{
7350 	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
7351 		   insn_data[rld[r].secondary_in_icode].name);
7352 	  prefix = ", ";
7353 	}
7354 
7355       if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7356 	fprintf (f, "%ssecondary_out_icode = %s", prefix,
7357 		 insn_data[rld[r].secondary_out_icode].name);
7358 
7359       fprintf (f, "\n");
7360     }
7361 }
7362 
7363 DEBUG_FUNCTION void
7364 debug_reload (void)
7365 {
7366   debug_reload_to_stream (stderr);
7367 }
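/* Editorial note: DEBUG_FUNCTION helpers such as debug_reload are meant
   to be invoked by hand from a debugger while stepping through reload,
   for example:

     (gdb) call debug_reload ()

   which dumps the current contents of rld[0..n_reloads-1] to stderr in
   the format produced by debug_reload_to_stream above.  The gdb session
   shown is only an illustration.  */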
7368