1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains subroutines used only from the file reload1.c.
21    It knows how to scan one insn for operands and values
22    that need to be copied into registers to make valid code.
23    It also finds other operands and values which are valid
24    but for which equivalent values in registers exist and
25    ought to be used instead.
26 
27    Before processing the first insn of the function, call `init_reload'.
28    init_reload actually has to be called earlier anyway.
29 
30    To scan an insn, call `find_reloads'.  This does two things:
31    1. sets up tables describing which values must be reloaded
32    for this insn, and what kind of hard regs they must be reloaded into;
33    2. optionally records the locations where those values appear in
34    the data, so they can be replaced properly later.
35    This is done only if the second arg to `find_reloads' is nonzero.
36 
37    The third arg to `find_reloads' specifies the number of levels
38    of indirect addressing supported by the machine.  If it is zero,
39    indirect addressing is not valid.  If it is one, (MEM (REG n))
40    is valid even if (REG n) did not get a hard register; if it is two,
41    (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42    hard register, and similarly for higher values.
43 
44    Then you must choose the hard regs to reload those pseudo regs into,
45    and generate appropriate load insns before this insn and perhaps
46    also store insns after this insn.  Set up the array `reload_reg_rtx'
47    to contain the REG rtx's for the registers you used.  In some
48    cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49    for certain reloads.  Then that tells you which register to use,
50    so you do not need to allocate one.  But you still do need to add extra
51    instructions to copy the value into and out of that register.
52 
53    Finally you must call `subst_reloads' to substitute the reload reg rtx's
54    into the locations already recorded.
55 
56 NOTE SIDE EFFECTS:
57 
58    find_reloads can alter the operands of the instruction it is called on.
59 
60    1. Two operands of any sort may be interchanged, if they are in a
61    commutative instruction.
62    This happens only if find_reloads thinks the instruction will compile
63    better that way.
64 
65    2. Pseudo-registers that are equivalent to constants are replaced
66    with those constants if they are not in hard registers.
67 
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71 
72 Using a reload register for several reloads in one insn:
73 
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77 
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81 
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload.  */
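
/* For instance (illustrative only): a reload used solely to compute an
   input address (RELOAD_FOR_INPUT_ADDRESS) and a reload used solely to
   compute an output address (RELOAD_FOR_OUTPUT_ADDRESS) are live in
   different parts of the insn, so the two may be given the same hard
   register, whereas an ordinary operand reload is RELOAD_OTHER and must
   get a reload register of its own.  */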
85 
86 #define REG_OK_STRICT
87 
88 /* We do not enable this with CHECKING_P, since it is awfully slow.  */
89 #undef DEBUG_RELOAD
90 
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109 
110 /* True if X is a constant that can be forced into the constant pool.
111    MODE is the mode of the operand, or VOIDmode if not known.  */
112 #define CONST_POOL_OK_P(MODE, X)		\
113   ((MODE) != VOIDmode				\
114    && CONSTANT_P (X)				\
115    && GET_CODE (X) != HIGH			\
116    && !targetm.cannot_force_const_mem (MODE, X))
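
/* For example (illustrative only), a floating-point CONST_DOUBLE can
   normally be forced into the pool, while a HIGH expression, or any
   constant the target rejects through cannot_force_const_mem, cannot.  */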
117 
118 /* True if RCLASS is a non-empty register class that has too few registers
119    to be safely used as a reload target class.  */
120 
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124   return (reg_class_size [(int) rclass] == 1
125 	  || (reg_class_size [(int) rclass] >= 1
126 	      && targetm.class_likely_spilled_p (rclass)));
127 }
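
/* For example (illustrative only), a class containing a single hard
   register, such as a dedicated shift-count register on some targets, is
   always "small", as is any non-empty class the target reports as likely
   spilled via targetm.class_likely_spilled_p.  */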
128 
129 
130 /* All reloads of the current insn are recorded here.  See reload.h for
131    comments.  */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134 
135 /* All the "earlyclobber" operands of the current insn
136    are recorded here.  */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 
140 int reload_n_operands;
141 
142 /* Replacing reloads.
143 
144    If `replace_reloads' is nonzero, then as each reload is recorded
145    an entry is made for it in the table `replacements'.
146    Then later `subst_reloads' can look through that table and
147    perform all the replacements needed.  */
148 
149 /* Nonzero means record the places to replace.  */
150 static int replace_reloads;
151 
152 /* Each replacement is recorded with a structure like this.  */
153 struct replacement
154 {
155   rtx *where;			/* Location to store in */
156   int what;			/* which reload this is for */
157   machine_mode mode;	/* mode it must have */
158 };
159 
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161 
162 /* Number of replacements currently recorded.  */
163 static int n_replacements;
164 
165 /* Used to track what is modified by an operand.  */
166 struct decomposition
167 {
168   int reg_flag;		/* Nonzero if referencing a register.  */
169   int safe;		/* Nonzero if this can't conflict with anything.  */
170   rtx base;		/* Base address for MEM.  */
171   poly_int64_pod start;	/* Starting offset or register number.  */
172   poly_int64_pod end;	/* Ending offset or register number.  */
173 };
174 
175 /* Save MEMs needed to copy from one class of registers to another.  One MEM
176    is used per mode, but normally only one or two modes are ever used.
177 
178    We keep two versions, before and after register elimination.  The one
179    after register elimination is recorded separately for each operand.  This
180    is done in case the address is not valid, to be sure that we separately
181    reload each.  */
182 
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
186 
187 /* The instruction we are doing reloads for;
188    so we can test whether a register dies in it.  */
189 static rtx_insn *this_insn;
190 
191 /* Nonzero if this instruction is a user-specified asm with operands.  */
192 static int this_insn_is_asm;
193 
194 /* If hard_regs_live_known is nonzero,
195    we can tell which hard regs are currently live,
196    at least enough to succeed in choosing dummy reloads.  */
197 static int hard_regs_live_known;
198 
199 /* Indexed by hard reg number,
200    element is nonnegative if hard reg has been spilled.
201    This vector is passed to `find_reloads' as an argument
202    and is not changed here.  */
203 static short *static_reload_reg_p;
204 
205 /* Set to 1 in subst_reg_equivs if it changes anything.  */
206 static int subst_reg_equivs_changed;
207 
208 /* On return from push_reload, holds the reload-number for the OUT
209    operand, which can differ from the reload-number for the input operand.  */
210 static int output_reloadnum;
211 
212   /* Compare two RTX's.  */
213 #define MATCHES(x, y) \
214  (x == y || (x != 0 && (REG_P (x)				\
215 			? REG_P (y) && REGNO (x) == REGNO (y)	\
216 			: rtx_equal_p (x, y) && ! side_effects_p (x))))
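
  /* For example (illustrative only): MATCHES treats two REGs as equal iff
     they have the same register number, treats any rtx as matching itself,
     and refuses to match two distinct rtx's whose contents have side
     effects, such as auto-increment addresses.  */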
217 
218   /* Indicates if two reload purposes are for similar enough things that we
219      can merge their reloads.  */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221   ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
222    || ((when1) == (when2) && (op1) == (op2))		\
223    || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224    || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
225        && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
226    || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
227        && (when2) == RELOAD_FOR_OTHER_ADDRESS))
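
  /* For instance, two RELOAD_FOR_INPUT reloads are mergeable even when they
     are for different operands, whereas two RELOAD_FOR_INPUT_ADDRESS reloads
     merge only when they are for the same operand number (illustrative
     summary of the macro above).  */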
228 
229   /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231   ((when1) != (when2)					\
232    || ! ((op1) == (op2)					\
233 	 || (when1) == RELOAD_FOR_INPUT			\
234 	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
235 	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
236 
237   /* If we are going to reload an address, compute the reload type to
238      use.  */
239 #define ADDR_TYPE(type)					\
240   ((type) == RELOAD_FOR_INPUT_ADDRESS			\
241    ? RELOAD_FOR_INPADDR_ADDRESS				\
242    : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
243       ? RELOAD_FOR_OUTADDR_ADDRESS			\
244       : (type)))
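
  /* Thus reloading the address of a RELOAD_FOR_INPUT_ADDRESS reload yields
     RELOAD_FOR_INPADDR_ADDRESS, reloading the address of a
     RELOAD_FOR_OUTPUT_ADDRESS reload yields RELOAD_FOR_OUTADDR_ADDRESS, and
     any other reload type is passed through unchanged.  */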
245 
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 				  machine_mode, enum reload_type,
248 				  enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 					int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 				 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 			      machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 				rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
266 					      addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 				 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 				   enum rtx_code, enum rtx_code, rtx *,
274 				   int, enum reload_type, int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 				       machine_mode, int,
277 				       enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 					int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 					 rtx, rtx *);
285 
286 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287    list yet.  */
288 
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
291 {
292   rtx it;
293 
294   for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295     if (rtx_equal_p (XEXP (it, 0), mem))
296       return;
297 
298   reg_equiv_alt_mem_list (regno)
299     = alloc_EXPR_LIST (REG_EQUIV, mem,
300 		       reg_equiv_alt_mem_list (regno));
301 }
302 
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304    nonzero) or storing (if IN_P is zero) X to or from a reload register of
305    register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
306    are needed, push them.
307 
308    Return the reload number of the secondary reload we made, or -1 if
309    we didn't need one.  *PICODE is set to the insn_code to use if we do
310    need a secondary reload.  */
311 
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 		       enum reg_class reload_class,
315 		       machine_mode reload_mode, enum reload_type type,
316 		       enum insn_code *picode, secondary_reload_info *prev_sri)
317 {
318   enum reg_class rclass = NO_REGS;
319   enum reg_class scratch_class;
320   machine_mode mode = reload_mode;
321   enum insn_code icode = CODE_FOR_nothing;
322   enum insn_code t_icode = CODE_FOR_nothing;
323   enum reload_type secondary_type;
324   int s_reload, t_reload = -1;
325   const char *scratch_constraint;
326   secondary_reload_info sri;
327 
328   if (type == RELOAD_FOR_INPUT_ADDRESS
329       || type == RELOAD_FOR_OUTPUT_ADDRESS
330       || type == RELOAD_FOR_INPADDR_ADDRESS
331       || type == RELOAD_FOR_OUTADDR_ADDRESS)
332     secondary_type = type;
333   else
334     secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335 
336   *picode = CODE_FOR_nothing;
337 
338   /* If X is a paradoxical SUBREG, use the inner value to determine both the
339      mode and object being reloaded.  */
340   if (paradoxical_subreg_p (x))
341     {
342       x = SUBREG_REG (x);
343       reload_mode = GET_MODE (x);
344     }
345 
346   /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347      is still a pseudo-register by now, it *must* have an equivalent MEM
348      but we don't want to assume that), use that equivalent when seeing if
349      a secondary reload is needed since whether or not a reload is needed
350      might be sensitive to the form of the MEM.  */
351 
352   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353       && reg_equiv_mem (REGNO (x)))
354     x = reg_equiv_mem (REGNO (x));
355 
356   sri.icode = CODE_FOR_nothing;
357   sri.prev_sri = prev_sri;
358   rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 						      reload_mode, &sri);
360   icode = (enum insn_code) sri.icode;
361 
362   /* If we don't need any secondary registers, done.  */
363   if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364     return -1;
365 
366   if (rclass != NO_REGS)
367     t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 				      reload_mode, type, &t_icode, &sri);
369 
370   /* If we will be using an insn, the secondary reload is for a
371      scratch register.  */
372 
373   if (icode != CODE_FOR_nothing)
374     {
375       /* If IN_P is nonzero, the reload register will be the output in
376 	 operand 0.  If IN_P is zero, the reload register will be the input
377 	 in operand 1.  Outputs should have an initial "=", which we must
378 	 skip.  */
379 
380       /* ??? It would be useful to be able to handle only two, or more than
381 	 three, operands, but for now we can only handle the case of having
382 	 exactly three: output, input and one temp/scratch.  */
383       gcc_assert (insn_data[(int) icode].n_operands == 3);
384 
385       /* ??? We currently have no way to represent a reload that needs
386 	 an icode to reload from an intermediate tertiary reload register.
387 	 We should probably have a new field in struct reload to tag a
388 	 chain of scratch operand reloads onto.   */
389       gcc_assert (rclass == NO_REGS);
390 
391       scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392       gcc_assert (*scratch_constraint == '=');
393       scratch_constraint++;
394       if (*scratch_constraint == '&')
395 	scratch_constraint++;
396       scratch_class = (reg_class_for_constraint
397 		       (lookup_constraint (scratch_constraint)));
398 
399       rclass = scratch_class;
400       mode = insn_data[(int) icode].operand[2].mode;
401     }
402 
403   /* This case isn't valid, so fail.  Reload is allowed to use the same
404      register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405      in the case of a secondary register, we actually need two different
406      registers for correct code.  We fail here to prevent the possibility of
407      silently generating incorrect code later.
408 
409      The convention is that secondary input reloads are valid only if the
410      secondary_class is different from class.  If you have such a case, you
411      cannot use secondary reloads, you must work around the problem some
412      other way.
413 
414      Allow this when a reload_in/out pattern is being used.  I.e. assume
415      that the generated code handles this case.  */
416 
417   gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 	      || t_icode != CODE_FOR_nothing);
419 
420   /* See if we can reuse an existing secondary reload.  */
421   for (s_reload = 0; s_reload < n_reloads; s_reload++)
422     if (rld[s_reload].secondary_p
423 	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 	&& ((in_p && rld[s_reload].inmode == mode)
426 	    || (! in_p && rld[s_reload].outmode == mode))
427 	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 	&& (small_register_class_p (rclass)
432 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
433 	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 			     opnum, rld[s_reload].opnum))
435       {
436 	if (in_p)
437 	  rld[s_reload].inmode = mode;
438 	if (! in_p)
439 	  rld[s_reload].outmode = mode;
440 
441 	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 	  rld[s_reload].rclass = rclass;
443 
444 	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 	rld[s_reload].optional &= optional;
446 	rld[s_reload].secondary_p = 1;
447 	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 			    opnum, rld[s_reload].opnum))
449 	  rld[s_reload].when_needed = RELOAD_OTHER;
450 
451 	break;
452       }
453 
454   if (s_reload == n_reloads)
455     {
456       /* If we need a memory location to copy between the two reload regs,
457 	 set it up now.  Note that we do the input case before making
458 	 the reload and the output case after.  This is due to the
459 	 way reloads are output.  */
460 
461       if (in_p && icode == CODE_FOR_nothing
462 	  && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 	{
464 	  get_secondary_mem (x, reload_mode, opnum, type);
465 
466 	  /* We may have just added new reloads.  Make sure we add
467 	     the new reload at the end.  */
468 	  s_reload = n_reloads;
469 	}
470 
471       /* We need to make a new secondary reload for this register class.  */
472       rld[s_reload].in = rld[s_reload].out = 0;
473       rld[s_reload].rclass = rclass;
474 
475       rld[s_reload].inmode = in_p ? mode : VOIDmode;
476       rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477       rld[s_reload].reg_rtx = 0;
478       rld[s_reload].optional = optional;
479       rld[s_reload].inc = 0;
480       /* Maybe we could combine these, but it seems too tricky.  */
481       rld[s_reload].nocombine = 1;
482       rld[s_reload].in_reg = 0;
483       rld[s_reload].out_reg = 0;
484       rld[s_reload].opnum = opnum;
485       rld[s_reload].when_needed = secondary_type;
486       rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487       rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488       rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489       rld[s_reload].secondary_out_icode
490 	= ! in_p ? t_icode : CODE_FOR_nothing;
491       rld[s_reload].secondary_p = 1;
492 
493       n_reloads++;
494 
495       if (! in_p && icode == CODE_FOR_nothing
496 	  && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 	get_secondary_mem (x, mode, opnum, type);
498     }
499 
500   *picode = icode;
501   return s_reload;
502 }
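
/* As an illustration (not specific to any real target): moving a value
   into a floating-point class might require an intermediate register of
   another class, handled by the recursive push_secondary_reload call
   above, or the target may instead supply a special reload pattern whose
   scratch register, described by its operand 2, is handled through the
   ICODE logic above.  */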
503 
504 /* If a secondary reload is needed, return its class.  If both an intermediate
505    register and a scratch register are needed, we return the class of the
506    intermediate register.  */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 			rtx x)
510 {
511   enum insn_code icode;
512   secondary_reload_info sri;
513 
514   sri.icode = CODE_FOR_nothing;
515   sri.prev_sri = NULL;
516   rclass
517     = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518   icode = (enum insn_code) sri.icode;
519 
520   /* If there are no secondary reloads at all, we return NO_REGS.
521      If an intermediate register is needed, we return its class.  */
522   if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523     return rclass;
524 
525   /* No intermediate register is needed, but we have a special reload
526      pattern, which we assume for now needs a scratch register.  */
527   return scratch_reload_class (icode);
528 }
529 
530 /* ICODE is the insn_code of a reload pattern.  Check that it has exactly
531    three operands, verify that operand 2 is an output operand, and return
532    its register class.
533    ??? We'd like to be able to handle any pattern with at least 2 operands,
534    for zero or more scratch registers, but that needs more infrastructure.  */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
537 {
538   const char *scratch_constraint;
539   enum reg_class rclass;
540 
541   gcc_assert (insn_data[(int) icode].n_operands == 3);
542   scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543   gcc_assert (*scratch_constraint == '=');
544   scratch_constraint++;
545   if (*scratch_constraint == '&')
546     scratch_constraint++;
547   rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548   gcc_assert (rclass != NO_REGS);
549   return rclass;
550 }
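
/* For example, if operand 2 of the reload pattern has the constraint
   "=&r", the '=' and '&' are skipped and the register class associated
   with 'r' (GENERAL_REGS on a typical target) is returned.  */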
551 
552 /* Return a memory location that will be used to copy X in mode MODE.
553    If we haven't already made a location for this mode in this insn,
554    call find_reloads_address on the location being returned.  */
555 
556 rtx
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 		   int opnum, enum reload_type type)
559 {
560   rtx loc;
561   int mem_valid;
562 
563   /* By default, if MODE is narrower than a word, widen it to a word.
564      This is required because most machines that require these memory
565      locations do not support short loads and stores from all registers
566      (e.g., FP registers).  */
567 
568   mode = targetm.secondary_memory_needed_mode (mode);
569 
570   /* If we already have made a MEM for this operand in MODE, return it.  */
571   if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572     return secondary_memlocs_elim[(int) mode][opnum];
573 
574   /* If this is the first time we've tried to get a MEM for this mode,
575      allocate a new one.  `something_changed' in reload will get set
576      by noticing that the frame size has changed.  */
577 
578   if (secondary_memlocs[(int) mode] == 0)
579     {
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581       secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583       secondary_memlocs[(int) mode]
584 	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
586     }
587 
588   /* Get a version of the address doing any eliminations needed.  If that
589      didn't give us a new MEM, make a new one if it isn't valid.  */
590 
591   loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592   mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 						  MEM_ADDR_SPACE (loc));
594 
595   if (! mem_valid && loc == secondary_memlocs[(int) mode])
596     loc = copy_rtx (loc);
597 
598   /* The only time the call below will do anything is if the stack
599      offset is too large.  In that case IND_LEVELS doesn't matter, so we
600      can just pass a zero.  Adjust the type to be the address of the
601      corresponding object.  If the address was valid, save the eliminated
602      address.  If it wasn't valid, we need to make a reload each time, so
603      don't save it.  */
604 
605   if (! mem_valid)
606     {
607       type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 	       : RELOAD_OTHER);
610 
611       find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 			    opnum, type, 0, 0);
613     }
614 
615   secondary_memlocs_elim[(int) mode][opnum] = loc;
616   if (secondary_memlocs_elim_used <= (int)mode)
617     secondary_memlocs_elim_used = (int)mode + 1;
618   return loc;
619 }
620 
621 /* Clear any secondary memory locations we've made.  */
622 
623 void
624 clear_secondary_mem (void)
625 {
626   memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627 }
628 
629 
630 /* Find the largest class which has at least one register valid in
631    mode INNER, and which for every such register, that register number
632    plus N is also valid in OUTER (if in range) and is cheap to move
633    into REGNO.  Such a class must exist.  */
634 
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 		  machine_mode inner ATTRIBUTE_UNUSED, int n,
638 		  unsigned int dest_regno ATTRIBUTE_UNUSED)
639 {
640   int best_cost = -1;
641   int rclass;
642   int regno;
643   enum reg_class best_class = NO_REGS;
644   enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645   unsigned int best_size = 0;
646   int cost;
647 
648   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649     {
650       int bad = 0;
651       int good = 0;
652       for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 	  {
655 	    if (targetm.hard_regno_mode_ok (regno, inner))
656 	      {
657 		good = 1;
658 		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 		    && !targetm.hard_regno_mode_ok (regno + n, outer))
660 		  bad = 1;
661 	      }
662 	  }
663 
664       if (bad || !good)
665 	continue;
666       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667 
668       if ((reg_class_size[rclass] > best_size
669 	   && (best_cost < 0 || best_cost >= cost))
670 	  || best_cost > cost)
671 	{
672 	  best_class = (enum reg_class) rclass;
673 	  best_size = reg_class_size[rclass];
674 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 					  dest_class);
676 	}
677     }
678 
679   gcc_assert (best_size != 0);
680 
681   return best_class;
682 }
683 
684 /* We are trying to reload a subreg of something that is not a register.
685    Find the largest class which contains only registers valid in
686    mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
687    which we would eventually like to obtain the object.  */
688 
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 		    machine_mode mode ATTRIBUTE_UNUSED,
692 		    enum reg_class dest_class ATTRIBUTE_UNUSED)
693 {
694   int best_cost = -1;
695   int rclass;
696   int regno;
697   enum reg_class best_class = NO_REGS;
698   unsigned int best_size = 0;
699   int cost;
700 
701   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702     {
703       unsigned int computed_rclass_size = 0;
704 
705       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706         {
707           if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 	      && targetm.hard_regno_mode_ok (regno, mode))
709             computed_rclass_size++;
710         }
711 
712       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713 
714       if ((computed_rclass_size > best_size
715 	   && (best_cost < 0 || best_cost >= cost))
716 	  || best_cost > cost)
717 	{
718 	  best_class = (enum reg_class) rclass;
719 	  best_size = computed_rclass_size;
720 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 					  dest_class);
722 	}
723     }
724 
725   gcc_assert (best_size != 0);
726 
727 #ifdef LIMIT_RELOAD_CLASS
728   best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730   return best_class;
731 }
732 
733 /* Return the number of a previously made reload that can be combined with
734    a new one, or n_reloads if none of the existing reloads can be used.
735    OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736    push_reload, they determine the kind of the new reload that we try to
737    combine.  P_IN points to the corresponding value of IN, which can be
738    modified by this function.
739    DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
740 
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 		      enum reload_type type, int opnum, int dont_share)
744 {
745   rtx in = *p_in;
746   int i;
747   /* We can't merge two reloads if the output of either one is
748      earlyclobbered.  */
749 
750   if (earlyclobber_operand_p (out))
751     return n_reloads;
752 
753   /* We can use an existing reload if the class is right
754      and at least one of IN and OUT is a match
755      and the other is at worst neutral.
756      (A zero compared against anything is neutral.)
757 
758      For targets with small register classes, don't use existing reloads
759      unless they are for the same thing since that can cause us to need
760      more reload registers than we otherwise would.  */
761 
762   for (i = 0; i < n_reloads; i++)
763     if ((reg_class_subset_p (rclass, rld[i].rclass)
764 	 || reg_class_subset_p (rld[i].rclass, rclass))
765 	/* If the existing reload has a register, it must fit our class.  */
766 	&& (rld[i].reg_rtx == 0
767 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 				  true_regnum (rld[i].reg_rtx)))
769 	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 	    || (out != 0 && MATCHES (rld[i].out, out)
772 		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 	&& (small_register_class_p (rclass)
775 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
776 	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777       return i;
778 
779   /* Reloading a plain reg for input can match a reload to postincrement
780      that reg, since the postincrement's value is the right value.
781      Likewise, it can match a preincrement reload, since we regard
782      the preincrementation as happening before any ref in this insn
783      to that register.  */
784   for (i = 0; i < n_reloads; i++)
785     if ((reg_class_subset_p (rclass, rld[i].rclass)
786 	 || reg_class_subset_p (rld[i].rclass, rclass))
787 	/* If the existing reload has a register, it must fit our
788 	   class.  */
789 	&& (rld[i].reg_rtx == 0
790 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 				  true_regnum (rld[i].reg_rtx)))
792 	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
793 	&& ((REG_P (in)
794 	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 	     && MATCHES (XEXP (rld[i].in, 0), in))
796 	    || (REG_P (rld[i].in)
797 		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 		&& MATCHES (XEXP (in, 0), rld[i].in)))
799 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 	&& (small_register_class_p (rclass)
801 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
802 	&& MERGABLE_RELOADS (type, rld[i].when_needed,
803 			     opnum, rld[i].opnum))
804       {
805 	/* Make sure reload_in ultimately has the increment,
806 	   not the plain register.  */
807 	if (REG_P (in))
808 	  *p_in = rld[i].in;
809 	return i;
810       }
811   return n_reloads;
812 }
813 
814 /* Return true if:
815 
816    (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817        of a multiword value; and
818 
819    (b) the number of *words* in REG does not match the number of *registers*
820        in REG.  */
821 
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824 {
825   machine_mode inner_mode = GET_MODE (reg);
826   poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827   return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 	  && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 	  && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
830 					  reg_words, UNITS_PER_WORD));
831 }
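
/* For example, on a hypothetical 32-bit target whose floating-point
   registers are 64 bits wide, (subreg:SI (reg:DF f0) 0) satisfies this
   predicate: the inner DFmode value occupies two words but only one
   hard register.  */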
832 
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834    expression.  MODE is the mode that X will be used in.  OUTPUT is true if
835    the function is invoked for the output part of an enclosing reload.  */
836 
837 static bool
838 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839 {
840   rtx inner;
841 
842   /* Only SUBREGs are problematical.  */
843   if (GET_CODE (x) != SUBREG)
844     return false;
845 
846   inner = SUBREG_REG (x);
847 
848   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
849   if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850     return true;
851 
852   /* If INNER is not a hard register, then INNER will not need reloading.  */
853   if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854     return false;
855 
856   /* If INNER is not ok for MODE, then INNER will need reloading.  */
857   if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
858     return true;
859 
860   /* If this is for an output, and the outer part is a word or smaller,
861      INNER is larger than a word and the number of registers in INNER is
862      not the same as the number of words in INNER, then INNER will need
863      reloading (with an in-out reload).  */
864   return output && complex_word_subreg_p (mode, inner);
865 }
866 
867 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868    requiring an extra reload register.  The caller has already found that
869    IN contains some reference to REGNO, so check that we can produce the
870    new value in a single step.  E.g. if we have
871    (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872    instruction that adds one to a register, this should succeed.
873    However, if we have something like
874    (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875    needs to be loaded into a register first, we need a separate reload
876    register.
877    Such PLUS reloads are generated by find_reloads_address_part.
878    The out-of-range PLUS expressions are usually introduced in the instruction
879    patterns by register elimination and substituting pseudos without a home
880    by their function-invariant equivalences.  */
881 static int
882 can_reload_into (rtx in, int regno, machine_mode mode)
883 {
884   rtx dst;
885   rtx_insn *test_insn;
886   int r = 0;
887   struct recog_data_d save_recog_data;
888 
889   /* For matching constraints, we often get notional input reloads where
890      we want to use the original register as the reload register.  I.e.
891      technically this is a non-optional input-output reload, but IN is
892      already a valid register, and has been chosen as the reload register.
893      Speed this up, since it trivially works.  */
894   if (REG_P (in))
895     return 1;
896 
897   /* To test MEMs properly, we'd have to take into account all the reloads
898      that are already scheduled, which can become quite complicated.
899      And since we've already handled address reloads for this MEM, it
900      should always succeed anyway.  */
901   if (MEM_P (in))
902     return 1;
903 
904   /* If we can make a simple SET insn that does the job, everything should
905      be fine.  */
906   dst =  gen_rtx_REG (mode, regno);
907   test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908   save_recog_data = recog_data;
909   if (recog_memoized (test_insn) >= 0)
910     {
911       extract_insn (test_insn);
912       r = constrain_operands (1, get_enabled_alternatives (test_insn));
913     }
914   recog_data = save_recog_data;
915   return r;
916 }
917 
918 /* Record one reload that needs to be performed.
919    IN is an rtx saying where the data are to be found before this instruction.
920    OUT says where they must be stored after the instruction.
921    (IN is zero for data not read, and OUT is zero for data not written.)
922    INLOC and OUTLOC point to the places in the instructions where
923    IN and OUT were found.
924    If IN and OUT are both nonzero, it means the same register must be used
925    to reload both IN and OUT.
926 
927    RCLASS is a register class required for the reloaded data.
928    INMODE is the machine mode that the instruction requires
929    for the reg that replaces IN and OUTMODE is likewise for OUT.
930 
931    If IN is zero, then OUT's location and mode should be passed as
932    INLOC and INMODE.
933 
934    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
935 
936    OPTIONAL nonzero means this reload does not need to be performed:
937    it can be discarded if that is more convenient.
938 
939    OPNUM and TYPE say what the purpose of this reload is.
940 
941    The return value is the reload-number for this reload.
942 
943    If both IN and OUT are nonzero, in some rare cases we might
944    want to make two separate reloads.  (Actually we never do this now.)
945    Therefore, the reload-number for OUT is stored in
946    output_reloadnum when we return; the return value applies to IN.
947    Usually (presently always), when IN and OUT are nonzero,
948    the two reload-numbers are equal, but the caller should be careful to
949    distinguish them.  */
950 
951 int
952 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 	     enum reg_class rclass, machine_mode inmode,
954 	     machine_mode outmode, int strict_low, int optional,
955 	     int opnum, enum reload_type type)
956 {
957   int i;
958   int dont_share = 0;
959   int dont_remove_subreg = 0;
960 #ifdef LIMIT_RELOAD_CLASS
961   rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962 #endif
963   int secondary_in_reload = -1, secondary_out_reload = -1;
964   enum insn_code secondary_in_icode = CODE_FOR_nothing;
965   enum insn_code secondary_out_icode = CODE_FOR_nothing;
966   enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967   subreg_in_class = NO_REGS;
968 
969   /* INMODE and/or OUTMODE could be VOIDmode if no mode
970      has been specified for the operand.  In that case,
971      use the operand's mode as the mode to reload.  */
972   if (inmode == VOIDmode && in != 0)
973     inmode = GET_MODE (in);
974   if (outmode == VOIDmode && out != 0)
975     outmode = GET_MODE (out);
976 
977   /* If find_reloads and friends have so far failed to replace a pseudo
978      with its reg_equiv_constant, something went wrong
979      beforehand.
980      Note that it can't simply be done here if we missed it earlier
981      since the constant might need to be pushed into the literal pool
982      and the resulting memref would probably need further
983      reloading.  */
984   if (in != 0 && REG_P (in))
985     {
986       int regno = REGNO (in);
987 
988       gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 		  || reg_renumber[regno] >= 0
990 		  || reg_equiv_constant (regno) == NULL_RTX);
991     }
992 
993   /* reg_equiv_constant only contains constants which are obviously
994      not appropriate as destination.  So if we would need to replace
995      the destination pseudo with a constant we are in real
996      trouble.  */
997   if (out != 0 && REG_P (out))
998     {
999       int regno = REGNO (out);
1000 
1001       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 		  || reg_renumber[regno] >= 0
1003 		  || reg_equiv_constant (regno) == NULL_RTX);
1004     }
1005 
1006   /* If we have a read-write operand with an address side-effect,
1007      change either IN or OUT so the side-effect happens only once.  */
1008   if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009     switch (GET_CODE (XEXP (in, 0)))
1010       {
1011       case POST_INC: case POST_DEC:   case POST_MODIFY:
1012 	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 	break;
1014 
1015       case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 	break;
1018 
1019       default:
1020 	break;
1021       }
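
  /* For example, if IN and OUT are both (mem:SI (post_inc (reg X))), IN is
     rewritten above to plain (mem:SI (reg X)) so that the post-increment is
     performed only once, by the output side (illustrative only).  */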
1022 
1023   /* If we are reloading a (SUBREG constant ...), really reload just the
1024      inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1025      If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026      a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027      register is a pseudo, also reload the inside expression.
1028      For machines that extend byte loads, do this for any SUBREG of a pseudo
1029      where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030      M2 is an integral mode that gets extended when loaded.
1031      Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032      where either M1 is not valid for R or M2 is wider than a word but we
1033      only need one register to store an M2-sized quantity in R.
1034      (However, if OUT is nonzero, we need to reload the reg *and*
1035      the subreg, so do nothing here, and let following statement handle it.)
1036 
1037      Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038      we can't handle it here because CONST_INT does not indicate a mode.
1039 
1040      Similarly, we must reload the inside expression if we have a
1041      STRICT_LOW_PART (presumably, in == out in this case).
1042 
1043      Also reload the inner expression if it does not require a secondary
1044      reload but the SUBREG does.
1045 
1046      Finally, reload the inner expression if it is a register that is in
1047      the class whose registers cannot be referenced in a different size
1048      and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1049      cannot reload just the inside since we might end up with the wrong
1050      register class.  But if it is inside a STRICT_LOW_PART, we have
1051      no choice, so we hope we do get the right register class there.  */
1052 
1053   scalar_int_mode inner_mode;
1054   if (in != 0 && GET_CODE (in) == SUBREG
1055       && (subreg_lowpart_p (in) || strict_low)
1056       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1057 					inmode, rclass)
1058       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1059       && (CONSTANT_P (SUBREG_REG (in))
1060 	  || GET_CODE (SUBREG_REG (in)) == PLUS
1061 	  || strict_low
1062 	  || (((REG_P (SUBREG_REG (in))
1063 		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1064 	       || MEM_P (SUBREG_REG (in)))
1065 	      && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1066 		  || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1067 		      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1068 						 &inner_mode)
1069 		      && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1070 		      && paradoxical_subreg_p (inmode, inner_mode)
1071 		      && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1072 		  || (WORD_REGISTER_OPERATIONS
1073 		      && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1074 		      && (known_equal_after_align_down
1075 			  (GET_MODE_SIZE (inmode) - 1,
1076 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1,
1077 			   UNITS_PER_WORD)))))
1078 	  || (REG_P (SUBREG_REG (in))
1079 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1080 	      /* The case where out is nonzero
1081 		 is handled differently in the following statement.  */
1082 	      && (out == 0 || subreg_lowpart_p (in))
1083 	      && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1084 		  || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
1085 	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1086 	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1087 					  SUBREG_REG (in))
1088 		  == NO_REGS))
1089 	  || (REG_P (SUBREG_REG (in))
1090 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1091 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1092 					 GET_MODE (SUBREG_REG (in)), inmode))))
1093     {
1094 #ifdef LIMIT_RELOAD_CLASS
1095       in_subreg_loc = inloc;
1096 #endif
1097       inloc = &SUBREG_REG (in);
1098       in = *inloc;
1099 
1100       if (!WORD_REGISTER_OPERATIONS
1101 	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1102 	  && MEM_P (in))
1103 	/* This is supposed to happen only for paradoxical subregs made by
1104 	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
1105 	gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1106 			      GET_MODE_SIZE (inmode)));
1107 
1108       inmode = GET_MODE (in);
1109     }
1110 
1111   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1112      where M1 is not valid for R if it was not handled by the code above.
1113 
1114      Similar issue for (SUBREG constant ...) if it was not handled by the
1115      code above.  This can happen if SUBREG_BYTE != 0.
1116 
1117      However, we must reload the inner reg *as well as* the subreg in
1118      that case.  */
1119 
1120   if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1121     {
1122       if (REG_P (SUBREG_REG (in)))
1123 	subreg_in_class
1124 	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1125 			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1126 						   GET_MODE (SUBREG_REG (in)),
1127 						   SUBREG_BYTE (in),
1128 						   GET_MODE (in)),
1129 			      REGNO (SUBREG_REG (in)));
1130       else if (CONSTANT_P (SUBREG_REG (in))
1131                || GET_CODE (SUBREG_REG (in)) == PLUS)
1132 	subreg_in_class = find_valid_class_1 (inmode,
1133 					      GET_MODE (SUBREG_REG (in)),
1134 					      rclass);
1135 
1136       /* This relies on the fact that emit_reload_insns outputs the
1137 	 instructions for input reloads of type RELOAD_OTHER in the same
1138 	 order as the reloads.  Thus if the outer reload is also of type
1139 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1140 	 output before the outer reload.  */
1141       push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1142 		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1143       dont_remove_subreg = 1;
1144     }
1145 
1146   /* Similarly for paradoxical and problematical SUBREGs on the output.
1147      Note that there is no reason we need worry about the previous value
1148      of SUBREG_REG (out); even if wider than out, storing in a subreg is
1149      entitled to clobber it all (except in the case of a word mode subreg
1150      or of a STRICT_LOW_PART, in that latter case the constraint should
1151      label it input-output.)  */
1152   if (out != 0 && GET_CODE (out) == SUBREG
1153       && (subreg_lowpart_p (out) || strict_low)
1154       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1155 					outmode, rclass)
1156       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1157       && (CONSTANT_P (SUBREG_REG (out))
1158 	  || strict_low
1159 	  || (((REG_P (SUBREG_REG (out))
1160 		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1161 	       || MEM_P (SUBREG_REG (out)))
1162 	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1163 		  || (WORD_REGISTER_OPERATIONS
1164 		      && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1165 		      && (known_equal_after_align_down
1166 			  (GET_MODE_SIZE (outmode) - 1,
1167 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1168 			   UNITS_PER_WORD)))))
1169 	  || (REG_P (SUBREG_REG (out))
1170 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1171 	      /* The case of a word mode subreg
1172 		 is handled differently in the following statement.  */
1173 	      && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1174 		    && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1175 				 UNITS_PER_WORD))
1176 	      && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1177 	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1178 	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1179 					  SUBREG_REG (out))
1180 		  == NO_REGS))
1181 	  || (REG_P (SUBREG_REG (out))
1182 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1183 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1184 					 GET_MODE (SUBREG_REG (out)),
1185 					 outmode))))
1186     {
1187 #ifdef LIMIT_RELOAD_CLASS
1188       out_subreg_loc = outloc;
1189 #endif
1190       outloc = &SUBREG_REG (out);
1191       out = *outloc;
1192       gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1193 		  || known_le (GET_MODE_SIZE (GET_MODE (out)),
1194 			       GET_MODE_SIZE (outmode)));
1195       outmode = GET_MODE (out);
1196     }
1197 
1198   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1199      where either M1 is not valid for R or M2 is wider than a word but we
1200      only need one register to store an M2-sized quantity in R.
1201 
1202      However, we must reload the inner reg *as well as* the subreg in
1203      that case and the inner reg is an in-out reload.  */
1204 
1205   if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1206     {
1207       enum reg_class in_out_class
1208 	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1209 			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1210 						 GET_MODE (SUBREG_REG (out)),
1211 						 SUBREG_BYTE (out),
1212 						 GET_MODE (out)),
1213 			    REGNO (SUBREG_REG (out)));
1214 
1215       /* This relies on the fact that emit_reload_insns outputs the
1216 	 instructions for output reloads of type RELOAD_OTHER in reverse
1217 	 order of the reloads.  Thus if the outer reload is also of type
1218 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1219 	 output after the outer reload.  */
1220       push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1221 		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1222 		   0, 0, opnum, RELOAD_OTHER);
1223       dont_remove_subreg = 1;
1224     }
1225 
1226   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1227   if (in != 0 && out != 0 && MEM_P (out)
1228       && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1229       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1230     dont_share = 1;
1231 
1232   /* If IN is a SUBREG of a hard register, make a new REG.  This
1233      simplifies some of the cases below.  */
1234 
1235   if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1236       && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1237       && ! dont_remove_subreg)
1238     in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1239 
1240   /* Similarly for OUT.  */
1241   if (out != 0 && GET_CODE (out) == SUBREG
1242       && REG_P (SUBREG_REG (out))
1243       && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1244       && ! dont_remove_subreg)
1245     out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1246 
1247   /* Narrow down the class of register wanted if that is
1248      desirable on this machine for efficiency.  */
1249   {
1250     reg_class_t preferred_class = rclass;
1251 
1252     if (in != 0)
1253       preferred_class = targetm.preferred_reload_class (in, rclass);
1254 
1255     /* Output reloads may need analogous treatment, different in detail.  */
1256     if (out != 0)
1257       preferred_class
1258 	= targetm.preferred_output_reload_class (out, preferred_class);
1259 
1260     /* Discard what the target said if we cannot do it.  */
1261     if (preferred_class != NO_REGS
1262 	|| (optional && type == RELOAD_FOR_OUTPUT))
1263       rclass = (enum reg_class) preferred_class;
1264   }
1265 
1266   /* Make sure we use a class that can handle the actual pseudo
1267      inside any subreg.  For example, on the 386, QImode regs
1268      can appear within SImode subregs.  Although GENERAL_REGS
1269      can handle SImode, QImode needs a smaller class.  */
1270 #ifdef LIMIT_RELOAD_CLASS
1271   if (in_subreg_loc)
1272     rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1273   else if (in != 0 && GET_CODE (in) == SUBREG)
1274     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1275 
1276   if (out_subreg_loc)
1277     rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1278   if (out != 0 && GET_CODE (out) == SUBREG)
1279     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1280 #endif
1281 
1282   /* Verify that this class is at least possible for the mode that
1283      is specified.  */
1284   if (this_insn_is_asm)
1285     {
1286       machine_mode mode;
1287       if (paradoxical_subreg_p (inmode, outmode))
1288 	mode = inmode;
1289       else
1290 	mode = outmode;
1291       if (mode == VOIDmode)
1292 	{
1293 	  error_for_asm (this_insn, "cannot reload integer constant "
1294 			 "operand in %<asm%>");
1295 	  mode = word_mode;
1296 	  if (in != 0)
1297 	    inmode = word_mode;
1298 	  if (out != 0)
1299 	    outmode = word_mode;
1300 	}
1301       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1302 	if (targetm.hard_regno_mode_ok (i, mode)
1303 	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1304 	  break;
1305       if (i == FIRST_PSEUDO_REGISTER)
1306 	{
1307 	  error_for_asm (this_insn, "impossible register constraint "
1308 			 "in %<asm%>");
1309 	  /* Avoid further trouble with this insn.  */
1310 	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1311 	  /* We used to continue here setting class to ALL_REGS, but it triggers
1312 	     sanity check on i386 for:
1313 	     void foo(long double d)
1314 	     {
1315 	       asm("" :: "a" (d));
1316 	     }
1317 	     Returning zero here ought to be safe as we take care in
1318 	     find_reloads to not process the reloads when instruction was
1319 	     replaced by USE.  */
1320 
1321 	  return 0;
1322 	}
1323     }
1324 
1325   /* Optional output reloads are always OK even if we have no register class,
1326      since the function of these reloads is only to have spill_reg_store etc.
1327      set, so that the storing insn can be deleted later.  */
1328   gcc_assert (rclass != NO_REGS
1329 	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1330 
1331   i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1332 
1333   if (i == n_reloads)
1334     {
1335       /* See if we need a secondary reload register to move between CLASS
1336 	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1337 	 needed for each of them if so.  */
1338 
1339       if (in != 0)
1340 	secondary_in_reload
1341 	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1342 				   &secondary_in_icode, NULL);
1343       if (out != 0 && GET_CODE (out) != SCRATCH)
1344 	secondary_out_reload
1345 	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1346 				   type, &secondary_out_icode, NULL);
1347 
1348       /* We found no existing reload suitable for re-use.
1349 	 So add an additional reload.  */
1350 
1351       if (subreg_in_class == NO_REGS
1352 	  && in != 0
1353 	  && (REG_P (in)
1354 	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1355 	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1356 	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1357       /* If a memory location is needed for the copy, make one.  */
1358       if (subreg_in_class != NO_REGS
1359 	  && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1360 	get_secondary_mem (in, inmode, opnum, type);
1361 
1362       i = n_reloads;
1363       rld[i].in = in;
1364       rld[i].out = out;
1365       rld[i].rclass = rclass;
1366       rld[i].inmode = inmode;
1367       rld[i].outmode = outmode;
1368       rld[i].reg_rtx = 0;
1369       rld[i].optional = optional;
1370       rld[i].inc = 0;
1371       rld[i].nocombine = 0;
1372       rld[i].in_reg = inloc ? *inloc : 0;
1373       rld[i].out_reg = outloc ? *outloc : 0;
1374       rld[i].opnum = opnum;
1375       rld[i].when_needed = type;
1376       rld[i].secondary_in_reload = secondary_in_reload;
1377       rld[i].secondary_out_reload = secondary_out_reload;
1378       rld[i].secondary_in_icode = secondary_in_icode;
1379       rld[i].secondary_out_icode = secondary_out_icode;
1380       rld[i].secondary_p = 0;
1381 
1382       n_reloads++;
1383 
1384       if (out != 0
1385           && (REG_P (out)
1386 	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1387 	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1388 	  && (targetm.secondary_memory_needed
1389 	      (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1390 	get_secondary_mem (out, outmode, opnum, type);
1391     }
1392   else
1393     {
1394       /* We are reusing an existing reload,
1395 	 but we may have additional information for it.
1396 	 For example, we may now have both IN and OUT
1397 	 while the old one may have just one of them.  */
1398 
1399       /* The modes can be different.  If they are, we want to reload in
1400 	 the larger mode, so that the value is valid for both modes.  */
1401       if (inmode != VOIDmode
1402 	  && partial_subreg_p (rld[i].inmode, inmode))
1403 	rld[i].inmode = inmode;
1404       if (outmode != VOIDmode
1405 	  && partial_subreg_p (rld[i].outmode, outmode))
1406 	rld[i].outmode = outmode;
1407       if (in != 0)
1408 	{
1409 	  rtx in_reg = inloc ? *inloc : 0;
1410 	  /* If we merge reloads for two distinct rtl expressions that
1411 	     are identical in content, there might be duplicate address
1412 	     reloads.  Remove the extra set now, so that if we later find
1413 	     that we can inherit this reload, we can get rid of the
1414 	     address reloads altogether.
1415 
1416 	     Do not do this if both reloads are optional since the result
1417 	     would be an optional reload which could potentially leave
1418 	     unresolved address replacements.
1419 
1420 	     It is not sufficient to call transfer_replacements since
1421 	     choose_reload_regs will remove the replacements for address
1422 	     reloads of inherited reloads which results in the same
1423 	     problem.  */
1424 	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1425 	      && ! (rld[i].optional && optional))
1426 	    {
1427 	      /* We must keep the address reload with the lower operand
1428 		 number alive.  */
1429 	      if (opnum > rld[i].opnum)
1430 		{
1431 		  remove_address_replacements (in);
1432 		  in = rld[i].in;
1433 		  in_reg = rld[i].in_reg;
1434 		}
1435 	      else
1436 		remove_address_replacements (rld[i].in);
1437 	    }
1438 	  /* When emitting reloads we don't look only at the in-
1439 	     and outmode, but also directly at the operands (in and out).
1440 	     So we can't simply overwrite them with whatever we have found
1441 	     for this (to-be-merged) reload; we have to "merge" that too.
1442 	     Reusing another reload already verified that we deal with the
1443 	     same operands, just possibly in different modes.  So we
1444 	     overwrite the operands only when the new mode is larger.
1445 	     See also PR33613.  */
1446 	  if (!rld[i].in
1447 	      || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1448 	    rld[i].in = in;
1449 	  if (!rld[i].in_reg
1450 	      || (in_reg
1451 		  && partial_subreg_p (GET_MODE (rld[i].in_reg),
1452 				       GET_MODE (in_reg))))
1453 	    rld[i].in_reg = in_reg;
1454 	}
1455       if (out != 0)
1456 	{
1457 	  if (!rld[i].out
1458 	      || (out
1459 		  && partial_subreg_p (GET_MODE (rld[i].out),
1460 				       GET_MODE (out))))
1461 	    rld[i].out = out;
1462 	  if (outloc
1463 	      && (!rld[i].out_reg
1464 		  || partial_subreg_p (GET_MODE (rld[i].out_reg),
1465 				       GET_MODE (*outloc))))
1466 	    rld[i].out_reg = *outloc;
1467 	}
1468       if (reg_class_subset_p (rclass, rld[i].rclass))
1469 	rld[i].rclass = rclass;
1470       rld[i].optional &= optional;
1471       if (MERGE_TO_OTHER (type, rld[i].when_needed,
1472 			  opnum, rld[i].opnum))
1473 	rld[i].when_needed = RELOAD_OTHER;
1474       rld[i].opnum = MIN (rld[i].opnum, opnum);
1475     }
1476 
1477   /* If the ostensible rtx being reloaded differs from the rtx found
1478      in the location to substitute, this reload is not safe to combine
1479      because we cannot reliably tell whether it appears in the insn.  */
1480 
1481   if (in != 0 && in != *inloc)
1482     rld[i].nocombine = 1;
1483 
1484 #if 0
1485   /* This was replaced by changes in find_reloads_address_1 and the new
1486      function inc_for_reload, which go with a new meaning of reload_inc.  */
1487 
1488   /* If this is an IN/OUT reload in an insn that sets the CC,
1489      it must be for an autoincrement.  It doesn't work to store
1490      the incremented value after the insn because that would clobber the CC.
1491      So we must take the value in the location being reloaded from,
1492      increment it, store it back, and then decrement it again.  */
1493   if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1494     {
1495       out = 0;
1496       rld[i].out = 0;
1497       rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1498       /* If we did not find a nonzero amount-to-increment-by,
1499 	 that contradicts the belief that IN is being incremented
1500 	 in an address in this insn.  */
1501       gcc_assert (rld[i].inc != 0);
1502     }
1503 #endif
1504 
1505   /* If we will replace IN and OUT with the reload-reg,
1506      record where they are located so that substitution need
1507      not do a tree walk.  */
1508 
1509   if (replace_reloads)
1510     {
1511       if (inloc != 0)
1512 	{
1513 	  struct replacement *r = &replacements[n_replacements++];
1514 	  r->what = i;
1515 	  r->where = inloc;
1516 	  r->mode = inmode;
1517 	}
1518       if (outloc != 0 && outloc != inloc)
1519 	{
1520 	  struct replacement *r = &replacements[n_replacements++];
1521 	  r->what = i;
1522 	  r->where = outloc;
1523 	  r->mode = outmode;
1524 	}
1525     }
1526 
1527   /* If this reload is just being introduced and it has both
1528      an incoming quantity and an outgoing quantity that are
1529      supposed to be made to match, see if either one of the two
1530      can serve as the place to reload into.
1531 
1532      If one of them is acceptable, set rld[i].reg_rtx
1533      to that one.  */
1534 
1535   if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1536     {
1537       rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1538 					  inmode, outmode,
1539 					  rld[i].rclass, i,
1540 					  earlyclobber_operand_p (out));
1541 
1542       /* If the outgoing register already contains the same value
1543 	 as the incoming one, we can dispense with loading it.
1544 	 The easiest way to tell the caller that is to give a phony
1545 	 value for the incoming operand (same as outgoing one).  */
1546       if (rld[i].reg_rtx == out
1547 	  && (REG_P (in) || CONSTANT_P (in))
1548 	  && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1549 			     static_reload_reg_p, i, inmode) != 0)
1550 	rld[i].in = out;
1551     }
1552 
1553   /* If this is an input reload and the operand contains a register that
1554      dies in this insn and is used nowhere else, see if it is the right class
1555      to be used for this reload.  Use it if so.  (This occurs most commonly
1556      in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1557      this if it is also an output reload that mentions the register unless
1558      the output is a SUBREG that clobbers an entire register.
1559 
1560      Note that the operand might be one of the spill regs, if it is a
1561      pseudo reg and we are in a block where spilling has not taken place.
1562      But if there is no spilling in this block, that is OK.
1563      An explicitly used hard reg cannot be a spill reg.  */
1564 
1565   if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1566     {
1567       rtx note;
1568       int regno;
1569       machine_mode rel_mode = inmode;
1570 
1571       if (out && partial_subreg_p (rel_mode, outmode))
1572 	rel_mode = outmode;
1573 
1574       for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1575 	if (REG_NOTE_KIND (note) == REG_DEAD
1576 	    && REG_P (XEXP (note, 0))
1577 	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1578 	    && reg_mentioned_p (XEXP (note, 0), in)
1579 	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1580 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1581 		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1582 				    ORIGINAL_REGNO (XEXP (note, 0)))
1583 		    && REG_NREGS (XEXP (note, 0)) == 1))
1584 	    && ! refers_to_regno_for_reload_p (regno,
1585 					       end_hard_regno (rel_mode,
1586 							       regno),
1587 					       PATTERN (this_insn), inloc)
1588 	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1589 	    /* If this is also an output reload, IN cannot be used as
1590 	       the reload register if it is set in this insn unless IN
1591 	       is also OUT.  */
1592 	    && (out == 0 || in == out
1593 		|| ! hard_reg_set_here_p (regno,
1594 					  end_hard_regno (rel_mode, regno),
1595 					  PATTERN (this_insn)))
1596 	    /* ??? Why is this code so different from the previous?
1597 	       Is there any simple coherent way to describe the two together?
1598 	       What's going on here?  */
1599 	    && (in != out
1600 		|| (GET_CODE (in) == SUBREG
1601 		    && (known_equal_after_align_up
1602 			(GET_MODE_SIZE (GET_MODE (in)),
1603 			 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1604 			 UNITS_PER_WORD))))
1605 	    /* Make sure the operand fits in the reg that dies.  */
1606 	    && known_le (GET_MODE_SIZE (rel_mode),
1607 			 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1608 	    && targetm.hard_regno_mode_ok (regno, inmode)
1609 	    && targetm.hard_regno_mode_ok (regno, outmode))
1610 	  {
1611 	    unsigned int offs;
1612 	    unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1613 				      hard_regno_nregs (regno, outmode));
1614 
1615 	    for (offs = 0; offs < nregs; offs++)
1616 	      if (fixed_regs[regno + offs]
1617 		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1618 					  regno + offs))
1619 		break;
1620 
1621 	    if (offs == nregs
1622 		&& (! (refers_to_regno_for_reload_p
1623 		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1624 		    || can_reload_into (in, regno, inmode)))
1625 	      {
1626 		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1627 		break;
1628 	      }
1629 	  }
1630     }
1631 
1632   if (out)
1633     output_reloadnum = i;
1634 
1635   return i;
1636 }
1637 
1638 /* Record an additional place we must replace a value
1639    for which we have already recorded a reload.
1640    RELOADNUM is the value returned by push_reload
1641    when the reload was recorded.
1642    This is used in insn patterns that use match_dup.  */
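/* An illustrative sketch (not part of the original sources): for a pattern
   such as

     (set (match_operand:SI 0 "register_operand" "+r")
          (neg:SI (match_dup 0)))

   operand 0 appears in two locations.  When it needs reloading, the
   replacement recorded for the operand's primary location must also be
   registered for the duplicate location; push_replacement and
   dup_replacements make that second recording.  */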
1643 
1644 static void
1645 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1646 {
1647   if (replace_reloads)
1648     {
1649       struct replacement *r = &replacements[n_replacements++];
1650       r->what = reloadnum;
1651       r->where = loc;
1652       r->mode = mode;
1653     }
1654 }
1655 
1656 /* Duplicate any replacement we have recorded to apply at
1657    location ORIG_LOC to also be performed at DUP_LOC.
1658    This is used in insn patterns that use match_dup.  */
1659 
1660 static void
1661 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1662 {
1663   int i, n = n_replacements;
1664 
1665   for (i = 0; i < n; i++)
1666     {
1667       struct replacement *r = &replacements[i];
1668       if (r->where == orig_loc)
1669 	push_replacement (dup_loc, r->what, r->mode);
1670     }
1671 }
1672 
1673 /* Transfer all replacements that used to be in reload FROM to be in
1674    reload TO.  */
1675 
1676 void
1677 transfer_replacements (int to, int from)
1678 {
1679   int i;
1680 
1681   for (i = 0; i < n_replacements; i++)
1682     if (replacements[i].what == from)
1683       replacements[i].what = to;
1684 }
1685 
1686 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1687    or a subpart of it.  If we have any replacements registered for IN_RTX,
1688    cancel the reloads that were supposed to load them.
1689    Return nonzero if we canceled any reloads.  */
1690 int
1691 remove_address_replacements (rtx in_rtx)
1692 {
1693   int i, j;
1694   char reload_flags[MAX_RELOADS];
1695   int something_changed = 0;
1696 
1697   memset (reload_flags, 0, sizeof reload_flags);
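  /* reload_flags[R] collects two bits for reload R: bit 0 (value 1) if some
     replacement belonging to R lies inside IN_RTX, and bit 1 (value 2) if
     some replacement for R is kept.  A final value of exactly 1 therefore
     means every replacement of R was inside IN_RTX, so that reload can be
     cancelled in the loop below.  */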
1698   for (i = 0, j = 0; i < n_replacements; i++)
1699     {
1700       if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1701 	reload_flags[replacements[i].what] |= 1;
1702       else
1703 	{
1704 	  replacements[j++] = replacements[i];
1705 	  reload_flags[replacements[i].what] |= 2;
1706 	}
1707     }
1708   /* Note that the following store must be done before the recursive calls.  */
1709   n_replacements = j;
1710 
1711   for (i = n_reloads - 1; i >= 0; i--)
1712     {
1713       if (reload_flags[i] == 1)
1714 	{
1715 	  deallocate_reload_reg (i);
1716 	  remove_address_replacements (rld[i].in);
1717 	  rld[i].in = 0;
1718 	  something_changed = 1;
1719 	}
1720     }
1721   return something_changed;
1722 }
1723 
1724 /* If there is only one output reload, and it is not for an earlyclobber
1725    operand, try to combine it with a (logically unrelated) input reload
1726    to reduce the number of reload registers needed.
1727 
1728    This is safe if the input reload does not appear in
1729    the value being output-reloaded, because this implies
1730    it is not needed any more once the original insn completes.
1731 
1732    If that doesn't work, see if we can use any of the registers that
1733    die in this insn as a reload register.  We can if it is of the right
1734    class and does not appear in the value being output-reloaded.  */
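/* An illustrative sketch (not part of the original comments): for an insn
   like (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4))) in which both
   pseudos failed to get hard registers, the input reload of pseudo 71 and
   the output reload of pseudo 70 may share one reload register, since
   pseudo 71 does not appear in the value being output-reloaded and is no
   longer needed once the addition has been performed.  */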
1735 
1736 static void
1737 combine_reloads (void)
1738 {
1739   int i, regno;
1740   int output_reload = -1;
1741   int secondary_out = -1;
1742   rtx note;
1743 
1744   /* Find the output reload; return unless there is exactly one
1745      and that one is mandatory.  */
1746 
1747   for (i = 0; i < n_reloads; i++)
1748     if (rld[i].out != 0)
1749       {
1750 	if (output_reload >= 0)
1751 	  return;
1752 	output_reload = i;
1753       }
1754 
1755   if (output_reload < 0 || rld[output_reload].optional)
1756     return;
1757 
1758   /* An input-output reload isn't combinable.  */
1759 
1760   if (rld[output_reload].in != 0)
1761     return;
1762 
1763   /* If this reload is for an earlyclobber operand, we can't do anything.  */
1764   if (earlyclobber_operand_p (rld[output_reload].out))
1765     return;
1766 
1767   /* If there is a reload for part of the address of this operand, we would
1768      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1769      its life to the point where doing this combine would not lower the
1770      number of spill registers needed.  */
1771   for (i = 0; i < n_reloads; i++)
1772     if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1773 	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1774 	&& rld[i].opnum == rld[output_reload].opnum)
1775       return;
1776 
1777   /* Check each input reload; can we combine it?  */
1778 
1779   for (i = 0; i < n_reloads; i++)
1780     if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1781 	/* Life span of this reload must not extend past main insn.  */
1782 	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1783 	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1784 	&& rld[i].when_needed != RELOAD_OTHER
1785 	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1786 	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1787 				       [(int) rld[output_reload].outmode])
1788 	&& known_eq (rld[i].inc, 0)
1789 	&& rld[i].reg_rtx == 0
1790 	/* Don't combine two reloads with different secondary
1791 	   memory locations.  */
1792 	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1793 	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1794 	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1795 			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1796 	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1797 	    ? (rld[i].rclass == rld[output_reload].rclass)
1798 	    : (reg_class_subset_p (rld[i].rclass,
1799 				   rld[output_reload].rclass)
1800 	       || reg_class_subset_p (rld[output_reload].rclass,
1801 				      rld[i].rclass)))
1802 	&& (MATCHES (rld[i].in, rld[output_reload].out)
1803 	    /* Args reversed because the first arg seems to be
1804 	       the one that we imagine being modified
1805 	       while the second is the one that might be affected.  */
1806 	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1807 						      rld[i].in)
1808 		/* However, if the input is a register that appears inside
1809 		   the output, then we also can't share.
1810 		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1811 		   If the same reload reg is used for both reg 69 and the
1812 		   result to be stored in memory, then that result
1813 		   will clobber the address of the memory ref.  */
1814 		&& ! (REG_P (rld[i].in)
1815 		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1816 							     rld[output_reload].out))))
1817 	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1818 					 rld[i].when_needed != RELOAD_FOR_INPUT)
1819 	&& (reg_class_size[(int) rld[i].rclass]
1820 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1821 	/* We will allow making things slightly worse by combining an
1822 	   input and an output, but no worse than that.  */
1823 	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1824 	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1825       {
1826 	int j;
1827 
1828 	/* We have found a reload to combine with!  */
1829 	rld[i].out = rld[output_reload].out;
1830 	rld[i].out_reg = rld[output_reload].out_reg;
1831 	rld[i].outmode = rld[output_reload].outmode;
1832 	/* Mark the old output reload as inoperative.  */
1833 	rld[output_reload].out = 0;
1834 	/* The combined reload is needed for the entire insn.  */
1835 	rld[i].when_needed = RELOAD_OTHER;
1836 	/* If the output reload had a secondary reload, copy it.  */
1837 	if (rld[output_reload].secondary_out_reload != -1)
1838 	  {
1839 	    rld[i].secondary_out_reload
1840 	      = rld[output_reload].secondary_out_reload;
1841 	    rld[i].secondary_out_icode
1842 	      = rld[output_reload].secondary_out_icode;
1843 	  }
1844 
1845 	/* Copy any secondary MEM.  */
1846 	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1847 	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1848 	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1849 	/* If required, minimize the register class.  */
1850 	if (reg_class_subset_p (rld[output_reload].rclass,
1851 				rld[i].rclass))
1852 	  rld[i].rclass = rld[output_reload].rclass;
1853 
1854 	/* Transfer all replacements from the old reload to the combined.  */
1855 	for (j = 0; j < n_replacements; j++)
1856 	  if (replacements[j].what == output_reload)
1857 	    replacements[j].what = i;
1858 
1859 	return;
1860       }
1861 
1862   /* If this insn has only one operand that is modified or written (assumed
1863      to be the first), it must be the one corresponding to this reload.  It
1864      is safe to use anything that dies in this insn for that output, provided
1865      that it does not occur in the output (we already know it isn't an
1866      earlyclobber).  If this is an asm insn, give up.  */
1867 
1868   if (INSN_CODE (this_insn) == -1)
1869     return;
1870 
1871   for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1872     if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1873 	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1874       return;
1875 
1876   /* See if some hard register that dies in this insn and is not used in
1877      the output is the right class.  Only works if the register we pick
1878      up can fully hold our output reload.  */
1879   for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1880     if (REG_NOTE_KIND (note) == REG_DEAD
1881 	&& REG_P (XEXP (note, 0))
1882 	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1883 						rld[output_reload].out)
1884 	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1885 	&& targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1886 	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1887 			      regno)
1888 	&& (hard_regno_nregs (regno, rld[output_reload].outmode)
1889 	    <= REG_NREGS (XEXP (note, 0)))
1890 	/* Ensure that a secondary or tertiary reload for this output
1891 	   won't want this register.  */
1892 	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1893 	    || (!(TEST_HARD_REG_BIT
1894 		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1895 		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1896 		    || !(TEST_HARD_REG_BIT
1897 			 (reg_class_contents[(int) rld[secondary_out].rclass],
1898 			  regno)))))
1899 	&& !fixed_regs[regno]
1900 	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1901 	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1902 	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1903 			       ORIGINAL_REGNO (XEXP (note, 0)))
1904 		&& REG_NREGS (XEXP (note, 0)) == 1)))
1905       {
1906 	rld[output_reload].reg_rtx
1907 	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1908 	return;
1909       }
1910 }
1911 
1912 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1913    See if one of IN and OUT is a register that may be used;
1914    this is desirable since a spill-register won't be needed.
1915    If so, return the register rtx that proves acceptable.
1916 
1917    INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1918    RCLASS is the register class required for the reload.
1919 
1920    If FOR_REAL is >= 0, it is the number of the reload,
1921    and in some cases when it can be discovered that OUT doesn't need
1922    to be computed, clear out rld[FOR_REAL].out.
1923 
1924    If FOR_REAL is -1, this should not be done, because this call
1925    is just to see if a register can be found, not to find and install it.
1926 
1927    EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
1928    puts an additional constraint on being able to use IN for OUT since
1929    IN must not appear elsewhere in the insn (it is assumed that IN itself
1930    is safe from the earlyclobber).  */
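/* An illustrative sketch (not part of the original comments): if IN is a
   pseudo that received no hard register, OUT is a hard register in RCLASS,
   and the rest of the insn neither uses nor sets that hard register, then
   OUT itself is returned; the pseudo is loaded directly into OUT and no
   separate spill register is required.  */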
1931 
1932 static rtx
1933 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1934 		   machine_mode inmode, machine_mode outmode,
1935 		   reg_class_t rclass, int for_real, int earlyclobber)
1936 {
1937   rtx in = real_in;
1938   rtx out = real_out;
1939   int in_offset = 0;
1940   int out_offset = 0;
1941   rtx value = 0;
1942 
1943   /* If operands exceed a word, we can't use either of them
1944      unless they have the same size.  */
1945   if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1946       && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1947 	  || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1948     return 0;
1949 
1950   /* Note that {in,out}_offset are needed only when 'in' or 'out'
1951      respectively refers to a hard register.  */
1952 
1953   /* Find the inside of any subregs.  */
1954   while (GET_CODE (out) == SUBREG)
1955     {
1956       if (REG_P (SUBREG_REG (out))
1957 	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1958 	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1959 					   GET_MODE (SUBREG_REG (out)),
1960 					   SUBREG_BYTE (out),
1961 					   GET_MODE (out));
1962       out = SUBREG_REG (out);
1963     }
1964   while (GET_CODE (in) == SUBREG)
1965     {
1966       if (REG_P (SUBREG_REG (in))
1967 	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1968 	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1969 					  GET_MODE (SUBREG_REG (in)),
1970 					  SUBREG_BYTE (in),
1971 					  GET_MODE (in));
1972       in = SUBREG_REG (in);
1973     }
1974 
1975   /* Narrow down the reg class, the same way push_reload will;
1976      otherwise we might find a dummy now, but push_reload won't.  */
1977   {
1978     reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1979     if (preferred_class != NO_REGS)
1980       rclass = (enum reg_class) preferred_class;
1981   }
1982 
1983   /* See if OUT will do.  */
1984   if (REG_P (out)
1985       && REGNO (out) < FIRST_PSEUDO_REGISTER)
1986     {
1987       unsigned int regno = REGNO (out) + out_offset;
1988       unsigned int nwords = hard_regno_nregs (regno, outmode);
1989       rtx saved_rtx;
1990 
1991       /* When we consider whether the insn uses OUT,
1992 	 ignore references within IN.  They don't prevent us
1993 	 from copying IN into OUT, because those refs would
1994 	 move into the insn that reloads IN.
1995 
1996 	 However, we only ignore IN in its role as this reload.
1997 	 If the insn uses IN elsewhere and it contains OUT,
1998 	 that counts.  We can't be sure it's the "same" operand
1999 	 so it might not go through this reload.
2000 
2001          We also need to avoid using OUT if it, or part of it, is a
2002          fixed register.  Modifying such registers, even transiently,
2003          may have undefined effects on the machine, such as modifying
2004          the stack pointer.  */
2005       saved_rtx = *inloc;
2006       *inloc = const0_rtx;
2007 
2008       if (regno < FIRST_PSEUDO_REGISTER
2009 	  && targetm.hard_regno_mode_ok (regno, outmode)
2010 	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2011 					     PATTERN (this_insn), outloc))
2012 	{
2013 	  unsigned int i;
2014 
2015 	  for (i = 0; i < nwords; i++)
2016 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2017 				     regno + i)
2018 		|| fixed_regs[regno + i])
2019 	      break;
2020 
2021 	  if (i == nwords)
2022 	    {
2023 	      if (REG_P (real_out))
2024 		value = real_out;
2025 	      else
2026 		value = gen_rtx_REG (outmode, regno);
2027 	    }
2028 	}
2029 
2030       *inloc = saved_rtx;
2031     }
2032 
2033   /* Consider using IN if OUT was not acceptable
2034      or if OUT dies in this insn (like the quotient in a divmod insn).
2035      We can't use IN unless it dies in this insn,
2036      which means we must know accurately which hard regs are live.
2037      Also, the result can't go in IN if IN is used within OUT,
2038      or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2039   if (hard_regs_live_known
2040       && REG_P (in)
2041       && REGNO (in) < FIRST_PSEUDO_REGISTER
2042       && (value == 0
2043 	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2044       && find_reg_note (this_insn, REG_DEAD, real_in)
2045       && !fixed_regs[REGNO (in)]
2046       && targetm.hard_regno_mode_ok (REGNO (in),
2047 				     /* The only case where out and real_out
2048 					might have different modes is where
2049 					real_out is a subreg, and in that
2050 					case, out has a real mode.  */
2051 				     (GET_MODE (out) != VOIDmode
2052 				      ? GET_MODE (out) : outmode))
2053       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2054 	  /* However only do this if we can be sure that this input
2055 	     operand doesn't correspond with an uninitialized pseudo.
2056 	     global can assign some hardreg to it that is the same as
2057 	     the one assigned to a different, also live pseudo (as it
2058 	     can ignore the conflict).  We must never introduce writes
2059 	     to such hardregs, as they would clobber the other live
2060 	     pseudo.  See PR 20973.  */
2061 	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2062 			     ORIGINAL_REGNO (in))
2063 	      /* Similarly, only do this if we can be sure that the death
2064 		 note is still valid.  global can assign some hardreg to
2065 		 the pseudo referenced in the note and simultaneously a
2066 		 subword of this hardreg to a different, also live pseudo,
2067 		 because only another subword of the hardreg is actually
2068 		 used in the insn.  This cannot happen if the pseudo has
2069 		 been assigned exactly one hardreg.  See PR 33732.  */
2070 	      && REG_NREGS (in) == 1)))
2071     {
2072       unsigned int regno = REGNO (in) + in_offset;
2073       unsigned int nwords = hard_regno_nregs (regno, inmode);
2074 
2075       if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2076 	  && ! hard_reg_set_here_p (regno, regno + nwords,
2077 				    PATTERN (this_insn))
2078 	  && (! earlyclobber
2079 	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2080 						 PATTERN (this_insn), inloc)))
2081 	{
2082 	  unsigned int i;
2083 
2084 	  for (i = 0; i < nwords; i++)
2085 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2086 				     regno + i))
2087 	      break;
2088 
2089 	  if (i == nwords)
2090 	    {
2091 	      /* If we were going to use OUT as the reload reg
2092 		 and changed our mind, it means OUT is a dummy that
2093 		 dies here.  So don't bother copying value to it.  */
2094 	      if (for_real >= 0 && value == real_out)
2095 		rld[for_real].out = 0;
2096 	      if (REG_P (real_in))
2097 		value = real_in;
2098 	      else
2099 		value = gen_rtx_REG (inmode, regno);
2100 	    }
2101 	}
2102     }
2103 
2104   return value;
2105 }
2106 
2107 /* This page contains subroutines used mainly for determining
2108    whether the IN or an OUT of a reload can serve as the
2109    reload register.  */
2110 
2111 /* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2112 
2113 int
2114 earlyclobber_operand_p (rtx x)
2115 {
2116   int i;
2117 
2118   for (i = 0; i < n_earlyclobbers; i++)
2119     if (reload_earlyclobbers[i] == x)
2120       return 1;
2121 
2122   return 0;
2123 }
2124 
2125 /* Return 1 if expression X alters a hard reg in the range
2126    from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2127    either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2128    X should be the body of an instruction.  */
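/* For example (illustrative only, assuming DImode occupies two hard
   registers here): if X is (set (reg:DI 2) (const_int 0)), then
   hard_reg_set_here_p (3, 4, X) returns 1, because storing the DImode
   value alters hard registers 2 and 3, and register 3 lies in the range.  */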
2129 
2130 static int
2131 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2132 {
2133   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2134     {
2135       rtx op0 = SET_DEST (x);
2136 
2137       while (GET_CODE (op0) == SUBREG)
2138 	op0 = SUBREG_REG (op0);
2139       if (REG_P (op0))
2140 	{
2141 	  unsigned int r = REGNO (op0);
2142 
2143 	  /* See if this reg overlaps range under consideration.  */
2144 	  if (r < end_regno
2145 	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2146 	    return 1;
2147 	}
2148     }
2149   else if (GET_CODE (x) == PARALLEL)
2150     {
2151       int i = XVECLEN (x, 0) - 1;
2152 
2153       for (; i >= 0; i--)
2154 	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2155 	  return 1;
2156     }
2157 
2158   return 0;
2159 }
2160 
2161 /* Return 1 if ADDR is a valid memory address for mode MODE
2162    in address space AS, and check that each pseudo reg has the
2163    proper kind of hard reg.  */
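/* For example (illustrative only): with strict checking, an address such as
   (plus (reg 117) (const_int 4)) is acceptable only if pseudo 117 has been
   assigned a hard register that is valid as a base register; the non-strict
   variant would accept any pseudo there.  */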
2164 
2165 int
2166 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2167 				    rtx addr, addr_space_t as)
2168 {
2169 #ifdef GO_IF_LEGITIMATE_ADDRESS
2170   gcc_assert (ADDR_SPACE_GENERIC_P (as));
2171   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2172   return 0;
2173 
2174  win:
2175   return 1;
2176 #else
2177   return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2178 #endif
2179 }
2180 
2181 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2182    if they are the same hard reg, and has special hacks for
2183    autoincrement and autodecrement.
2184    This is specifically intended for find_reloads to use
2185    in determining whether two operands match.
2186    X is the operand whose number is the lower of the two.
2187 
2188    The value is 2 if Y contains a pre-increment that matches
2189    a non-incrementing address in X.  */
2190 
2191 /* ??? To be completely correct, we should arrange to pass
2192    for X the output operand and for Y the input operand.
2193    For now, we assume that the output operand has the lower number
2194    because that is natural in (SET output (... input ...)).  */
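/* For example (illustrative only):

     operands_match_p ((mem:SI (reg:SI 1)), (mem:SI (pre_inc (reg:SI 1))))

   returns 2: the underlying registers match, and the second operand
   contains a pre-increment of the non-incrementing address in the first.  */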
2195 
2196 int
2197 operands_match_p (rtx x, rtx y)
2198 {
2199   int i;
2200   RTX_CODE code = GET_CODE (x);
2201   const char *fmt;
2202   int success_2;
2203 
2204   if (x == y)
2205     return 1;
2206   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2207       && (REG_P (y) || (GET_CODE (y) == SUBREG
2208 				  && REG_P (SUBREG_REG (y)))))
2209     {
2210       int j;
2211 
2212       if (code == SUBREG)
2213 	{
2214 	  i = REGNO (SUBREG_REG (x));
2215 	  if (i >= FIRST_PSEUDO_REGISTER)
2216 	    goto slow;
2217 	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2218 				    GET_MODE (SUBREG_REG (x)),
2219 				    SUBREG_BYTE (x),
2220 				    GET_MODE (x));
2221 	}
2222       else
2223 	i = REGNO (x);
2224 
2225       if (GET_CODE (y) == SUBREG)
2226 	{
2227 	  j = REGNO (SUBREG_REG (y));
2228 	  if (j >= FIRST_PSEUDO_REGISTER)
2229 	    goto slow;
2230 	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2231 				    GET_MODE (SUBREG_REG (y)),
2232 				    SUBREG_BYTE (y),
2233 				    GET_MODE (y));
2234 	}
2235       else
2236 	j = REGNO (y);
2237 
2238       /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2239 	 multiple hard register group of scalar integer registers, so that
2240 	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2241 	 register.  */
2242       scalar_int_mode xmode;
2243       if (REG_WORDS_BIG_ENDIAN
2244 	  && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2245 	  && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2246 	  && i < FIRST_PSEUDO_REGISTER)
2247 	i += hard_regno_nregs (i, xmode) - 1;
2248       scalar_int_mode ymode;
2249       if (REG_WORDS_BIG_ENDIAN
2250 	  && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2251 	  && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2252 	  && j < FIRST_PSEUDO_REGISTER)
2253 	j += hard_regno_nregs (j, ymode) - 1;
2254 
2255       return i == j;
2256     }
2257   /* If two operands must match, because they are really a single
2258      operand of an assembler insn, then two postincrements are invalid
2259      because the assembler insn would increment only once.
2260      On the other hand, a postincrement matches ordinary indexing
2261      if the postincrement is the output operand.  */
2262   if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2263     return operands_match_p (XEXP (x, 0), y);
2264   /* Two preincrements are invalid
2265      because the assembler insn would increment only once.
2266      On the other hand, a preincrement matches ordinary indexing
2267      if the preincrement is the input operand.
2268      In this case, return 2, since some callers need to do special
2269      things when this happens.  */
2270   if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2271       || GET_CODE (y) == PRE_MODIFY)
2272     return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2273 
2274  slow:
2275 
2276   /* Now we have disposed of all the cases in which different rtx codes
2277      can match.  */
2278   if (code != GET_CODE (y))
2279     return 0;
2280 
2281   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2282   if (GET_MODE (x) != GET_MODE (y))
2283     return 0;
2284 
2285   /* MEMs referring to different address space are not equivalent.  */
2286   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2287     return 0;
2288 
2289   switch (code)
2290     {
2291     CASE_CONST_UNIQUE:
2292       return 0;
2293 
2294     case CONST_VECTOR:
2295       if (!same_vector_encodings_p (x, y))
2296 	return false;
2297       break;
2298 
2299     case LABEL_REF:
2300       return label_ref_label (x) == label_ref_label (y);
2301     case SYMBOL_REF:
2302       return XSTR (x, 0) == XSTR (y, 0);
2303 
2304     default:
2305       break;
2306     }
2307 
2308   /* Compare the elements.  If any pair of corresponding elements
2309      fails to match, return 0 for the whole thing.  */
2310 
2311   success_2 = 0;
2312   fmt = GET_RTX_FORMAT (code);
2313   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2314     {
2315       int val, j;
2316       switch (fmt[i])
2317 	{
2318 	case 'w':
2319 	  if (XWINT (x, i) != XWINT (y, i))
2320 	    return 0;
2321 	  break;
2322 
2323 	case 'i':
2324 	  if (XINT (x, i) != XINT (y, i))
2325 	    return 0;
2326 	  break;
2327 
2328 	case 'p':
2329 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2330 	    return 0;
2331 	  break;
2332 
2333 	case 'e':
2334 	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2335 	  if (val == 0)
2336 	    return 0;
2337 	  /* If any subexpression returns 2,
2338 	     we should return 2 if we are successful.  */
2339 	  if (val == 2)
2340 	    success_2 = 1;
2341 	  break;
2342 
2343 	case '0':
2344 	  break;
2345 
2346 	case 'E':
2347 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2348 	    return 0;
2349 	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2350 	    {
2351 	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2352 	      if (val == 0)
2353 		return 0;
2354 	      if (val == 2)
2355 		success_2 = 1;
2356 	    }
2357 	  break;
2358 
2359 	  /* It is believed that rtx's at this level will never
2360 	     contain anything but integers and other rtx's,
2361 	     except for within LABEL_REFs and SYMBOL_REFs.  */
2362 	default:
2363 	  gcc_unreachable ();
2364 	}
2365     }
2366   return 1 + success_2;
2367 }
2368 
2369 /* Describe the range of registers or memory referenced by X.
2370    If X is a register, set REG_FLAG and put the first register
2371    number into START and the last plus one into END.
2372    If X is a memory reference, put a base address into BASE
2373    and a range of integer offsets into START and END.
2374    If X is pushing on the stack, we can assume it causes no trouble,
2375    so we set the SAFE field.  */
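/* For example (illustrative only, with 4-byte SImode): decomposing the
   frame slot (mem:SI (plus (reg fp) (const_int -8))) gives
   base = (reg fp), start = -8 and end = -4, i.e. the byte range the
   reference covers relative to that base.  */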
2376 
2377 static struct decomposition
2378 decompose (rtx x)
2379 {
2380   struct decomposition val;
2381   int all_const = 0, regno;
2382 
2383   memset (&val, 0, sizeof (val));
2384 
2385   switch (GET_CODE (x))
2386     {
2387     case MEM:
2388       {
2389 	rtx base = NULL_RTX, offset = 0;
2390 	rtx addr = XEXP (x, 0);
2391 
2392 	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2393 	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2394 	  {
2395 	    val.base = XEXP (addr, 0);
2396 	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2397 	    val.end = GET_MODE_SIZE (GET_MODE (x));
2398 	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2399 	    return val;
2400 	  }
2401 
2402 	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2403 	  {
2404 	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2405 		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2406 		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2407 	      {
2408 		val.base  = XEXP (addr, 0);
2409 		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2410 		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2411 		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2412 		return val;
2413 	      }
2414 	  }
2415 
2416 	if (GET_CODE (addr) == CONST)
2417 	  {
2418 	    addr = XEXP (addr, 0);
2419 	    all_const = 1;
2420 	  }
2421 	if (GET_CODE (addr) == PLUS)
2422 	  {
2423 	    if (CONSTANT_P (XEXP (addr, 0)))
2424 	      {
2425 		base = XEXP (addr, 1);
2426 		offset = XEXP (addr, 0);
2427 	      }
2428 	    else if (CONSTANT_P (XEXP (addr, 1)))
2429 	      {
2430 		base = XEXP (addr, 0);
2431 		offset = XEXP (addr, 1);
2432 	      }
2433 	  }
2434 
2435 	if (offset == 0)
2436 	  {
2437 	    base = addr;
2438 	    offset = const0_rtx;
2439 	  }
2440 	if (GET_CODE (offset) == CONST)
2441 	  offset = XEXP (offset, 0);
2442 	if (GET_CODE (offset) == PLUS)
2443 	  {
2444 	    if (CONST_INT_P (XEXP (offset, 0)))
2445 	      {
2446 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2447 		offset = XEXP (offset, 0);
2448 	      }
2449 	    else if (CONST_INT_P (XEXP (offset, 1)))
2450 	      {
2451 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2452 		offset = XEXP (offset, 1);
2453 	      }
2454 	    else
2455 	      {
2456 		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2457 		offset = const0_rtx;
2458 	      }
2459 	  }
2460 	else if (!CONST_INT_P (offset))
2461 	  {
2462 	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2463 	    offset = const0_rtx;
2464 	  }
2465 
2466 	if (all_const && GET_CODE (base) == PLUS)
2467 	  base = gen_rtx_CONST (GET_MODE (base), base);
2468 
2469 	gcc_assert (CONST_INT_P (offset));
2470 
2471 	val.start = INTVAL (offset);
2472 	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2473 	val.base = base;
2474       }
2475       break;
2476 
2477     case REG:
2478       val.reg_flag = 1;
2479       regno = true_regnum (x);
2480       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2481 	{
2482 	  /* A pseudo with no hard reg.  */
2483 	  val.start = REGNO (x);
2484 	  val.end = val.start + 1;
2485 	}
2486       else
2487 	{
2488 	  /* A hard reg.  */
2489 	  val.start = regno;
2490 	  val.end = end_hard_regno (GET_MODE (x), regno);
2491 	}
2492       break;
2493 
2494     case SUBREG:
2495       if (!REG_P (SUBREG_REG (x)))
2496 	/* This could be more precise, but it's good enough.  */
2497 	return decompose (SUBREG_REG (x));
2498       regno = true_regnum (x);
2499       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2500 	return decompose (SUBREG_REG (x));
2501 
2502       /* A hard reg.  */
2503       val.reg_flag = 1;
2504       val.start = regno;
2505       val.end = regno + subreg_nregs (x);
2506       break;
2507 
2508     case SCRATCH:
2509       /* This hasn't been assigned yet, so it can't conflict yet.  */
2510       val.safe = 1;
2511       break;
2512 
2513     default:
2514       gcc_assert (CONSTANT_P (x));
2515       val.safe = 1;
2516       break;
2517     }
2518   return val;
2519 }
2520 
2521 /* Return 1 if altering Y will not modify the value of X.
2522    Y is also described by YDATA, which should be decompose (Y).  */
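/* For example (illustrative only): two 4-byte frame slots at offsets -8 and
   -16 from the same base register decompose to the byte ranges [-8, -4)
   and [-16, -12); the ranges do not overlap, so altering one slot cannot
   change the other, and immune_p returns 1.  */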
2523 
2524 static int
2525 immune_p (rtx x, rtx y, struct decomposition ydata)
2526 {
2527   struct decomposition xdata;
2528 
2529   if (ydata.reg_flag)
2530     /* In this case the decomposition structure contains register
2531        numbers rather than byte offsets.  */
2532     return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2533 					  ydata.end.to_constant (),
2534 					  x, (rtx *) 0);
2535   if (ydata.safe)
2536     return 1;
2537 
2538   gcc_assert (MEM_P (y));
2539   /* If Y is memory and X is not, Y can't affect X.  */
2540   if (!MEM_P (x))
2541     return 1;
2542 
2543   xdata = decompose (x);
2544 
2545   if (! rtx_equal_p (xdata.base, ydata.base))
2546     {
2547       /* If bases are distinct symbolic constants, there is no overlap.  */
2548       if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2549 	return 1;
2550       /* Constants and stack slots never overlap.  */
2551       if (CONSTANT_P (xdata.base)
2552 	  && (ydata.base == frame_pointer_rtx
2553 	      || ydata.base == hard_frame_pointer_rtx
2554 	      || ydata.base == stack_pointer_rtx))
2555 	return 1;
2556       if (CONSTANT_P (ydata.base)
2557 	  && (xdata.base == frame_pointer_rtx
2558 	      || xdata.base == hard_frame_pointer_rtx
2559 	      || xdata.base == stack_pointer_rtx))
2560 	return 1;
2561       /* If either base is variable, we don't know anything.  */
2562       return 0;
2563     }
2564 
2565   return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2566 }
2567 
2568 /* Similar, but calls decompose.  */
2569 
2570 int
2571 safe_from_earlyclobber (rtx op, rtx clobber)
2572 {
2573   struct decomposition early_data;
2574 
2575   early_data = decompose (clobber);
2576   return immune_p (op, clobber, early_data);
2577 }
2578 
2579 /* Main entry point of this file: search the body of INSN
2580    for values that need reloading and record them with push_reload.
2581    REPLACE nonzero means record also where the values occur
2582    so that subst_reloads can be used.
2583 
2584    IND_LEVELS says how many levels of indirection are supported by this
2585    machine; a value of zero means that a memory reference is not a valid
2586    memory address.
2587 
2588    LIVE_KNOWN says we have valid information about which hard
2589    regs are live at each point in the program; this is true when
2590    we are called from global_alloc but false when stupid register
2591    allocation has been done.
2592 
2593    RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2594    which is nonnegative if the reg has been commandeered for reloading into.
2595    It is copied into STATIC_RELOAD_REG_P and referenced from there
2596    by various subroutines.
2597 
2598    Return TRUE if some operands need to be changed, because of swapping
2599    commutative operands, reg_equiv_address substitution, or whatever.  */
2600 
2601 int
2602 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2603 	      short *reload_reg_p)
2604 {
2605   int insn_code_number;
2606   int i, j;
2607   int noperands;
2608   /* These start out as the constraints for the insn
2609      and they are chewed up as we consider alternatives.  */
2610   const char *constraints[MAX_RECOG_OPERANDS];
2611   /* These are the preferred classes for an operand, or NO_REGS if it isn't
2612      a register.  */
2613   enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2614   char pref_or_nothing[MAX_RECOG_OPERANDS];
2615   /* Nonzero for a MEM operand whose entire address needs a reload.
2616      May be -1 to indicate the entire address may or may not need a reload.  */
2617   int address_reloaded[MAX_RECOG_OPERANDS];
2618   /* Nonzero for an address operand that needs to be completely reloaded.
2619      May be -1 to indicate the entire operand may or may not need a reload.  */
2620   int address_operand_reloaded[MAX_RECOG_OPERANDS];
2621   /* Value of enum reload_type to use for operand.  */
2622   enum reload_type operand_type[MAX_RECOG_OPERANDS];
2623   /* Value of enum reload_type to use within address of operand.  */
2624   enum reload_type address_type[MAX_RECOG_OPERANDS];
2625   /* Save the usage of each operand.  */
2626   enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2627   int no_input_reloads = 0, no_output_reloads = 0;
2628   int n_alternatives;
2629   reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2630   char this_alternative_match_win[MAX_RECOG_OPERANDS];
2631   char this_alternative_win[MAX_RECOG_OPERANDS];
2632   char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2633   char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2634   int this_alternative_matches[MAX_RECOG_OPERANDS];
2635   reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2636   int this_alternative_number;
2637   int goal_alternative_number = 0;
2638   int operand_reloadnum[MAX_RECOG_OPERANDS];
2639   int goal_alternative_matches[MAX_RECOG_OPERANDS];
2640   int goal_alternative_matched[MAX_RECOG_OPERANDS];
2641   char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2642   char goal_alternative_win[MAX_RECOG_OPERANDS];
2643   char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2644   char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2645   int goal_alternative_swapped;
2646   int best;
2647   int commutative;
2648   char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2649   rtx substed_operand[MAX_RECOG_OPERANDS];
2650   rtx body = PATTERN (insn);
2651   rtx set = single_set (insn);
2652   int goal_earlyclobber = 0, this_earlyclobber;
2653   machine_mode operand_mode[MAX_RECOG_OPERANDS];
2654   int retval = 0;
2655 
2656   this_insn = insn;
2657   n_reloads = 0;
2658   n_replacements = 0;
2659   n_earlyclobbers = 0;
2660   replace_reloads = replace;
2661   hard_regs_live_known = live_known;
2662   static_reload_reg_p = reload_reg_p;
2663 
2664   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2665      neither are insns that SET cc0.  Insns that use CC0 are not allowed
2666      to have any input reloads.  */
2667   if (JUMP_P (insn) || CALL_P (insn))
2668     no_output_reloads = 1;
2669 
2670   if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2671     no_input_reloads = 1;
2672   if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2673     no_output_reloads = 1;
2674 
2675   /* The eliminated forms of any secondary memory locations are per-insn, so
2676      clear them out here.  */
2677 
2678   if (secondary_memlocs_elim_used)
2679     {
2680       memset (secondary_memlocs_elim, 0,
2681 	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2682       secondary_memlocs_elim_used = 0;
2683     }
2684 
2685   /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2686      is cheap to move between them.  If it is not, there may not be an insn
2687      to do the copy, so we may need a reload.  */
2688   if (GET_CODE (body) == SET
2689       && REG_P (SET_DEST (body))
2690       && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2691       && REG_P (SET_SRC (body))
2692       && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2693       && register_move_cost (GET_MODE (SET_SRC (body)),
2694 			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2695 			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2696     return 0;
2697 
2698   extract_insn (insn);
2699 
2700   noperands = reload_n_operands = recog_data.n_operands;
2701   n_alternatives = recog_data.n_alternatives;
2702 
2703   /* Just return "no reloads" if insn has no operands with constraints.  */
2704   if (noperands == 0 || n_alternatives == 0)
2705     return 0;
2706 
2707   insn_code_number = INSN_CODE (insn);
2708   this_insn_is_asm = insn_code_number < 0;
2709 
2710   memcpy (operand_mode, recog_data.operand_mode,
2711 	  noperands * sizeof (machine_mode));
2712   memcpy (constraints, recog_data.constraints,
2713 	  noperands * sizeof (const char *));
2714 
2715   commutative = -1;
2716 
2717   /* If we will need to know, later, whether some pair of operands
2718      are the same, we must compare them now and save the result.
2719      Reloading the base and index registers will clobber them
2720      and afterward they will fail to match.  */
2721 
2722   for (i = 0; i < noperands; i++)
2723     {
2724       const char *p;
2725       int c;
2726       char *end;
2727 
2728       substed_operand[i] = recog_data.operand[i];
2729       p = constraints[i];
2730 
2731       modified[i] = RELOAD_READ;
2732 
2733       /* Scan this operand's constraint to see if it is an output operand,
2734 	 an in-out operand, is commutative, or should match another.  */
2735 
2736       while ((c = *p))
2737 	{
2738 	  p += CONSTRAINT_LEN (c, p);
2739 	  switch (c)
2740 	    {
2741 	    case '=':
2742 	      modified[i] = RELOAD_WRITE;
2743 	      break;
2744 	    case '+':
2745 	      modified[i] = RELOAD_READ_WRITE;
2746 	      break;
2747 	    case '%':
2748 	      {
2749 		/* The last operand should not be marked commutative.  */
2750 		gcc_assert (i != noperands - 1);
2751 
2752 		/* We currently only support one commutative pair of
2753 		   operands.  Some existing asm code currently uses more
2754 		   than one pair.  Previously, that would usually work,
2755 		   but sometimes it would crash the compiler.  We
2756 		   continue supporting that case as well as we can by
2757 		   silently ignoring all but the first pair.  In the
2758 		   future we may handle it correctly.  */
2759 		if (commutative < 0)
2760 		  commutative = i;
2761 		else
2762 		  gcc_assert (this_insn_is_asm);
2763 	      }
2764 	      break;
2765 	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2766 	       of locale support we don't want.  */
2767 	    case '0': case '1': case '2': case '3': case '4':
2768 	    case '5': case '6': case '7': case '8': case '9':
2769 	      {
2770 		c = strtoul (p - 1, &end, 10);
2771 		p = end;
2772 
2773 		operands_match[c][i]
2774 		  = operands_match_p (recog_data.operand[c],
2775 				      recog_data.operand[i]);
2776 
2777 		/* An operand may not match itself.  */
2778 		gcc_assert (c != i);
2779 
2780 		/* If C can be commuted with C+1, and C might need to match I,
2781 		   then C+1 might also need to match I.  */
2782 		if (commutative >= 0)
2783 		  {
2784 		    if (c == commutative || c == commutative + 1)
2785 		      {
2786 			int other = c + (c == commutative ? 1 : -1);
2787 			operands_match[other][i]
2788 			  = operands_match_p (recog_data.operand[other],
2789 					      recog_data.operand[i]);
2790 		      }
2791 		    if (i == commutative || i == commutative + 1)
2792 		      {
2793 			int other = i + (i == commutative ? 1 : -1);
2794 			operands_match[c][other]
2795 			  = operands_match_p (recog_data.operand[c],
2796 					      recog_data.operand[other]);
2797 		      }
2798 		    /* Note that C is supposed to be less than I.
2799 		       No need to consider altering both C and I because in
2800 		       that case we would alter one into the other.  */
2801 		  }
2802 	      }
2803 	    }
2804 	}
2805     }
2806 
2807   /* Examine each operand that is a memory reference or memory address
2808      and reload parts of the addresses into index registers.
2809      Also here any references to pseudo regs that didn't get hard regs
2810      but are equivalent to constants get replaced in the insn itself
2811      with those constants.  Nobody will ever see them again.
2812 
2813      Finally, set up the preferred classes of each operand.  */
2814 
2815   for (i = 0; i < noperands; i++)
2816     {
2817       RTX_CODE code = GET_CODE (recog_data.operand[i]);
2818 
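      /* Classify the kinds of reloads this operand and any address inside
	 it would need, based on whether the operand is read, written,
	 or both.  */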
2819       address_reloaded[i] = 0;
2820       address_operand_reloaded[i] = 0;
2821       operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2822 			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2823 			 : RELOAD_OTHER);
2824       address_type[i]
2825 	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2826 	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2827 	   : RELOAD_OTHER);
2828 
2829       if (*constraints[i] == 0)
2830 	/* Ignore things like match_operator operands.  */
2831 	;
2832       else if (insn_extra_address_constraint
2833 	       (lookup_constraint (constraints[i])))
2834 	{
2835 	  address_operand_reloaded[i]
2836 	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2837 				    recog_data.operand[i],
2838 				    recog_data.operand_loc[i],
2839 				    i, operand_type[i], ind_levels, insn);
2840 
2841 	  /* If we now have a simple operand where we used to have a
2842 	     PLUS or MULT, re-recognize and try again.  */
2843 	  if ((OBJECT_P (*recog_data.operand_loc[i])
2844 	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2845 	      && (GET_CODE (recog_data.operand[i]) == MULT
2846 		  || GET_CODE (recog_data.operand[i]) == PLUS))
2847 	    {
2848 	      INSN_CODE (insn) = -1;
2849 	      retval = find_reloads (insn, replace, ind_levels, live_known,
2850 				     reload_reg_p);
2851 	      return retval;
2852 	    }
2853 
2854 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2855 	  substed_operand[i] = recog_data.operand[i];
2856 
2857 	  /* Address operands are reloaded in their existing mode,
2858 	     no matter what is specified in the machine description.  */
2859 	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2860 
2861 	  /* If the address is a single CONST_INT, pick the address mode
2862 	     instead; otherwise we will later not know in which mode
2863 	     the reload should be performed.  */
2864 	  if (operand_mode[i] == VOIDmode)
2865 	    operand_mode[i] = Pmode;
2866 
2867 	}
2868       else if (code == MEM)
2869 	{
2870 	  address_reloaded[i]
2871 	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2872 				    recog_data.operand_loc[i],
2873 				    XEXP (recog_data.operand[i], 0),
2874 				    &XEXP (recog_data.operand[i], 0),
2875 				    i, address_type[i], ind_levels, insn);
2876 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2877 	  substed_operand[i] = recog_data.operand[i];
2878 	}
2879       else if (code == SUBREG)
2880 	{
2881 	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2882 	  rtx op
2883 	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2884 				   ind_levels,
2885 				   set != 0
2886 				   && &SET_DEST (set) == recog_data.operand_loc[i],
2887 				   insn,
2888 				   &address_reloaded[i]);
2889 
2890 	  /* If we made a MEM to load (a part of) the stackslot of a pseudo
2891 	     that didn't get a hard register, emit a USE with a REG_EQUAL
2892 	     note in front so that we might inherit a previous, possibly
2893 	     wider reload.  */
2894 
2895 	  if (replace
2896 	      && MEM_P (op)
2897 	      && REG_P (reg)
2898 	      && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2899 			   GET_MODE_SIZE (GET_MODE (op)))
2900 	      && reg_equiv_constant (REGNO (reg)) == 0)
2901 	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2902 						   insn),
2903 				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2904 
2905 	  substed_operand[i] = recog_data.operand[i] = op;
2906 	}
2907       else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2908 	/* We can get a PLUS as an "operand" as a result of register
2909 	   elimination.  See eliminate_regs and gen_reload.  We handle
2910 	   a unary operator by reloading the operand.  */
2911 	substed_operand[i] = recog_data.operand[i]
2912 	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2913 				 ind_levels, 0, insn,
2914 				 &address_reloaded[i]);
2915       else if (code == REG)
2916 	{
2917 	  /* This is equivalent to calling find_reloads_toplev.
2918 	     The code is duplicated for speed.
2919 	     When we find a pseudo always equivalent to a constant,
2920 	     we replace it by the constant.  We must be sure, however,
2921 	     that we don't try to replace it in the insn in which it
2922 	     is being set.  */
2923 	  int regno = REGNO (recog_data.operand[i]);
2924 	  if (reg_equiv_constant (regno) != 0
2925 	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2926 	    {
2927 	      /* Record the existing mode so that the check if constants are
2928 		 allowed will work when operand_mode isn't specified.  */
2929 
2930 	      if (operand_mode[i] == VOIDmode)
2931 		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2932 
2933 	      substed_operand[i] = recog_data.operand[i]
2934 		= reg_equiv_constant (regno);
2935 	    }
2936 	  if (reg_equiv_memory_loc (regno) != 0
2937 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2938 	    /* We need not give a valid is_set_dest argument since the case
2939 	       of a constant equivalence was checked above.  */
2940 	    substed_operand[i] = recog_data.operand[i]
2941 	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2942 				     ind_levels, 0, insn,
2943 				     &address_reloaded[i]);
2944 	}
2945       /* If the operand is still a register (we didn't replace it with an
2946 	 equivalent), get the preferred class to reload it into.  */
2947       code = GET_CODE (recog_data.operand[i]);
2948       preferred_class[i]
2949 	= ((code == REG && REGNO (recog_data.operand[i])
2950 	    >= FIRST_PSEUDO_REGISTER)
2951 	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
2952 	   : NO_REGS);
2953       pref_or_nothing[i]
2954 	= (code == REG
2955 	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2956 	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2957     }
2958 
2959   /* If this is simply a copy from operand 1 to operand 0, merge the
2960      preferred classes for the operands.  */
2961   if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2962       && recog_data.operand[1] == SET_SRC (set))
2963     {
2964       preferred_class[0] = preferred_class[1]
2965 	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2966       pref_or_nothing[0] |= pref_or_nothing[1];
2967       pref_or_nothing[1] |= pref_or_nothing[0];
2968     }
2969 
2970   /* Now see what we need for pseudo-regs that didn't get hard regs
2971      or got the wrong kind of hard reg.  For this, we must consider
2972      all the operands together against the register constraints.  */
2973 
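  /* BEST holds the cost (LOSERS * 6 + REJECT) of the most attractive
     workable alternative found so far.  It starts at a sentinel value;
     if it is unchanged after the loop, no alternative can be reloaded.  */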
2974   best = MAX_RECOG_OPERANDS * 2 + 600;
2975 
2976   goal_alternative_swapped = 0;
2977 
2978   /* The constraints are made of several alternatives.
2979      Each operand's constraint looks like foo,bar,... with commas
2980      separating the alternatives.  The first alternatives for all
2981      operands go together, the second alternatives go together, etc.
2982 
2983      First loop over alternatives.  */
2984 
2985   alternative_mask enabled = get_enabled_alternatives (insn);
2986   for (this_alternative_number = 0;
2987        this_alternative_number < n_alternatives;
2988        this_alternative_number++)
2989     {
2990       int swapped;
2991 
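      /* Skip alternatives that are disabled for this insn, advancing each
	 operand's constraint pointer past the skipped alternative so the
	 pointers stay in step with THIS_ALTERNATIVE_NUMBER.  */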
2992       if (!TEST_BIT (enabled, this_alternative_number))
2993 	{
2994 	  int i;
2995 
2996 	  for (i = 0; i < recog_data.n_operands; i++)
2997 	    constraints[i] = skip_alternative (constraints[i]);
2998 
2999 	  continue;
3000 	}
3001 
3002       /* If insn is commutative (it's safe to exchange a certain pair
3003 	 of operands) then we need to try each alternative twice, the
3004 	 second time matching those two operands as if we had
3005 	 exchanged them.  To do this, really exchange them in
3006 	 operands.  */
3007       for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3008 	{
3009 	  /* Loop over operands for one constraint alternative.  */
3010 	  /* LOSERS counts those that don't fit this alternative
3011 	     and would require loading.  */
3012 	  int losers = 0;
3013 	  /* BAD is set to 1 if some operand can't fit this alternative
3014 	     even after reloading.  */
3015 	  int bad = 0;
3016 	  /* REJECT is a count of how undesirable this alternative says it is
3017 	     if any reloading is required.  If the alternative matches exactly
3018 	     then REJECT is ignored, but otherwise it gets this much
3019 	     counted against it in addition to the reloading needed.  Each
3020 	     ? counts three times here since we want the disparagement caused by
3021 	     a bad register class to count only 1/3 as much.  */
3022 	  int reject = 0;
3023 
3024 	  if (swapped)
3025 	    {
3026 	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3027 	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3028 	      /* Swap the duplicates too.  */
3029 	      for (i = 0; i < recog_data.n_dups; i++)
3030 		if (recog_data.dup_num[i] == commutative
3031 		    || recog_data.dup_num[i] == commutative + 1)
3032 		  *recog_data.dup_loc[i]
3033 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3034 
3035 	      std::swap (preferred_class[commutative],
3036 			 preferred_class[commutative + 1]);
3037 	      std::swap (pref_or_nothing[commutative],
3038 			 pref_or_nothing[commutative + 1]);
3039 	      std::swap (address_reloaded[commutative],
3040 			 address_reloaded[commutative + 1]);
3041 	    }
3042 
3043 	  this_earlyclobber = 0;
3044 
3045 	  for (i = 0; i < noperands; i++)
3046 	    {
3047 	      const char *p = constraints[i];
3048 	      char *end;
3049 	      int len;
3050 	      int win = 0;
3051 	      int did_match = 0;
3052 	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3053 	      int badop = 1;
3054 	      /* 0 => this operand can be reloaded if the alternative allows regs.  */
3055 	      int winreg = 0;
3056 	      int c;
3057 	      int m;
3058 	      rtx operand = recog_data.operand[i];
3059 	      int offset = 0;
3060 	      /* Nonzero means this is a MEM that must be reloaded into a reg
3061 		 regardless of what the constraint says.  */
3062 	      int force_reload = 0;
3063 	      int offmemok = 0;
3064 	      /* Nonzero if a constant forced into memory would be OK for this
3065 		 operand.  */
3066 	      int constmemok = 0;
3067 	      int earlyclobber = 0;
3068 	      enum constraint_num cn;
3069 	      enum reg_class cl;
3070 
3071 	      /* If the predicate accepts a unary operator, it means that
3072 		 we need to reload the operand, but do not do this for
3073 		 match_operator and friends.  */
3074 	      if (UNARY_P (operand) && *p != 0)
3075 		operand = XEXP (operand, 0);
3076 
3077 	      /* If the operand is a SUBREG, extract
3078 		 the REG or MEM (or maybe even a constant) within.
3079 		 (Constants can occur as a result of reg_equiv_constant.)  */
3080 
3081 	      while (GET_CODE (operand) == SUBREG)
3082 		{
3083 		  /* Offset only matters when operand is a REG and
3084 		     it is a hard reg.  This is because it is passed
3085 		     to reg_fits_class_p if it is a REG and all pseudos
3086 		     return 0 from that function.  */
3087 		  if (REG_P (SUBREG_REG (operand))
3088 		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3089 		    {
3090 		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3091 						 GET_MODE (SUBREG_REG (operand)),
3092 						 SUBREG_BYTE (operand),
3093 						 GET_MODE (operand)) < 0)
3094 			force_reload = 1;
3095 		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3096 						     GET_MODE (SUBREG_REG (operand)),
3097 						     SUBREG_BYTE (operand),
3098 						     GET_MODE (operand));
3099 		    }
3100 		  operand = SUBREG_REG (operand);
3101 		  /* Force reload if this is a constant or PLUS or if there may
3102 		     be a problem accessing OPERAND in the outer mode.  */
3103 		  scalar_int_mode inner_mode;
3104 		  if (CONSTANT_P (operand)
3105 		      || GET_CODE (operand) == PLUS
3106 		      /* We must force a reload of paradoxical SUBREGs
3107 			 of a MEM because the alignment of the inner value
3108 			 may not be enough to do the outer reference.  On
3109 			 big-endian machines, it may also reference outside
3110 			 the object.
3111 
3112 			 On machines that extend byte operations and we have a
3113 			 SUBREG where both the inner and outer modes are no wider
3114 			 than a word and the inner mode is narrower, is integral,
3115 			 and gets extended when loaded from memory, combine.c has
3116 			 made assumptions about the behavior of the machine in such
3117 			 register access.  If the data is, in fact, in memory we
3118 			 must always load using the size assumed to be in the
3119 			 register and let the insn do the different-sized
3120 			 accesses.
3121 
3122 			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3123 			 this case eliminate_regs has left non-paradoxical
3124 			 subregs for push_reload to see.  Make sure it does
3125 			 by forcing the reload.
3126 
3127 			 ??? When is it right at this stage to have a subreg
3128 			 of a mem that is _not_ to be handled specially?  IMO
3129 			 those should have been reduced to just a mem.  */
3130 		      || ((MEM_P (operand)
3131 			   || (REG_P (operand)
3132 			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3133 			  && (WORD_REGISTER_OPERATIONS
3134 			      || (((maybe_lt
3135 				    (GET_MODE_BITSIZE (GET_MODE (operand)),
3136 				     BIGGEST_ALIGNMENT))
3137 				   && (paradoxical_subreg_p
3138 				       (operand_mode[i], GET_MODE (operand)))))
3139 			      || BYTES_BIG_ENDIAN
3140 			      || (known_le (GET_MODE_SIZE (operand_mode[i]),
3141 					    UNITS_PER_WORD)
3142 				  && (is_a <scalar_int_mode>
3143 				      (GET_MODE (operand), &inner_mode))
3144 				  && (GET_MODE_SIZE (inner_mode)
3145 				      <= UNITS_PER_WORD)
3146 				  && paradoxical_subreg_p (operand_mode[i],
3147 							   inner_mode)
3148 				  && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3149 		      )
3150 		    force_reload = 1;
3151 		}
3152 
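	      /* Start with the most pessimistic assumptions about this
		 operand in this alternative; the constraint letters below
		 will relax them.  */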
3153 	      this_alternative[i] = NO_REGS;
3154 	      this_alternative_win[i] = 0;
3155 	      this_alternative_match_win[i] = 0;
3156 	      this_alternative_offmemok[i] = 0;
3157 	      this_alternative_earlyclobber[i] = 0;
3158 	      this_alternative_matches[i] = -1;
3159 
3160 	      /* An empty constraint or empty alternative
3161 		 allows anything which matched the pattern.  */
3162 	      if (*p == 0 || *p == ',')
3163 		win = 1, badop = 0;
3164 
3165 	      /* Scan this alternative's specs for this operand;
3166 		 set WIN if the operand fits any letter in this alternative.
3167 		 Otherwise, clear BADOP if this operand could
3168 		 fit some letter after reloads,
3169 		 or set WINREG if this operand could fit after reloads
3170 		 provided the constraint allows some registers.  */
3171 
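	      /* The comma expression in the switch header below reads the
		 next constraint character into C and its length into LEN;
		 the loop steps P past each constraint and stops when C
		 becomes nul, either at a ',' or at the end of the string.  */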
3172 	      do
3173 		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3174 		  {
3175 		  case '\0':
3176 		    len = 0;
3177 		    break;
3178 		  case ',':
3179 		    c = '\0';
3180 		    break;
3181 
3182 		  case '?':
3183 		    reject += 6;
3184 		    break;
3185 
3186 		  case '!':
3187 		    reject = 600;
3188 		    break;
3189 
3190 		  case '#':
3191 		    /* Ignore rest of this alternative as far as
3192 		       reloading is concerned.  */
3193 		    do
3194 		      p++;
3195 		    while (*p && *p != ',');
3196 		    len = 0;
3197 		    break;
3198 
3199 		  case '0':  case '1':  case '2':  case '3':  case '4':
3200 		  case '5':  case '6':  case '7':  case '8':  case '9':
3201 		    m = strtoul (p, &end, 10);
3202 		    p = end;
3203 		    len = 0;
3204 
3205 		    this_alternative_matches[i] = m;
3206 		    /* We are supposed to match a previous operand.
3207 		       If we do, we win if that one did.
3208 		       If we do not, count both of the operands as losers.
3209 		       (This is too conservative, since most of the time
3210 		       only a single reload insn will be needed to make
3211 		       the two operands win.  As a result, this alternative
3212 		       may be rejected when it is actually desirable.)  */
3213 		    if ((swapped && (m != commutative || i != commutative + 1))
3214 			/* If we are matching as if two operands were swapped,
3215 			   also pretend that operands_match had been computed
3216 			   with swapped.
3217 			   But if I is the second of those and C is the first,
3218 			   don't exchange them, because operands_match is valid
3219 			   only on one side of its diagonal.  */
3220 			? (operands_match
3221 			   [(m == commutative || m == commutative + 1)
3222 			    ? 2 * commutative + 1 - m : m]
3223 			   [(i == commutative || i == commutative + 1)
3224 			    ? 2 * commutative + 1 - i : i])
3225 			: operands_match[m][i])
3226 		      {
3227 			/* If we are matching a non-offsettable address where an
3228 			   offsettable address was expected, then we must reject
3229 			   this combination, because we can't reload it.  */
3230 			if (this_alternative_offmemok[m]
3231 			    && MEM_P (recog_data.operand[m])
3232 			    && this_alternative[m] == NO_REGS
3233 			    && ! this_alternative_win[m])
3234 			  bad = 1;
3235 
3236 			did_match = this_alternative_win[m];
3237 		      }
3238 		    else
3239 		      {
3240 			/* Operands don't match.  */
3241 			rtx value;
3242 			int loc1, loc2;
3243 			/* Retroactively mark the operand we had to match
3244 			   as a loser, if it wasn't already.  */
3245 			if (this_alternative_win[m])
3246 			  losers++;
3247 			this_alternative_win[m] = 0;
3248 			if (this_alternative[m] == NO_REGS)
3249 			  bad = 1;
3250 			/* But count the pair only once in the total badness of
3251 			   this alternative, if the pair can be a dummy reload.
3252 			   The pointers in operand_loc are not swapped; swap
3253 			   them by hand if necessary.  */
3254 			if (swapped && i == commutative)
3255 			  loc1 = commutative + 1;
3256 			else if (swapped && i == commutative + 1)
3257 			  loc1 = commutative;
3258 			else
3259 			  loc1 = i;
3260 			if (swapped && m == commutative)
3261 			  loc2 = commutative + 1;
3262 			else if (swapped && m == commutative + 1)
3263 			  loc2 = commutative;
3264 			else
3265 			  loc2 = m;
3266 			value
3267 			  = find_dummy_reload (recog_data.operand[i],
3268 					       recog_data.operand[m],
3269 					       recog_data.operand_loc[loc1],
3270 					       recog_data.operand_loc[loc2],
3271 					       operand_mode[i], operand_mode[m],
3272 					       this_alternative[m], -1,
3273 					       this_alternative_earlyclobber[m]);
3274 
3275 			if (value != 0)
3276 			  losers--;
3277 		      }
3278 		    /* This can be fixed with reloads if the operand
3279 		       we are supposed to match can be fixed with reloads.  */
3280 		    badop = 0;
3281 		    this_alternative[i] = this_alternative[m];
3282 
3283 		    /* If we have to reload this operand and some previous
3284 		       operand also had to match the same thing as this
3285 		       operand, we don't know how to do that.  So reject this
3286 		       alternative.  */
3287 		    if (! did_match || force_reload)
3288 		      for (j = 0; j < i; j++)
3289 			if (this_alternative_matches[j]
3290 			    == this_alternative_matches[i])
3291 			  {
3292 			    badop = 1;
3293 			    break;
3294 			  }
3295 		    break;
3296 
3297 		  case 'p':
3298 		    /* All necessary reloads for an address_operand
3299 		       were handled in find_reloads_address.  */
3300 		    this_alternative[i]
3301 		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3302 					ADDRESS, SCRATCH);
3303 		    win = 1;
3304 		    badop = 0;
3305 		    break;
3306 
3307 		  case TARGET_MEM_CONSTRAINT:
3308 		    if (force_reload)
3309 		      break;
3310 		    if (MEM_P (operand)
3311 			|| (REG_P (operand)
3312 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3313 			    && reg_renumber[REGNO (operand)] < 0))
3314 		      win = 1;
3315 		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3316 		      badop = 0;
3317 		    constmemok = 1;
3318 		    break;
3319 
3320 		  case '<':
3321 		    if (MEM_P (operand)
3322 			&& ! address_reloaded[i]
3323 			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3324 			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3325 		      win = 1;
3326 		    break;
3327 
3328 		  case '>':
3329 		    if (MEM_P (operand)
3330 			&& ! address_reloaded[i]
3331 			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3332 			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3333 		      win = 1;
3334 		    break;
3335 
3336 		    /* Memory operand whose address is not offsettable.  */
3337 		  case 'V':
3338 		    if (force_reload)
3339 		      break;
3340 		    if (MEM_P (operand)
3341 			&& ! (ind_levels ? offsettable_memref_p (operand)
3342 			      : offsettable_nonstrict_memref_p (operand))
3343 			/* Certain mem addresses will become offsettable
3344 			   after they themselves are reloaded.  This is important;
3345 			   we don't want our own handling of unoffsettables
3346 			   to override the handling of reg_equiv_address.  */
3347 			&& !(REG_P (XEXP (operand, 0))
3348 			     && (ind_levels == 0
3349 				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3350 		      win = 1;
3351 		    break;
3352 
3353 		    /* Memory operand whose address is offsettable.  */
3354 		  case 'o':
3355 		    if (force_reload)
3356 		      break;
3357 		    if ((MEM_P (operand)
3358 			 /* If IND_LEVELS, find_reloads_address won't reload a
3359 			    pseudo that didn't get a hard reg, so we have to
3360 			    reject that case.  */
3361 			 && ((ind_levels ? offsettable_memref_p (operand)
3362 			      : offsettable_nonstrict_memref_p (operand))
3363 			     /* A reloaded address is offsettable because it is now
3364 				just a simple register indirect.  */
3365 			     || address_reloaded[i] == 1))
3366 			|| (REG_P (operand)
3367 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3368 			    && reg_renumber[REGNO (operand)] < 0
3369 			    /* If reg_equiv_address is nonzero, we will be
3370 			       loading it into a register; hence it will be
3371 			       offsettable, but we cannot say that reg_equiv_mem
3372 			       is offsettable without checking.  */
3373 			    && ((reg_equiv_mem (REGNO (operand)) != 0
3374 				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3375 				|| (reg_equiv_address (REGNO (operand)) != 0))))
3376 		      win = 1;
3377 		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3378 			|| MEM_P (operand))
3379 		      badop = 0;
3380 		    constmemok = 1;
3381 		    offmemok = 1;
3382 		    break;
3383 
3384 		  case '&':
3385 		    /* Output operand that is stored before the need for the
3386 		       input operands (and their index registers) is over.  */
3387 		    earlyclobber = 1, this_earlyclobber = 1;
3388 		    break;
3389 
3390 		  case 'X':
3391 		    force_reload = 0;
3392 		    win = 1;
3393 		    break;
3394 
3395 		  case 'g':
3396 		    if (! force_reload
3397 			/* A PLUS is never a valid operand, but reload can make
3398 			   it from a register when eliminating registers.  */
3399 			&& GET_CODE (operand) != PLUS
3400 			/* A SCRATCH is not a valid operand.  */
3401 			&& GET_CODE (operand) != SCRATCH
3402 			&& (! CONSTANT_P (operand)
3403 			    || ! flag_pic
3404 			    || LEGITIMATE_PIC_OPERAND_P (operand))
3405 			&& (GENERAL_REGS == ALL_REGS
3406 			    || !REG_P (operand)
3407 			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3408 				&& reg_renumber[REGNO (operand)] < 0)))
3409 		      win = 1;
3410 		    cl = GENERAL_REGS;
3411 		    goto reg;
3412 
3413 		  default:
3414 		    cn = lookup_constraint (p);
3415 		    switch (get_constraint_type (cn))
3416 		      {
3417 		      case CT_REGISTER:
3418 			cl = reg_class_for_constraint (cn);
3419 			if (cl != NO_REGS)
3420 			  goto reg;
3421 			break;
3422 
3423 		      case CT_CONST_INT:
3424 			if (CONST_INT_P (operand)
3425 			    && (insn_const_int_ok_for_constraint
3426 				(INTVAL (operand), cn)))
3427 			  win = true;
3428 			break;
3429 
3430 		      case CT_MEMORY:
3431 			if (force_reload)
3432 			  break;
3433 			if (constraint_satisfied_p (operand, cn))
3434 			  win = 1;
3435 			/* If the address was already reloaded,
3436 			   we win as well.  */
3437 			else if (MEM_P (operand) && address_reloaded[i] == 1)
3438 			  win = 1;
3439 			/* Likewise if the address will be reloaded because
3440 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3441 			   we have to check.  */
3442 			else if (REG_P (operand)
3443 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3444 				 && reg_renumber[REGNO (operand)] < 0
3445 				 && ((reg_equiv_mem (REGNO (operand)) != 0
3446 				      && (constraint_satisfied_p
3447 					  (reg_equiv_mem (REGNO (operand)),
3448 					   cn)))
3449 				     || (reg_equiv_address (REGNO (operand))
3450 					 != 0)))
3451 			  win = 1;
3452 
3453 			/* If we didn't already win, we can reload
3454 			   constants via force_const_mem, and other
3455 			   MEMs by reloading the address like for 'o'.  */
3456 			if (CONST_POOL_OK_P (operand_mode[i], operand)
3457 			    || MEM_P (operand))
3458 			  badop = 0;
3459 			constmemok = 1;
3460 			offmemok = 1;
3461 			break;
3462 
3463 		      case CT_SPECIAL_MEMORY:
3464 			if (force_reload)
3465 			  break;
3466 			if (constraint_satisfied_p (operand, cn))
3467 			  win = 1;
3468 			/* Likewise if the address will be reloaded because
3469 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3470 			   we have to check.  */
3471 			else if (REG_P (operand)
3472 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3473 				 && reg_renumber[REGNO (operand)] < 0
3474 				 && reg_equiv_mem (REGNO (operand)) != 0
3475 				 && (constraint_satisfied_p
3476 				     (reg_equiv_mem (REGNO (operand)), cn)))
3477 			  win = 1;
3478 			break;
3479 
3480 		      case CT_ADDRESS:
3481 			if (constraint_satisfied_p (operand, cn))
3482 			  win = 1;
3483 
3484 			/* If we didn't already win, we can reload
3485 			   the address into a base register.  */
3486 			this_alternative[i]
3487 			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3488 					    ADDRESS, SCRATCH);
3489 			badop = 0;
3490 			break;
3491 
3492 		      case CT_FIXED_FORM:
3493 			if (constraint_satisfied_p (operand, cn))
3494 			  win = 1;
3495 			break;
3496 		      }
3497 		    break;
3498 
3499 		  reg:
3500 		    this_alternative[i]
3501 		      = reg_class_subunion[this_alternative[i]][cl];
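		    /* A BLKmode value can never be put in a register, so
		       this register constraint cannot make the operand
		       a winner.  */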
3502 		    if (GET_MODE (operand) == BLKmode)
3503 		      break;
3504 		    winreg = 1;
3505 		    if (REG_P (operand)
3506 			&& reg_fits_class_p (operand, this_alternative[i],
3507 					     offset, GET_MODE (recog_data.operand[i])))
3508 		      win = 1;
3509 		    break;
3510 		  }
3511 	      while ((p += len), c);
3512 
3513 	      if (swapped == (commutative >= 0 ? 1 : 0))
3514 		constraints[i] = p;
3515 
3516 	      /* If this operand could be handled with a reg,
3517 		 and some reg is allowed, then this operand can be handled.  */
3518 	      if (winreg && this_alternative[i] != NO_REGS
3519 		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3520 		badop = 0;
3521 
3522 	      /* Record which operands fit this alternative.  */
3523 	      this_alternative_earlyclobber[i] = earlyclobber;
3524 	      if (win && ! force_reload)
3525 		this_alternative_win[i] = 1;
3526 	      else if (did_match && ! force_reload)
3527 		this_alternative_match_win[i] = 1;
3528 	      else
3529 		{
3530 		  int const_to_mem = 0;
3531 
3532 		  this_alternative_offmemok[i] = offmemok;
3533 		  losers++;
3534 		  if (badop)
3535 		    bad = 1;
3536 		  /* Alternative loses if it has no regs for a reg operand.  */
3537 		  if (REG_P (operand)
3538 		      && this_alternative[i] == NO_REGS
3539 		      && this_alternative_matches[i] < 0)
3540 		    bad = 1;
3541 
3542 		  /* If this is a constant that is reloaded into the desired
3543 		     class by copying it to memory first, count that as another
3544 		     reload.  This is consistent with other code and is
3545 		     required to avoid choosing another alternative when
3546 		     the constant is moved into memory by this function on
3547 		     an early reload pass.  Note that the test here is
3548 		     precisely the same as in the code below that calls
3549 		     force_const_mem.  */
3550 		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3551 		      && ((targetm.preferred_reload_class (operand,
3552 							   this_alternative[i])
3553 			   == NO_REGS)
3554 			  || no_input_reloads))
3555 		    {
3556 		      const_to_mem = 1;
3557 		      if (this_alternative[i] != NO_REGS)
3558 			losers++;
3559 		    }
3560 
3561 		  /* Alternative loses if it requires a type of reload not
3562 		     permitted for this insn.  We can always reload SCRATCH
3563 		     and objects with a REG_UNUSED note.  */
3564 		  if (GET_CODE (operand) != SCRATCH
3565 		      && modified[i] != RELOAD_READ && no_output_reloads
3566 		      && ! find_reg_note (insn, REG_UNUSED, operand))
3567 		    bad = 1;
3568 		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3569 			   && ! const_to_mem)
3570 		    bad = 1;
3571 
3572 		  /* If we can't reload this value at all, reject this
3573 		     alternative.  Note that we could also lose due to
3574 		     LIMIT_RELOAD_CLASS, but we don't check that
3575 		     here.  */
3576 
3577 		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3578 		    {
3579 		      if (targetm.preferred_reload_class (operand,
3580 							  this_alternative[i])
3581 			  == NO_REGS)
3582 			reject = 600;
3583 
3584 		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3585 			  && (targetm.preferred_output_reload_class (operand,
3586 							    this_alternative[i])
3587 			      == NO_REGS))
3588 			reject = 600;
3589 		    }
3590 
3591 		  /* We prefer to reload pseudos over reloading other things,
3592 		     since such reloads may be able to be eliminated later.
3593 		     If we are reloading a SCRATCH, we won't be generating any
3594 		     insns, just using a register, so it is also preferred.
3595 		     So bump REJECT in other cases.  Don't do this in the
3596 		     case where we are forcing a constant into memory and
3597 		     it will then win, since we don't want a different
3598 		     alternative to match in that case.  */
3599 		  if (! (REG_P (operand)
3600 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3601 		      && GET_CODE (operand) != SCRATCH
3602 		      && ! (const_to_mem && constmemok))
3603 		    reject += 2;
3604 
3605 		  /* Input reloads can be inherited more often than output
3606 		     reloads can be removed, so penalize output reloads.  */
3607 		  if (operand_type[i] != RELOAD_FOR_INPUT
3608 		      && GET_CODE (operand) != SCRATCH)
3609 		    reject++;
3610 		}
3611 
3612 	      /* If this operand is a pseudo register that didn't get
3613 		 a hard reg and this alternative accepts some
3614 		 register, see if the class that we want is a subset
3615 		 of the preferred class for this register.  If not,
3616 		 but it intersects that class, use the preferred class
3617 		 instead.  If it does not intersect the preferred
3618 		 class, show that usage of this alternative should be
3619 		 discouraged; it will be discouraged more still if the
3620 		 register is `preferred or nothing'.  We do this
3621 		 because it increases the chance of reusing our spill
3622 		 register in a later insn and avoiding a pair of
3623 		 memory stores and loads.
3624 
3625 		 Don't bother with this if this alternative will
3626 		 accept this operand.
3627 
3628 		 Don't do this for a multiword operand, since it is
3629 		 only a small win and has the risk of requiring more
3630 		 spill registers, which could cause a large loss.
3631 
3632 		 Don't do this if the preferred class has only one
3633 		 register because we might otherwise exhaust the
3634 		 class.  */
3635 
3636 	      if (! win && ! did_match
3637 		  && this_alternative[i] != NO_REGS
3638 		  && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3639 		  && reg_class_size [(int) preferred_class[i]] > 0
3640 		  && ! small_register_class_p (preferred_class[i]))
3641 		{
3642 		  if (! reg_class_subset_p (this_alternative[i],
3643 					    preferred_class[i]))
3644 		    {
3645 		      /* Since we don't have a way of forming the intersection,
3646 			 we just do something special if the preferred class
3647 			 is a subset of the class we have; that's the most
3648 			 common case anyway.  */
3649 		      if (reg_class_subset_p (preferred_class[i],
3650 					      this_alternative[i]))
3651 			this_alternative[i] = preferred_class[i];
3652 		      else
3653 			reject += (2 + 2 * pref_or_nothing[i]);
3654 		    }
3655 		}
3656 	    }
3657 
3658 	  /* Now see if any output operands that are marked "earlyclobber"
3659 	     in this alternative conflict with any input operands
3660 	     or any memory addresses.  */
3661 
3662 	  for (i = 0; i < noperands; i++)
3663 	    if (this_alternative_earlyclobber[i]
3664 		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3665 	      {
3666 		struct decomposition early_data;
3667 
3668 		early_data = decompose (recog_data.operand[i]);
3669 
3670 		gcc_assert (modified[i] != RELOAD_READ);
3671 
3672 		if (this_alternative[i] == NO_REGS)
3673 		  {
3674 		    this_alternative_earlyclobber[i] = 0;
3675 		    gcc_assert (this_insn_is_asm);
3676 		    error_for_asm (this_insn,
3677 			      "%<&%> constraint used with no register class");
3678 		  }
3679 
3680 		for (j = 0; j < noperands; j++)
3681 		  /* Is this an input operand or a memory ref?  */
3682 		  if ((MEM_P (recog_data.operand[j])
3683 		       || modified[j] != RELOAD_WRITE)
3684 		      && j != i
3685 		      /* Ignore things like match_operator operands.  */
3686 		      && !recog_data.is_operator[j]
3687 		      /* Don't count an input operand that is constrained to match
3688 			 the early clobber operand.  */
3689 		      && ! (this_alternative_matches[j] == i
3690 			    && rtx_equal_p (recog_data.operand[i],
3691 					    recog_data.operand[j]))
3692 		      /* Is it altered by storing the earlyclobber operand?  */
3693 		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3694 				    early_data))
3695 		    {
3696 		      /* If the output is in a non-empty few-regs class,
3697 			 it's costly to reload it, so reload the input instead.  */
3698 		      if (small_register_class_p (this_alternative[i])
3699 			  && (REG_P (recog_data.operand[j])
3700 			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3701 			{
3702 			  losers++;
3703 			  this_alternative_win[j] = 0;
3704 			  this_alternative_match_win[j] = 0;
3705 			}
3706 		      else
3707 			break;
3708 		    }
3709 		/* If an earlyclobber operand conflicts with something,
3710 		   it must be reloaded, so request this and count the cost.  */
3711 		if (j != noperands)
3712 		  {
3713 		    losers++;
3714 		    this_alternative_win[i] = 0;
3715 		    this_alternative_match_win[j] = 0;
3716 		    for (j = 0; j < noperands; j++)
3717 		      if (this_alternative_matches[j] == i
3718 			  && this_alternative_match_win[j])
3719 			{
3720 			  this_alternative_win[j] = 0;
3721 			  this_alternative_match_win[j] = 0;
3722 			  losers++;
3723 			}
3724 		  }
3725 	      }
3726 
3727 	  /* If one alternative accepts all the operands, no reload required,
3728 	     choose that alternative; don't consider the remaining ones.  */
3729 	  if (losers == 0)
3730 	    {
3731 	      /* Unswap these so that they are never swapped at `finish'.  */
3732 	      if (swapped)
3733 		{
3734 		  recog_data.operand[commutative] = substed_operand[commutative];
3735 		  recog_data.operand[commutative + 1]
3736 		    = substed_operand[commutative + 1];
3737 		}
3738 	      for (i = 0; i < noperands; i++)
3739 		{
3740 		  goal_alternative_win[i] = this_alternative_win[i];
3741 		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3742 		  goal_alternative[i] = this_alternative[i];
3743 		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3744 		  goal_alternative_matches[i] = this_alternative_matches[i];
3745 		  goal_alternative_earlyclobber[i]
3746 		    = this_alternative_earlyclobber[i];
3747 		}
3748 	      goal_alternative_number = this_alternative_number;
3749 	      goal_alternative_swapped = swapped;
3750 	      goal_earlyclobber = this_earlyclobber;
3751 	      goto finish;
3752 	    }
3753 
3754 	  /* REJECT, set by the ! and ? constraint characters and when a register
3755 	     would be reloaded into a non-preferred class, discourages the use of
3756 	     this alternative for a reload goal.  REJECT is incremented by six
3757 	     for each ? and two for each non-preferred class.  */
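	  /* Fold the two measures into one score: each operand that still
	     needs reloading costs 6, the same weight as a single '?'.  */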
3758 	  losers = losers * 6 + reject;
3759 
3760 	  /* If this alternative can be made to work by reloading,
3761 	     and it needs less reloading than the others checked so far,
3762 	     record it as the chosen goal for reloading.  */
3763 	  if (! bad)
3764 	    {
3765 	      if (best > losers)
3766 		{
3767 		  for (i = 0; i < noperands; i++)
3768 		    {
3769 		      goal_alternative[i] = this_alternative[i];
3770 		      goal_alternative_win[i] = this_alternative_win[i];
3771 		      goal_alternative_match_win[i]
3772 			= this_alternative_match_win[i];
3773 		      goal_alternative_offmemok[i]
3774 			= this_alternative_offmemok[i];
3775 		      goal_alternative_matches[i] = this_alternative_matches[i];
3776 		      goal_alternative_earlyclobber[i]
3777 			= this_alternative_earlyclobber[i];
3778 		    }
3779 		  goal_alternative_swapped = swapped;
3780 		  best = losers;
3781 		  goal_alternative_number = this_alternative_number;
3782 		  goal_earlyclobber = this_earlyclobber;
3783 		}
3784 	    }
3785 
3786 	  if (swapped)
3787 	    {
3788 	      /* If the commutative operands have been swapped, swap
3789 		 them back in order to check the next alternative.  */
3790 	      recog_data.operand[commutative] = substed_operand[commutative];
3791 	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3792 	      /* Unswap the duplicates too.  */
3793 	      for (i = 0; i < recog_data.n_dups; i++)
3794 		if (recog_data.dup_num[i] == commutative
3795 		    || recog_data.dup_num[i] == commutative + 1)
3796 		  *recog_data.dup_loc[i]
3797 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3798 
3799 	      /* Unswap the operand related information as well.  */
3800 	      std::swap (preferred_class[commutative],
3801 			 preferred_class[commutative + 1]);
3802 	      std::swap (pref_or_nothing[commutative],
3803 			 pref_or_nothing[commutative + 1]);
3804 	      std::swap (address_reloaded[commutative],
3805 			 address_reloaded[commutative + 1]);
3806 	    }
3807 	}
3808     }
3809 
3810   /* The operands don't meet the constraints.
3811      goal_alternative describes the alternative
3812      that we could reach by reloading the fewest operands.
3813      Reload so as to fit it.  */
3814 
3815   if (best == MAX_RECOG_OPERANDS * 2 + 600)
3816     {
3817       /* No alternative works with reloads??  */
3818       if (insn_code_number >= 0)
3819 	fatal_insn ("unable to generate reloads for:", insn);
3820       error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3821       /* Avoid further trouble with this insn.  */
3822       PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3823       n_reloads = 0;
3824       return 0;
3825     }
3826 
3827   /* Jump to `finish' from above if all operands are valid already.
3828      In that case, goal_alternative_win is all 1.  */
3829  finish:
3830 
3831   /* Right now, for any pair of operands I and J that are required to match,
3832      with I < J,
3833      goal_alternative_matches[J] is I.
3834      Set up goal_alternative_matched as the inverse function:
3835      goal_alternative_matched[I] = J.  */
3836 
3837   for (i = 0; i < noperands; i++)
3838     goal_alternative_matched[i] = -1;
3839 
3840   for (i = 0; i < noperands; i++)
3841     if (! goal_alternative_win[i]
3842 	&& goal_alternative_matches[i] >= 0)
3843       goal_alternative_matched[goal_alternative_matches[i]] = i;
3844 
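  /* From here on, an operand that won by matching another operand is
     treated exactly like one that won outright.  */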
3845   for (i = 0; i < noperands; i++)
3846     goal_alternative_win[i] |= goal_alternative_match_win[i];
3847 
3848   /* If the best alternative is with operands 1 and 2 swapped,
3849      consider them swapped before reporting the reloads.  Update the
3850      operand numbers of any reloads already pushed.  */
3851 
3852   if (goal_alternative_swapped)
3853     {
3854       std::swap (substed_operand[commutative],
3855 		 substed_operand[commutative + 1]);
3856       std::swap (recog_data.operand[commutative],
3857 		 recog_data.operand[commutative + 1]);
3858       std::swap (*recog_data.operand_loc[commutative],
3859 		 *recog_data.operand_loc[commutative + 1]);
3860 
3861       for (i = 0; i < recog_data.n_dups; i++)
3862 	if (recog_data.dup_num[i] == commutative
3863 	    || recog_data.dup_num[i] == commutative + 1)
3864 	  *recog_data.dup_loc[i]
3865 	    = recog_data.operand[(int) recog_data.dup_num[i]];
3866 
3867       for (i = 0; i < n_reloads; i++)
3868 	{
3869 	  if (rld[i].opnum == commutative)
3870 	    rld[i].opnum = commutative + 1;
3871 	  else if (rld[i].opnum == commutative + 1)
3872 	    rld[i].opnum = commutative;
3873 	}
3874     }
3875 
3876   for (i = 0; i < noperands; i++)
3877     {
3878       operand_reloadnum[i] = -1;
3879 
3880       /* If this is an earlyclobber operand, we need to widen the scope.
3881 	 The reload must remain valid from the start of the insn being
3882 	 reloaded until after the operand is stored into its destination.
3883 	 We approximate this with RELOAD_OTHER even though we know that we
3884 	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3885 
3886 	 One special case that is worth checking is when we have an
3887 	 output that is earlyclobber but isn't used past the insn (typically
3888 	 a SCRATCH).  In this case, we need only have the reload live
3889 	 through the insn itself, but not for any of our input or output
3890 	 reloads.
3891 	 But we must not accidentally narrow the scope of an existing
3892 	 RELOAD_OTHER reload - leave these alone.
3893 
3894 	 In any case, anything needed to address this operand can remain
3895 	 however it was previously categorized.  */
3896 
3897       if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3898 	operand_type[i]
3899 	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3900 	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3901     }
3902 
3903   /* Any constants that aren't allowed and can't be reloaded
3904      into registers are here changed into memory references.  */
3905   for (i = 0; i < noperands; i++)
3906     if (! goal_alternative_win[i])
3907       {
3908 	rtx op = recog_data.operand[i];
3909 	rtx subreg = NULL_RTX;
3910 	rtx plus = NULL_RTX;
3911 	machine_mode mode = operand_mode[i];
3912 
3913 	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3914 	   push_reload so we have to let them pass here.  */
3915 	if (GET_CODE (op) == SUBREG)
3916 	  {
3917 	    subreg = op;
3918 	    op = SUBREG_REG (op);
3919 	    mode = GET_MODE (op);
3920 	  }
3921 
3922 	if (GET_CODE (op) == PLUS)
3923 	  {
3924 	    plus = op;
3925 	    op = XEXP (op, 1);
3926 	  }
3927 
3928 	if (CONST_POOL_OK_P (mode, op)
3929 	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3930 		 == NO_REGS)
3931 		|| no_input_reloads))
3932 	  {
3933 	    int this_address_reloaded;
3934 	    rtx tem = force_const_mem (mode, op);
3935 
3936 	    /* If we stripped a SUBREG or a PLUS above, add it back.  */
3937 	    if (plus != NULL_RTX)
3938 	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3939 
3940 	    if (subreg != NULL_RTX)
3941 	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3942 
3943 	    this_address_reloaded = 0;
3944 	    substed_operand[i] = recog_data.operand[i]
3945 	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3946 				     0, insn, &this_address_reloaded);
3947 
3948 	    /* If the alternative accepts constant pool refs directly
3949 	       there will be no reload needed at all.  */
3950 	    if (plus == NULL_RTX
3951 		&& subreg == NULL_RTX
3952 		&& alternative_allows_const_pool_ref (this_address_reloaded != 1
3953 						      ? substed_operand[i]
3954 						      : NULL,
3955 						      recog_data.constraints[i],
3956 						      goal_alternative_number))
3957 	      goal_alternative_win[i] = 1;
3958 	  }
3959       }
3960 
3961   /* Record the values of the earlyclobber operands for the caller.  */
3962   if (goal_earlyclobber)
3963     for (i = 0; i < noperands; i++)
3964       if (goal_alternative_earlyclobber[i])
3965 	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3966 
3967   /* Now record reloads for all the operands that need them.  */
3968   for (i = 0; i < noperands; i++)
3969     if (! goal_alternative_win[i])
3970       {
3971 	/* Operands that match previous ones have already been handled.  */
3972 	if (goal_alternative_matches[i] >= 0)
3973 	  ;
3974 	/* Handle an operand with a nonoffsettable address
3975 	   appearing where an offsettable address will do
3976 	   by reloading the address into a base register.
3977 
3978 	   ??? We can also do this when the operand is a register and
3979 	   reg_equiv_mem is not offsettable, but this is a bit tricky,
3980 	   so we don't bother with it.  It may not be worth doing.  */
3981 	else if (goal_alternative_matched[i] == -1
3982 		 && goal_alternative_offmemok[i]
3983 		 && MEM_P (recog_data.operand[i]))
3984 	  {
3985 	    /* If the address to be reloaded is a VOIDmode constant,
3986 	       use the default address mode as mode of the reload register,
3987 	       as would have been done by find_reloads_address.  */
3988 	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3989 	    machine_mode address_mode;
3990 
3991 	    address_mode = get_address_mode (recog_data.operand[i]);
3992 	    operand_reloadnum[i]
3993 	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3994 			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3995 			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
3996 			     address_mode,
3997 			     VOIDmode, 0, 0, i, RELOAD_OTHER);
3998 	    rld[operand_reloadnum[i]].inc
3999 	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4000 
4001 	    /* If this operand is an output, we will have made any
4002 	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4003 	       now we are treating part of the operand as an input, so
4004 	       we must change these to RELOAD_FOR_OTHER_ADDRESS.  */
4005 
4006 	    if (modified[i] == RELOAD_WRITE)
4007 	      {
4008 		for (j = 0; j < n_reloads; j++)
4009 		  {
4010 		    if (rld[j].opnum == i)
4011 		      {
4012 			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4013 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4014 			else if (rld[j].when_needed
4015 				 == RELOAD_FOR_OUTADDR_ADDRESS)
4016 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4017 		      }
4018 		  }
4019 	      }
4020 	  }
4021 	else if (goal_alternative_matched[i] == -1)
4022 	  {
4023 	    operand_reloadnum[i]
4024 	      = push_reload ((modified[i] != RELOAD_WRITE
4025 			      ? recog_data.operand[i] : 0),
4026 			     (modified[i] != RELOAD_READ
4027 			      ? recog_data.operand[i] : 0),
4028 			     (modified[i] != RELOAD_WRITE
4029 			      ? recog_data.operand_loc[i] : 0),
4030 			     (modified[i] != RELOAD_READ
4031 			      ? recog_data.operand_loc[i] : 0),
4032 			     (enum reg_class) goal_alternative[i],
4033 			     (modified[i] == RELOAD_WRITE
4034 			      ? VOIDmode : operand_mode[i]),
4035 			     (modified[i] == RELOAD_READ
4036 			      ? VOIDmode : operand_mode[i]),
4037 			     (insn_code_number < 0 ? 0
4038 			      : insn_data[insn_code_number].operand[i].strict_low),
4039 			     0, i, operand_type[i]);
4040 	  }
4041 	/* In a matching pair of operands, one must be input only
4042 	   and the other must be output only.
4043 	   Pass the input operand as IN and the other as OUT.  */
4044 	else if (modified[i] == RELOAD_READ
4045 		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4046 	  {
4047 	    operand_reloadnum[i]
4048 	      = push_reload (recog_data.operand[i],
4049 			     recog_data.operand[goal_alternative_matched[i]],
4050 			     recog_data.operand_loc[i],
4051 			     recog_data.operand_loc[goal_alternative_matched[i]],
4052 			     (enum reg_class) goal_alternative[i],
4053 			     operand_mode[i],
4054 			     operand_mode[goal_alternative_matched[i]],
4055 			     0, 0, i, RELOAD_OTHER);
4056 	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4057 	  }
4058 	else if (modified[i] == RELOAD_WRITE
4059 		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4060 	  {
4061 	    operand_reloadnum[goal_alternative_matched[i]]
4062 	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4063 			     recog_data.operand[i],
4064 			     recog_data.operand_loc[goal_alternative_matched[i]],
4065 			     recog_data.operand_loc[i],
4066 			     (enum reg_class) goal_alternative[i],
4067 			     operand_mode[goal_alternative_matched[i]],
4068 			     operand_mode[i],
4069 			     0, 0, i, RELOAD_OTHER);
4070 	    operand_reloadnum[i] = output_reloadnum;
4071 	  }
4072 	else
4073 	  {
4074 	    gcc_assert (insn_code_number < 0);
4075 	    error_for_asm (insn, "inconsistent operand constraints "
4076 			   "in an %<asm%>");
4077 	    /* Avoid further trouble with this insn.  */
4078 	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4079 	    n_reloads = 0;
4080 	    return 0;
4081 	  }
4082       }
4083     else if (goal_alternative_matched[i] < 0
4084 	     && goal_alternative_matches[i] < 0
4085 	     && address_operand_reloaded[i] != 1
4086 	     && optimize)
4087       {
4088 	/* For each non-matching operand that's a MEM or a pseudo-register
4089 	   that didn't get a hard register, make an optional reload.
4090 	   This may get done even if the insn needs no reloads otherwise.  */
4091 
4092 	rtx operand = recog_data.operand[i];
4093 
4094 	while (GET_CODE (operand) == SUBREG)
4095 	  operand = SUBREG_REG (operand);
4096 	if ((MEM_P (operand)
4097 	     || (REG_P (operand)
4098 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4099 	    /* If this is only for an output, the optional reload would not
4100 	       actually cause us to use a register now, just note that
4101 	       something is stored here.  */
4102 	    && (goal_alternative[i] != NO_REGS
4103 		|| modified[i] == RELOAD_WRITE)
4104 	    && ! no_input_reloads
4105 	    /* An optional output reload might allow INSN to be deleted later.
4106 	       We mustn't make in-out reloads on insns that are not permitted
4107 	       to have output reloads.
4108 	       If this is an asm, we can't delete it; we must not even call
4109 	       push_reload for an optional output reload in this case,
4110 	       because we can't be sure that the constraint allows a register,
4111 	       and push_reload verifies the constraints for asms.  */
4112 	    && (modified[i] == RELOAD_READ
4113 		|| (! no_output_reloads && ! this_insn_is_asm)))
4114 	  operand_reloadnum[i]
4115 	    = push_reload ((modified[i] != RELOAD_WRITE
4116 			    ? recog_data.operand[i] : 0),
4117 			   (modified[i] != RELOAD_READ
4118 			    ? recog_data.operand[i] : 0),
4119 			   (modified[i] != RELOAD_WRITE
4120 			    ? recog_data.operand_loc[i] : 0),
4121 			   (modified[i] != RELOAD_READ
4122 			    ? recog_data.operand_loc[i] : 0),
4123 			   (enum reg_class) goal_alternative[i],
4124 			   (modified[i] == RELOAD_WRITE
4125 			    ? VOIDmode : operand_mode[i]),
4126 			   (modified[i] == RELOAD_READ
4127 			    ? VOIDmode : operand_mode[i]),
4128 			   (insn_code_number < 0 ? 0
4129 			    : insn_data[insn_code_number].operand[i].strict_low),
4130 			   1, i, operand_type[i]);
4131 	/* If a memory reference remains (either as a MEM or a pseudo that
4132 	   did not get a hard register), yet we can't make an optional
4133 	   reload, check if this is actually a pseudo register reference;
4134 	   we then need to emit a USE and/or a CLOBBER so that reload
4135 	   inheritance will do the right thing.  */
4136 	else if (replace
4137 		 && (MEM_P (operand)
4138 		     || (REG_P (operand)
4139 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4140 			 && reg_renumber [REGNO (operand)] < 0)))
4141 	  {
4142 	    operand = *recog_data.operand_loc[i];
4143 
4144 	    while (GET_CODE (operand) == SUBREG)
4145 	      operand = SUBREG_REG (operand);
4146 	    if (REG_P (operand))
4147 	      {
4148 		if (modified[i] != RELOAD_WRITE)
4149 		  /* We mark the USE with QImode so that we recognize
4150 		     it as one that can be safely deleted at the end
4151 		     of reload.  */
4152 		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4153 					      insn), QImode);
4154 		if (modified[i] != RELOAD_READ)
4155 		  emit_insn_after (gen_clobber (operand), insn);
4156 	      }
4157 	  }
4158       }
4159     else if (goal_alternative_matches[i] >= 0
4160 	     && goal_alternative_win[goal_alternative_matches[i]]
4161 	     && modified[i] == RELOAD_READ
4162 	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4163 	     && ! no_input_reloads && ! no_output_reloads
4164 	     && optimize)
4165       {
4166 	/* Similarly, make an optional reload for a pair of matching
4167 	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4168 
4169 	rtx operand = recog_data.operand[i];
4170 
4171 	while (GET_CODE (operand) == SUBREG)
4172 	  operand = SUBREG_REG (operand);
4173 	if ((MEM_P (operand)
4174 	     || (REG_P (operand)
4175 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4176 	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4177 	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4178 	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4179 			   recog_data.operand[i],
4180 			   recog_data.operand_loc[goal_alternative_matches[i]],
4181 			   recog_data.operand_loc[i],
4182 			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4183 			   operand_mode[goal_alternative_matches[i]],
4184 			   operand_mode[i],
4185 			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4186       }
4187 
4188   /* Perform whatever substitutions on the operands we are supposed
4189      to make due to commutativity or replacement of registers
4190      with equivalent constants or memory slots.  */
4191 
4192   for (i = 0; i < noperands; i++)
4193     {
4194       /* We only do this on the last pass through reload, because it is
4195 	 possible for some data (like reg_equiv_address) to be changed during
4196 	 later passes.  Moreover, we lose the opportunity to get a useful
4197 	 reload_{in,out}_reg when we do these replacements.  */
4198 
4199       if (replace)
4200 	{
4201 	  rtx substitution = substed_operand[i];
4202 
4203 	  *recog_data.operand_loc[i] = substitution;
4204 
4205 	  /* If we're replacing an operand with a LABEL_REF, we need to
4206 	     make sure that there's a REG_LABEL_OPERAND note attached to
4207 	     this instruction.  */
4208 	  if (GET_CODE (substitution) == LABEL_REF
4209 	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4210 				 label_ref_label (substitution))
4211 	      /* For a JUMP_P, if it was a branch target it must have
4212 		 already been recorded as such.  */
4213 	      && (!JUMP_P (insn)
4214 		  || !label_is_jump_target_p (label_ref_label (substitution),
4215 					      insn)))
4216 	    {
4217 	      add_reg_note (insn, REG_LABEL_OPERAND,
4218 			    label_ref_label (substitution));
4219 	      if (LABEL_P (label_ref_label (substitution)))
4220 		++LABEL_NUSES (label_ref_label (substitution));
4221 	    }
4222 
4223 	}
4224       else
4225 	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4226     }
4227 
4228   /* If this insn pattern contains any MATCH_DUP's, make sure that
4229      they will be substituted if the operands they match are substituted.
4230      Also apply now any substitutions we already made on the operands.
4231 
4232      Don't do this if we aren't making replacements because we might be
4233      propagating things allocated by frame pointer elimination into places
4234      it doesn't expect.  */
4235 
4236   if (insn_code_number >= 0 && replace)
4237     for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4238       {
4239 	int opno = recog_data.dup_num[i];
4240 	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4241 	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4242       }
4243 
4244 #if 0
4245   /* This loses because reloading of prior insns can invalidate the equivalence
4246      (or at least find_equiv_reg isn't smart enough to find it any more),
4247      causing this insn to need more reload regs than it needed before.
4248      It may be too late to make the reload regs available.
4249      Now this optimization is done safely in choose_reload_regs.  */
4250 
4251   /* For each reload of a reg into some other class of reg,
4252      search for an existing equivalent reg (same value now) in the right class.
4253      We can use it as long as we don't need to change its contents.  */
4254   for (i = 0; i < n_reloads; i++)
4255     if (rld[i].reg_rtx == 0
4256 	&& rld[i].in != 0
4257 	&& REG_P (rld[i].in)
4258 	&& rld[i].out == 0)
4259       {
4260 	rld[i].reg_rtx
4261 	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4262 			    static_reload_reg_p, 0, rld[i].inmode);
4263 	/* Prevent generation of insn to load the value
4264 	   because the one we found already has the value.  */
4265 	if (rld[i].reg_rtx)
4266 	  rld[i].in = rld[i].reg_rtx;
4267       }
4268 #endif
4269 
4270   /* If we detected an error and replaced the asm instruction with a USE,
4271      forget about the reloads.  */
4272   if (GET_CODE (PATTERN (insn)) == USE
4273       && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4274     n_reloads = 0;
4275 
4276   /* Perhaps an output reload can be combined with another
4277      to reduce needs by one.  */
4278   if (!goal_earlyclobber)
4279     combine_reloads ();
4280 
4281   /* If we have a pair of reloads for parts of an address, they are reloading
4282      the same object, the operands themselves were not reloaded, and they
4283      are for two operands that are supposed to match, merge the reloads and
4284      change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
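  /* An illustrative (hypothetical) case: operands 1 and 2 are constrained
     to match, neither needed a reload of its own, but each got a
     RELOAD_FOR_INPUT_ADDRESS reload for the same address value; the loop
     below keeps reload I, retypes it as RELOAD_FOR_OPERAND_ADDRESS, and
     discards reload J after redirecting J's replacements to I.  */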
4285 
4286   for (i = 0; i < n_reloads; i++)
4287     {
4288       int k;
4289 
4290       for (j = i + 1; j < n_reloads; j++)
4291 	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4292 	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4293 	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4294 	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4295 	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4296 		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4297 		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4298 		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 	    && rtx_equal_p (rld[i].in, rld[j].in)
4300 	    && (operand_reloadnum[rld[i].opnum] < 0
4301 		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4302 	    && (operand_reloadnum[rld[j].opnum] < 0
4303 		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4304 	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4305 		|| (goal_alternative_matches[rld[j].opnum]
4306 		    == rld[i].opnum)))
4307 	  {
4308 	    for (k = 0; k < n_replacements; k++)
4309 	      if (replacements[k].what == j)
4310 		replacements[k].what = i;
4311 
4312 	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4313 		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4314 	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4315 	    else
4316 	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4317 	    rld[j].in = 0;
4318 	  }
4319     }
4320 
4321   /* Scan all the reloads and update their type.
4322      If a reload is for the address of an operand and we didn't reload
4323      that operand, change the type.  Similarly, change the operand number
4324      of a reload when two operands match.  If a reload is optional, treat it
4325      as though the operand isn't reloaded.
4326 
4327      ??? This latter case is somewhat odd because if we do the optional
4328      reload, it means the object is hanging around.  Thus we need only
4329      do the address reload if the optional reload was NOT done.
4330 
4331      Change secondary reloads to be the address type of their operand, not
4332      the normal type.
4333 
4334      If an operand's reload is now RELOAD_OTHER, change any
4335      RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4336      RELOAD_FOR_OTHER_ADDRESS.  */
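  /* For example (hypothetical), if operand 1 turned out not to need a
     reload of its own, a RELOAD_FOR_INPUT_ADDRESS reload for operand 1's
     address becomes RELOAD_FOR_OPERAND_ADDRESS below, and a
     RELOAD_FOR_INPADDR_ADDRESS reload becomes RELOAD_FOR_OPADDR_ADDR.  */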
4337 
4338   for (i = 0; i < n_reloads; i++)
4339     {
4340       if (rld[i].secondary_p
4341 	  && rld[i].when_needed == operand_type[rld[i].opnum])
4342 	rld[i].when_needed = address_type[rld[i].opnum];
4343 
4344       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4345 	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4346 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4347 	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4348 	  && (operand_reloadnum[rld[i].opnum] < 0
4349 	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4350 	{
4351 	  /* If we have a secondary reload to go along with this reload,
4352 	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4353 
4354 	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4355 	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4356 	      && rld[i].secondary_in_reload != -1)
4357 	    {
4358 	      int secondary_in_reload = rld[i].secondary_in_reload;
4359 
4360 	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4361 
4362 	      /* If there's a tertiary reload we have to change it also.  */
4363 	      if (secondary_in_reload > 0
4364 		  && rld[secondary_in_reload].secondary_in_reload != -1)
4365 		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4366 		  = RELOAD_FOR_OPADDR_ADDR;
4367 	    }
4368 
4369 	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4370 	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4371 	      && rld[i].secondary_out_reload != -1)
4372 	    {
4373 	      int secondary_out_reload = rld[i].secondary_out_reload;
4374 
4375 	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4376 
4377 	      /* If there's a tertiary reload we have to change it also.  */
4378 	      if (secondary_out_reload
4379 		  && rld[secondary_out_reload].secondary_out_reload != -1)
4380 		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4381 		  = RELOAD_FOR_OPADDR_ADDR;
4382 	    }
4383 
4384 	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4385 	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4386 	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4387 	  else
4388 	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4389 	}
4390 
4391       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4392 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4393 	  && operand_reloadnum[rld[i].opnum] >= 0
4394 	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4395 	      == RELOAD_OTHER))
4396 	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4397 
4398       if (goal_alternative_matches[rld[i].opnum] >= 0)
4399 	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4400     }
4401 
4402   /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4403      If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4404      reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4405 
4406      choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4407      conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4408      single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4409      However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4410      then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4411      RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4412      This is complicated by the fact that a single operand can have more
4413      than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4414      choose_reload_regs without affecting code quality, and cases that
4415      actually fail are extremely rare, so it turns out to be better to fix
4416      the problem here by not generating cases that choose_reload_regs will
4417      fail for.  */
4418   /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4419      RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4420      a single operand.
4421      We can reduce the register pressure by exploiting the fact that a
4422      RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4423      does not conflict with any of them, if it is only used for the first of
4424      the RELOAD_FOR_X_ADDRESS reloads.  */
4425   {
4426     int first_op_addr_num = -2;
4427     int first_inpaddr_num[MAX_RECOG_OPERANDS];
4428     int first_outpaddr_num[MAX_RECOG_OPERANDS];
4429     int need_change = 0;
4430     /* We use first_op_addr_num and the contents of the above arrays
4431        first as flags - -2 means no instance encountered, -1 means exactly
4432        one instance encountered.
4433        If more than one instance has been encountered, we store the reload
4434        number of the first reload of the kind in question; reload numbers
4435        are known to be non-negative.  */
4436     for (i = 0; i < noperands; i++)
4437       first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4438     for (i = n_reloads - 1; i >= 0; i--)
4439       {
4440 	switch (rld[i].when_needed)
4441 	  {
4442 	  case RELOAD_FOR_OPERAND_ADDRESS:
4443 	    if (++first_op_addr_num >= 0)
4444 	      {
4445 		first_op_addr_num = i;
4446 		need_change = 1;
4447 	      }
4448 	    break;
4449 	  case RELOAD_FOR_INPUT_ADDRESS:
4450 	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4451 	      {
4452 		first_inpaddr_num[rld[i].opnum] = i;
4453 		need_change = 1;
4454 	      }
4455 	    break;
4456 	  case RELOAD_FOR_OUTPUT_ADDRESS:
4457 	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4458 	      {
4459 		first_outpaddr_num[rld[i].opnum] = i;
4460 		need_change = 1;
4461 	      }
4462 	    break;
4463 	  default:
4464 	    break;
4465 	  }
4466       }
4467 
4468     if (need_change)
4469       {
4470 	for (i = 0; i < n_reloads; i++)
4471 	  {
4472 	    int first_num;
4473 	    enum reload_type type;
4474 
4475 	    switch (rld[i].when_needed)
4476 	      {
4477 	      case RELOAD_FOR_OPADDR_ADDR:
4478 		first_num = first_op_addr_num;
4479 		type = RELOAD_FOR_OPERAND_ADDRESS;
4480 		break;
4481 	      case RELOAD_FOR_INPADDR_ADDRESS:
4482 		first_num = first_inpaddr_num[rld[i].opnum];
4483 		type = RELOAD_FOR_INPUT_ADDRESS;
4484 		break;
4485 	      case RELOAD_FOR_OUTADDR_ADDRESS:
4486 		first_num = first_outpaddr_num[rld[i].opnum];
4487 		type = RELOAD_FOR_OUTPUT_ADDRESS;
4488 		break;
4489 	      default:
4490 		continue;
4491 	      }
4492 	    if (first_num < 0)
4493 	      continue;
4494 	    else if (i > first_num)
4495 	      rld[i].when_needed = type;
4496 	    else
4497 	      {
4498 		/* Check if the only TYPE reload that uses reload I is
4499 		   reload FIRST_NUM.  */
4500 		for (j = n_reloads - 1; j > first_num; j--)
4501 		  {
4502 		    if (rld[j].when_needed == type
4503 			&& (rld[i].secondary_p
4504 			    ? rld[j].secondary_in_reload == i
4505 			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4506 		      {
4507 			rld[i].when_needed = type;
4508 			break;
4509 		      }
4510 		  }
4511 	      }
4512 	  }
4513       }
4514   }
4515 
4516   /* See if we have any reloads that are now allowed to be merged
4517      because we've changed when the reload is needed to
4518      RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4519      check for the most common cases.  */
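  /* For instance, two input-only reloads of the same value into the same
     class that have both become RELOAD_FOR_OPERAND_ADDRESS can be
     collapsed into one; the second reload's replacements are transferred
     to the first and the second is discarded by clearing its IN field.  */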
4520 
4521   for (i = 0; i < n_reloads; i++)
4522     if (rld[i].in != 0 && rld[i].out == 0
4523 	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4524 	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4525 	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4526       for (j = 0; j < n_reloads; j++)
4527 	if (i != j && rld[j].in != 0 && rld[j].out == 0
4528 	    && rld[j].when_needed == rld[i].when_needed
4529 	    && MATCHES (rld[i].in, rld[j].in)
4530 	    && rld[i].rclass == rld[j].rclass
4531 	    && !rld[i].nocombine && !rld[j].nocombine
4532 	    && rld[i].reg_rtx == rld[j].reg_rtx)
4533 	  {
4534 	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4535 	    transfer_replacements (i, j);
4536 	    rld[j].in = 0;
4537 	  }
4538 
4539   /* If we made any reloads for addresses, see if they violate a
4540      "no input reloads" requirement for this insn.  But loads that we
4541      do after the insn (such as for output addresses) are fine.  */
4542   if (HAVE_cc0 && no_input_reloads)
4543     for (i = 0; i < n_reloads; i++)
4544       gcc_assert (rld[i].in == 0
4545 		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4546 		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4547 
4548   /* Compute reload_mode and reload_nregs.  */
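  /* The chosen mode is the wider of the input and output modes, so that
     the reload register can hold either value; OUTMODE wins when INMODE
     is VOIDmode or strictly narrower.  */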
4549   for (i = 0; i < n_reloads; i++)
4550     {
4551       rld[i].mode = rld[i].inmode;
4552       if (rld[i].mode == VOIDmode
4553 	  || partial_subreg_p (rld[i].mode, rld[i].outmode))
4554 	rld[i].mode = rld[i].outmode;
4555 
4556       rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4557     }
4558 
4559   /* Special case a simple move with an input reload and a
4560      destination of a hard reg; if the hard reg is ok, use it.  */
4561   for (i = 0; i < n_reloads; i++)
4562     if (rld[i].when_needed == RELOAD_FOR_INPUT
4563 	&& GET_CODE (PATTERN (insn)) == SET
4564 	&& REG_P (SET_DEST (PATTERN (insn)))
4565 	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4566 	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4567 	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4568       {
4569 	rtx dest = SET_DEST (PATTERN (insn));
4570 	unsigned int regno = REGNO (dest);
4571 
4572 	if (regno < FIRST_PSEUDO_REGISTER
4573 	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4574 	    && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4575 	  {
4576 	    int nr = hard_regno_nregs (regno, rld[i].mode);
4577 	    int ok = 1, nri;
4578 
4579 	    for (nri = 1; nri < nr; nri ++)
4580 	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4581 		{
4582 		  ok = 0;
4583 		  break;
4584 		}
4585 
4586 	    if (ok)
4587 	      rld[i].reg_rtx = dest;
4588 	  }
4589       }
4590 
4591   return retval;
4592 }
4593 
4594 /* Return true if alternative number ALTNUM in constraint-string
4595    CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4596    MEM gives the reference if its address hasn't been fully reloaded,
4597    otherwise it is NULL.  */
4598 
4599 static bool
4600 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4601 				   const char *constraint, int altnum)
4602 {
4603   int c;
4604 
4605   /* Skip alternatives before the one requested.  */
4606   while (altnum > 0)
4607     {
4608       while (*constraint++ != ',')
4609 	;
4610       altnum--;
4611     }
4612   /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4613      If one of them is present, this alternative accepts the result of
4614      passing a constant-pool reference through find_reloads_toplev.
4615 
4616      The same is true of extra memory constraints if the address
4617      was reloaded into a register.  However, the target may elect
4618      to disallow the original constant address, forcing it to be
4619      reloaded into a register instead.  */
4620   for (; (c = *constraint) && c != ',' && c != '#';
4621        constraint += CONSTRAINT_LEN (c, constraint))
4622     {
4623       enum constraint_num cn = lookup_constraint (constraint);
4624       if (insn_extra_memory_constraint (cn)
4625 	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4626 	return true;
4627     }
4628   return false;
4629 }
4630 
4631 /* Scan X for memory references and scan the addresses for reloading.
4632    Also checks for references to "constant" regs that we want to eliminate
4633    and replaces them with the values they stand for.
4634    We may alter X destructively if it contains a reference to such.
4635    If X is just a constant reg, we return the equivalent value
4636    instead of X.
4637 
4638    IND_LEVELS says how many levels of indirect addressing this machine
4639    supports.
4640 
4641    OPNUM and TYPE identify the purpose of the reload.
4642 
4643    IS_SET_DEST is true if X is the destination of a SET, which is not
4644    appropriate to be replaced by a constant.
4645 
4646    INSN, if nonzero, is the insn in which we do the reload.  It is used
4647    to determine if we may generate output reloads, and where to put USEs
4648    for pseudos that we have to replace with stack slots.
4649 
4650    ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4651    result of find_reloads_address.  */
4652 
4653 static rtx
4654 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4655 		     int ind_levels, int is_set_dest, rtx_insn *insn,
4656 		     int *address_reloaded)
4657 {
4658   RTX_CODE code = GET_CODE (x);
4659 
4660   const char *fmt = GET_RTX_FORMAT (code);
4661   int i;
4662   int copied;
4663 
4664   if (code == REG)
4665     {
4666       /* This code is duplicated for speed in find_reloads.  */
4667       int regno = REGNO (x);
4668       if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4669 	x = reg_equiv_constant (regno);
4670 #if 0
4671       /*  This creates (subreg (mem...)) which would cause an unnecessary
4672 	  reload of the mem.  */
4673       else if (reg_equiv_mem (regno) != 0)
4674 	x = reg_equiv_mem (regno);
4675 #endif
4676       else if (reg_equiv_memory_loc (regno)
4677 	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4678 	{
4679 	  rtx mem = make_memloc (x, regno);
4680 	  if (reg_equiv_address (regno)
4681 	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4682 	    {
4683 	      /* If this is not a toplevel operand, find_reloads doesn't see
4684 		 this substitution.  We have to emit a USE of the pseudo so
4685 		 that delete_output_reload can see it.  */
4686 	      if (replace_reloads && recog_data.operand[opnum] != x)
4687 		/* We mark the USE with QImode so that we recognize it
4688 		   as one that can be safely deleted at the end of
4689 		   reload.  */
4690 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4691 			  QImode);
4692 	      x = mem;
4693 	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4694 					opnum, type, ind_levels, insn);
4695 	      if (!rtx_equal_p (x, mem))
4696 		push_reg_equiv_alt_mem (regno, x);
4697 	      if (address_reloaded)
4698 		*address_reloaded = i;
4699 	    }
4700 	}
4701       return x;
4702     }
4703   if (code == MEM)
4704     {
4705       rtx tem = x;
4706 
4707       i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4708 				opnum, type, ind_levels, insn);
4709       if (address_reloaded)
4710 	*address_reloaded = i;
4711 
4712       return tem;
4713     }
4714 
4715   if (code == SUBREG && REG_P (SUBREG_REG (x)))
4716     {
4717       /* Check for SUBREG containing a REG that's equivalent to a
4718 	 constant.  If the constant has a known value, truncate it
4719 	 right now.  Similarly if we are extracting a single-word of a
4720 	 multi-word constant.  If the constant is symbolic, allow it
4721 	 to be substituted normally.  push_reload will strip the
4722 	 subreg later.  The constant must not be VOIDmode, because we
4723 	 will lose the mode of the register (this should never happen
4724 	 because one of the cases above should handle it).  */
4725 
4726       int regno = REGNO (SUBREG_REG (x));
4727       rtx tem;
4728 
4729       if (regno >= FIRST_PSEUDO_REGISTER
4730 	  && reg_renumber[regno] < 0
4731 	  && reg_equiv_constant (regno) != 0)
4732 	{
4733 	  tem =
4734 	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4735 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4736 	  gcc_assert (tem);
4737 	  if (CONSTANT_P (tem)
4738 	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4739 	    {
4740 	      tem = force_const_mem (GET_MODE (x), tem);
4741 	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4742 					&XEXP (tem, 0), opnum, type,
4743 					ind_levels, insn);
4744 	      if (address_reloaded)
4745 		*address_reloaded = i;
4746 	    }
4747 	  return tem;
4748 	}
4749 
4750       /* If the subreg contains a reg that will be converted to a mem,
4751 	 attempt to convert the whole subreg to a (narrower or wider)
4752 	 memory reference instead.  If this succeeds, we're done --
4753 	 otherwise fall through to check whether the inner reg still
4754 	 needs address reloads anyway.  */
4755 
4756       if (regno >= FIRST_PSEUDO_REGISTER
4757 	  && reg_equiv_memory_loc (regno) != 0)
4758 	{
4759 	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4760 					     insn, address_reloaded);
4761 	  if (tem)
4762 	    return tem;
4763 	}
4764     }
4765 
4766   for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4767     {
4768       if (fmt[i] == 'e')
4769 	{
4770 	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4771 					      ind_levels, is_set_dest, insn,
4772 					      address_reloaded);
4773 	  /* If we have replaced a reg with its equivalent memory loc -
4774 	     that can still be handled here e.g. if it's in a paradoxical
4775 	     subreg - we must make the change in a copy, rather than using
4776 	     a destructive change.  This way, find_reloads can still elect
4777 	     not to do the change.  */
4778 	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4779 	    {
4780 	      x = shallow_copy_rtx (x);
4781 	      copied = 1;
4782 	    }
4783 	  XEXP (x, i) = new_part;
4784 	}
4785     }
4786   return x;
4787 }
4788 
4789 /* Return a mem ref for the memory equivalent of reg REGNO.
4790    This mem ref is not shared with anything.  */
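/* For example, for a pseudo whose equivalence is a stack slot such as
   (mem (plus (reg fp) (const_int -16))), the returned MEM has the
   elimination offsets recomputed and is adjusted to AD's mode, so later
   substitutions cannot clobber the recorded equivalence.  */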
4791 
4792 static rtx
4793 make_memloc (rtx ad, int regno)
4794 {
4795   /* We must rerun eliminate_regs, in case the elimination
4796      offsets have changed.  */
4797   rtx tem
4798     = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4799 	    0);
4800 
4801   /* If TEM might contain a pseudo, we must copy it to avoid
4802      modifying it when we do the substitution for the reload.  */
4803   if (rtx_varies_p (tem, 0))
4804     tem = copy_rtx (tem);
4805 
4806   tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4807   tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4808 
4809   /* Copy the result if it's still the same as the equivalence, to avoid
4810      modifying it when we do the substitution for the reload.  */
4811   if (tem == reg_equiv_memory_loc (regno))
4812     tem = copy_rtx (tem);
4813   return tem;
4814 }
4815 
4816 /* Returns true if AD could be turned into a valid memory reference
4817    to mode MODE in address space AS by reloading the part pointed to
4818    by PART into a register.  */
4819 
4820 static int
4821 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4822 				   addr_space_t as, rtx *part)
4823 {
4824   int retv;
4825   rtx tem = *part;
4826   rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4827 
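  /* Temporarily substitute a would-be new pseudo (register number
     max_reg_num (), which does not exist yet) for *PART, ask whether the
     resulting address would be valid, and then restore *PART.  */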
4828   *part = reg;
4829   retv = memory_address_addr_space_p (mode, ad, as);
4830   *part = tem;
4831 
4832   return retv;
4833 }
4834 
4835 /* Record all reloads needed for handling memory address AD
4836    which appears in *LOC in a memory reference to mode MODE
4837    which itself is found in location  *MEMREFLOC.
4838    Note that we take shortcuts assuming that no multi-reg machine mode
4839    occurs as part of an address.
4840 
4841    OPNUM and TYPE specify the purpose of this reload.
4842 
4843    IND_LEVELS says how many levels of indirect addressing this machine
4844    supports.
4845 
4846    INSN, if nonzero, is the insn in which we do the reload.  It is used
4847    to determine if we may generate output reloads, and where to put USEs
4848    for pseudos that we have to replace with stack slots.
4849 
4850    Value is one if this address is reloaded or replaced as a whole; it is
4851    zero if the top level of this address was not reloaded or replaced, and
4852    it is -1 if it may or may not have been reloaded or replaced.
4853 
4854    Note that there is no verification that the address will be valid after
4855    this routine does its work.  Instead, we rely on the fact that the address
4856    was valid when reload started.  So we need only undo things that reload
4857    could have broken.  These are wrong register types, pseudos not allocated
4858    to a hard register, and frame pointer elimination.  */
4859 
4860 static int
4861 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4862 		      rtx *loc, int opnum, enum reload_type type,
4863 		      int ind_levels, rtx_insn *insn)
4864 {
4865   addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4866 			     : ADDR_SPACE_GENERIC;
4867   int regno;
4868   int removed_and = 0;
4869   int op_index;
4870   rtx tem;
4871 
4872   /* If the address is a register, see if it is a legitimate address and
4873      reload if not.  We first handle the cases where we need not reload
4874      or where we must reload in a non-standard way.  */
4875 
4876   if (REG_P (ad))
4877     {
4878       regno = REGNO (ad);
4879 
4880       if (reg_equiv_constant (regno) != 0)
4881 	{
4882 	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4883 				     base_reg_class (mode, as, MEM, SCRATCH),
4884 				     GET_MODE (ad), opnum, type, ind_levels);
4885 	  return 1;
4886 	}
4887 
4888       tem = reg_equiv_memory_loc (regno);
4889       if (tem != 0)
4890 	{
4891 	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4892 	    {
4893 	      tem = make_memloc (ad, regno);
4894 	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4895 							XEXP (tem, 0),
4896 							MEM_ADDR_SPACE (tem)))
4897 		{
4898 		  rtx orig = tem;
4899 
4900 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4901 					&XEXP (tem, 0), opnum,
4902 					ADDR_TYPE (type), ind_levels, insn);
4903 		  if (!rtx_equal_p (tem, orig))
4904 		    push_reg_equiv_alt_mem (regno, tem);
4905 		}
4906 	      /* We can avoid a reload if the register's equivalent memory
4907 		 expression is valid as an indirect memory address.
4908 		 But not all addresses are valid in a mem used as an indirect
4909 		 address: only reg or reg+constant.  */
4910 
4911 	      if (ind_levels > 0
4912 		  && strict_memory_address_addr_space_p (mode, tem, as)
4913 		  && (REG_P (XEXP (tem, 0))
4914 		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4915 			  && REG_P (XEXP (XEXP (tem, 0), 0))
4916 			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4917 		{
4918 		  /* TEM is not the same as what we'll be replacing the
4919 		     pseudo with after reload, so put a USE in front of INSN
4920 		     in the final reload pass.  */
4921 		  if (replace_reloads
4922 		      && num_not_at_initial_offset
4923 		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4924 		    {
4925 		      *loc = tem;
4926 		      /* We mark the USE with QImode so that we
4927 			 recognize it as one that can be safely
4928 			 deleted at the end of reload.  */
4929 		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4930 						  insn), QImode);
4931 
4932 		      /* This doesn't really count as replacing the address
4933 			 as a whole, since it is still a memory access.  */
4934 		    }
4935 		  return 0;
4936 		}
4937 	      ad = tem;
4938 	    }
4939 	}
4940 
4941       /* The only remaining case where we can avoid a reload is if this is a
4942 	 hard register that is valid as a base register and which is not the
4943 	 subject of a CLOBBER in this insn.  */
4944 
4945       else if (regno < FIRST_PSEUDO_REGISTER
4946 	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4947 	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
4948 	return 0;
4949 
4950       /* If we do not have one of the cases above, we must do the reload.  */
4951       push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4952 		   base_reg_class (mode, as, MEM, SCRATCH),
4953 		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4954       return 1;
4955     }
4956 
4957   if (strict_memory_address_addr_space_p (mode, ad, as))
4958     {
4959       /* The address appears valid, so reloads are not needed.
4960 	 But the address may contain an eliminable register.
4961 	 This can happen because a machine with indirect addressing
4962 	 may consider a pseudo register by itself a valid address even when
4963 	 it has failed to get a hard reg.
4964 	 So do a tree-walk to find and eliminate all such regs.  */
4965 
4966       /* But first quickly dispose of a common case.  */
4967       if (GET_CODE (ad) == PLUS
4968 	  && CONST_INT_P (XEXP (ad, 1))
4969 	  && REG_P (XEXP (ad, 0))
4970 	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4971 	return 0;
4972 
4973       subst_reg_equivs_changed = 0;
4974       *loc = subst_reg_equivs (ad, insn);
4975 
4976       if (! subst_reg_equivs_changed)
4977 	return 0;
4978 
4979       /* Check result for validity after substitution.  */
4980       if (strict_memory_address_addr_space_p (mode, ad, as))
4981 	return 0;
4982     }
4983 
4984 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4985   do
4986     {
4987       if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4988 	{
4989 	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4990 				     ind_levels, win);
4991 	}
4992       break;
4993     win:
4994       *memrefloc = copy_rtx (*memrefloc);
4995       XEXP (*memrefloc, 0) = ad;
4996       move_replacements (&ad, &XEXP (*memrefloc, 0));
4997       return -1;
4998     }
4999   while (0);
5000 #endif
5001 
5002   /* The address is not valid.  We have to figure out why.  First see if
5003      we have an outer AND and remove it if so.  Then analyze what's inside.  */
5004 
5005   if (GET_CODE (ad) == AND)
5006     {
5007       removed_and = 1;
5008       loc = &XEXP (ad, 0);
5009       ad = *loc;
5010     }
5011 
5012   /* One possibility for why the address is invalid is that it is itself
5013      a MEM.  This can happen when the frame pointer is being eliminated, a
5014      pseudo is not allocated to a hard register, and the offset between the
5015      frame and stack pointers is not its initial value.  In that case the
5016      pseudo will have been replaced by a MEM referring to the
5017      stack pointer.  */
5018   if (MEM_P (ad))
5019     {
5020       /* First ensure that the address in this MEM is valid.  Then, unless
5021 	 indirect addresses are valid, reload the MEM into a register.  */
5022       tem = ad;
5023       find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5024 			    opnum, ADDR_TYPE (type),
5025 			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5026 
5027       /* If tem was changed, then we must create a new memory reference to
5028 	 hold it and store it back into memrefloc.  */
5029       if (tem != ad && memrefloc)
5030 	{
5031 	  *memrefloc = copy_rtx (*memrefloc);
5032 	  copy_replacements (tem, XEXP (*memrefloc, 0));
5033 	  loc = &XEXP (*memrefloc, 0);
5034 	  if (removed_and)
5035 	    loc = &XEXP (*loc, 0);
5036 	}
5037 
5038       /* Check cases similar to those for indirect addresses above, except
5039 	 that we can allow pseudos and a MEM since they should have been
5040 	 taken care of above.  */
5041 
5042       if (ind_levels == 0
5043 	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5044 	  || MEM_P (XEXP (tem, 0))
5045 	  || ! (REG_P (XEXP (tem, 0))
5046 		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5047 		    && REG_P (XEXP (XEXP (tem, 0), 0))
5048 		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5049 	{
5050 	  /* Must use TEM here, not AD, since it is the one that will
5051 	     have any subexpressions reloaded, if needed.  */
5052 	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5053 		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5054 		       VOIDmode, 0,
5055 		       0, opnum, type);
5056 	  return ! removed_and;
5057 	}
5058       else
5059 	return 0;
5060     }
5061 
5062   /* If we have address of a stack slot but it's not valid because the
5063      displacement is too large, compute the sum in a register.
5064      Handle all base registers here, not just fp/ap/sp, because on some
5065      targets (namely SH) we can also get too large displacements from
5066      big-endian corrections.  */
5067   else if (GET_CODE (ad) == PLUS
5068 	   && REG_P (XEXP (ad, 0))
5069 	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5070 	   && CONST_INT_P (XEXP (ad, 1))
5071 	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5072 				    CONST_INT)
5073 	       /* Similarly, if we were to reload the base register and the
5074 		  mem+offset address is still invalid, then we want to reload
5075 		  the whole address, not just the base register.  */
5076 	       || ! maybe_memory_address_addr_space_p
5077 		     (mode, ad, as, &(XEXP (ad, 0)))))
5078 
5079     {
5080       /* Unshare the MEM rtx so we can safely alter it.  */
5081       if (memrefloc)
5082 	{
5083 	  *memrefloc = copy_rtx (*memrefloc);
5084 	  loc = &XEXP (*memrefloc, 0);
5085 	  if (removed_and)
5086 	    loc = &XEXP (*loc, 0);
5087 	}
5088 
5089       if (double_reg_address_ok[mode]
5090 	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5091 				  PLUS, CONST_INT))
5092 	{
5093 	  /* Unshare the sum as well.  */
5094 	  *loc = ad = copy_rtx (ad);
5095 
5096 	  /* Reload the displacement into an index reg.
5097 	     We assume the frame pointer or arg pointer is a base reg.  */
5098 	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5099 				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5100 				     type, ind_levels);
5101 	  return 0;
5102 	}
5103       else
5104 	{
5105 	  /* If the sum of two regs is not necessarily valid,
5106 	     reload the sum into a base reg.
5107 	     That will at least work.  */
5108 	  find_reloads_address_part (ad, loc,
5109 				     base_reg_class (mode, as, MEM, SCRATCH),
5110 				     GET_MODE (ad), opnum, type, ind_levels);
5111 	}
5112       return ! removed_and;
5113     }
5114 
5115   /* If we have an indexed stack slot, there are three possible reasons why
5116      it might be invalid: The index might need to be reloaded, the address
5117      might have been made by frame pointer elimination and hence have a
5118      constant out of range, or both reasons might apply.
5119 
5120      We can easily check for an index needing reload, but even if that is the
5121      case, we might also have an invalid constant.  To avoid making the
5122      conservative assumption and requiring two reloads, we see if this address
5123      is valid when not interpreted strictly.  If it is, the only problem is
5124      that the index needs a reload and find_reloads_address_1 will take care
5125      of it.
5126 
5127      Handle all base registers here, not just fp/ap/sp, because on some
5128      targets (namely SPARC) we can also get invalid addresses from preventive
5129      subreg big-endian corrections made by find_reloads_toplev.  We
5130      can also get expressions involving LO_SUM (rather than PLUS) from
5131      find_reloads_subreg_address.
5132 
5133      If we decide to do something, it must be that `double_reg_address_ok'
5134      is true.  We generate a reload of the base register + constant and
5135      rework the sum so that the reload register will be added to the index.
5136      This is safe because we know the address isn't shared.
5137 
5138      We check for the base register as both the first and second operand of
5139      the innermost PLUS and/or LO_SUM.  */
5140 
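  /* A hypothetical illustration: for (plus (plus (reg fp) (reg index))
     (const_int 100000)), the loop below folds the constant into the base
     register side, reloads (plus (reg fp) (const_int 100000)) into a base
     register, and leaves an address of the form (plus (reg new) (reg index)),
     reloading the index part separately if necessary.  */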
5141   for (op_index = 0; op_index < 2; ++op_index)
5142     {
5143       rtx operand, addend;
5144       enum rtx_code inner_code;
5145 
5146       if (GET_CODE (ad) != PLUS)
5147 	  continue;
5148 
5149       inner_code = GET_CODE (XEXP (ad, 0));
5150       if (!(GET_CODE (ad) == PLUS
5151 	    && CONST_INT_P (XEXP (ad, 1))
5152 	    && (inner_code == PLUS || inner_code == LO_SUM)))
5153 	continue;
5154 
5155       operand = XEXP (XEXP (ad, 0), op_index);
5156       if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5157 	continue;
5158 
5159       addend = XEXP (XEXP (ad, 0), 1 - op_index);
5160 
5161       if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5162 				GET_CODE (addend))
5163 	   || operand == frame_pointer_rtx
5164 	   || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5165 	       && operand == hard_frame_pointer_rtx)
5166 	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5167 	       && operand == arg_pointer_rtx)
5168 	   || operand == stack_pointer_rtx)
5169 	  && ! maybe_memory_address_addr_space_p
5170 		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5171 	{
5172 	  rtx offset_reg;
5173 	  enum reg_class cls;
5174 
5175 	  offset_reg = plus_constant (GET_MODE (ad), operand,
5176 				      INTVAL (XEXP (ad, 1)));
5177 
5178 	  /* Form the adjusted address.  */
5179 	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5180 	    ad = gen_rtx_PLUS (GET_MODE (ad),
5181 			       op_index == 0 ? offset_reg : addend,
5182 			       op_index == 0 ? addend : offset_reg);
5183 	  else
5184 	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5185 				 op_index == 0 ? offset_reg : addend,
5186 				 op_index == 0 ? addend : offset_reg);
5187 	  *loc = ad;
5188 
5189 	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5190 	  find_reloads_address_part (XEXP (ad, op_index),
5191 				     &XEXP (ad, op_index), cls,
5192 				     GET_MODE (ad), opnum, type, ind_levels);
5193 	  find_reloads_address_1 (mode, as,
5194 				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5195 				  GET_CODE (XEXP (ad, op_index)),
5196 				  &XEXP (ad, 1 - op_index), opnum,
5197 				  type, 0, insn);
5198 
5199 	  return 0;
5200 	}
5201     }
5202 
5203   /* See if address becomes valid when an eliminable register
5204      in a sum is replaced.  */
5205 
5206   tem = ad;
5207   if (GET_CODE (ad) == PLUS)
5208     tem = subst_indexed_address (ad);
5209   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5210     {
5211       /* Ok, we win that way.  Replace any additional eliminable
5212 	 registers.  */
5213 
5214       subst_reg_equivs_changed = 0;
5215       tem = subst_reg_equivs (tem, insn);
5216 
5217       /* Make sure that didn't make the address invalid again.  */
5218 
5219       if (! subst_reg_equivs_changed
5220 	  || strict_memory_address_addr_space_p (mode, tem, as))
5221 	{
5222 	  *loc = tem;
5223 	  return 0;
5224 	}
5225     }
5226 
5227   /* If constants aren't valid addresses, reload the constant address
5228      into a register.  */
5229   if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5230     {
5231       machine_mode address_mode = GET_MODE (ad);
5232       if (address_mode == VOIDmode)
5233 	address_mode = targetm.addr_space.address_mode (as);
5234 
5235       /* If AD is an address in the constant pool, the MEM rtx may be shared.
5236 	 Unshare it so we can safely alter it.  */
5237       if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5238 	  && CONSTANT_POOL_ADDRESS_P (ad))
5239 	{
5240 	  *memrefloc = copy_rtx (*memrefloc);
5241 	  loc = &XEXP (*memrefloc, 0);
5242 	  if (removed_and)
5243 	    loc = &XEXP (*loc, 0);
5244 	}
5245 
5246       find_reloads_address_part (ad, loc,
5247 				 base_reg_class (mode, as, MEM, SCRATCH),
5248 				 address_mode, opnum, type, ind_levels);
5249       return ! removed_and;
5250     }
5251 
5252   return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5253 				 opnum, type, ind_levels, insn);
5254 }
5255 
5256 /* Find all pseudo regs appearing in AD
5257    that are eliminable in favor of equivalent values
5258    and do not have hard regs; replace them by their equivalents.
5259    INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5260    front of it for pseudos that we have to replace with stack slots.  */
5261 
5262 static rtx
5263 subst_reg_equivs (rtx ad, rtx_insn *insn)
5264 {
5265   RTX_CODE code = GET_CODE (ad);
5266   int i;
5267   const char *fmt;
5268 
5269   switch (code)
5270     {
5271     case HIGH:
5272     case CONST:
5273     CASE_CONST_ANY:
5274     case SYMBOL_REF:
5275     case LABEL_REF:
5276     case PC:
5277     case CC0:
5278       return ad;
5279 
5280     case REG:
5281       {
5282 	int regno = REGNO (ad);
5283 
5284 	if (reg_equiv_constant (regno) != 0)
5285 	  {
5286 	    subst_reg_equivs_changed = 1;
5287 	    return reg_equiv_constant (regno);
5288 	  }
5289 	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5290 	  {
5291 	    rtx mem = make_memloc (ad, regno);
5292 	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5293 	      {
5294 		subst_reg_equivs_changed = 1;
5295 		/* We mark the USE with QImode so that we recognize it
5296 		   as one that can be safely deleted at the end of
5297 		   reload.  */
5298 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5299 			  QImode);
5300 		return mem;
5301 	      }
5302 	  }
5303       }
5304       return ad;
5305 
5306     case PLUS:
5307       /* Quickly dispose of a common case.  */
5308       if (XEXP (ad, 0) == frame_pointer_rtx
5309 	  && CONST_INT_P (XEXP (ad, 1)))
5310 	return ad;
5311       break;
5312 
5313     default:
5314       break;
5315     }
5316 
5317   fmt = GET_RTX_FORMAT (code);
5318   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5319     if (fmt[i] == 'e')
5320       XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5321   return ad;
5322 }
5323 
5324 /* Compute the sum of X and Y, making canonicalizations assumed in an
5325    address, namely: sum constant integers, surround the sum of two
5326    constants with a CONST, put the constant as the second operand, and
5327    group the constant on the outermost sum.
5328 
5329    This routine assumes both inputs are already in canonical form.  */
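/* For example, combining (plus (reg A) (const_int 4)) with (const_int 8)
   yields (plus (reg A) (const_int 12)), while the sum of two symbolic
   constants is wrapped in a CONST.  The register name is only
   illustrative.  */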
5330 
5331 rtx
5332 form_sum (machine_mode mode, rtx x, rtx y)
5333 {
5334   rtx tem;
5335 
5336   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5337   gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5338 
5339   if (CONST_INT_P (x))
5340     return plus_constant (mode, y, INTVAL (x));
5341   else if (CONST_INT_P (y))
5342     return plus_constant (mode, x, INTVAL (y));
5343   else if (CONSTANT_P (x))
5344     tem = x, x = y, y = tem;
5345 
5346   if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5347     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5348 
5349   /* Note that if the operands of Y are specified in the opposite
5350      order in the recursive calls below, infinite recursion will occur.  */
5351   if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5352     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5353 
5354   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5355      constant will have been placed second.  */
5356   if (CONSTANT_P (x) && CONSTANT_P (y))
5357     {
5358       if (GET_CODE (x) == CONST)
5359 	x = XEXP (x, 0);
5360       if (GET_CODE (y) == CONST)
5361 	y = XEXP (y, 0);
5362 
5363       return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5364     }
5365 
5366   return gen_rtx_PLUS (mode, x, y);
5367 }
5368 
5369 /* If ADDR is a sum containing a pseudo register that should be
5370    replaced with a constant (from reg_equiv_constant),
5371    return the result of doing so, and also apply the associative
5372    law so that the result is more likely to be a valid address.
5373    (But it is not guaranteed to be one.)
5374 
5375    Note that at most one register is replaced, even if more are
5376    replaceable.  Also, we try to put the result into a canonical form
5377    so it is more likely to be a valid address.
5378 
5379    In all other cases, return ADDR.  */
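/* For example, if (reg 117) is a pseudo without a hard register whose
   equivalence is (symbol_ref "x"), then (plus (reg 117) (const_int 4))
   becomes (const (plus (symbol_ref "x") (const_int 4))).  The register
   number is only illustrative.  */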
5380 
5381 static rtx
5382 subst_indexed_address (rtx addr)
5383 {
5384   rtx op0 = 0, op1 = 0, op2 = 0;
5385   rtx tem;
5386   int regno;
5387 
5388   if (GET_CODE (addr) == PLUS)
5389     {
5390       /* Try to find a register to replace.  */
5391       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5392       if (REG_P (op0)
5393 	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5394 	  && reg_renumber[regno] < 0
5395 	  && reg_equiv_constant (regno) != 0)
5396 	op0 = reg_equiv_constant (regno);
5397       else if (REG_P (op1)
5398 	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5399 	       && reg_renumber[regno] < 0
5400 	       && reg_equiv_constant (regno) != 0)
5401 	op1 = reg_equiv_constant (regno);
5402       else if (GET_CODE (op0) == PLUS
5403 	       && (tem = subst_indexed_address (op0)) != op0)
5404 	op0 = tem;
5405       else if (GET_CODE (op1) == PLUS
5406 	       && (tem = subst_indexed_address (op1)) != op1)
5407 	op1 = tem;
5408       else
5409 	return addr;
5410 
5411       /* Pick out up to three things to add.  */
5412       if (GET_CODE (op1) == PLUS)
5413 	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5414       else if (GET_CODE (op0) == PLUS)
5415 	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5416 
5417       /* Compute the sum.  */
5418       if (op2 != 0)
5419 	op1 = form_sum (GET_MODE (addr), op1, op2);
5420       if (op1 != 0)
5421 	op0 = form_sum (GET_MODE (addr), op0, op1);
5422 
5423       return op0;
5424     }
5425   return addr;
5426 }
5427 
5428 /* Update the REG_INC notes for an insn.  It updates all REG_INC
5429    notes for the instruction which refer to REGNO so that they refer
5430    to the reload number.
5431 
5432    INSN is the insn for which any REG_INC notes need updating.
5433 
5434    REGNO is the register number which has been reloaded.
5435 
5436    RELOADNUM is the reload number.  */
5437 
5438 static void
5439 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5440 		       int reloadnum ATTRIBUTE_UNUSED)
5441 {
5442   if (!AUTO_INC_DEC)
5443     return;
5444 
5445   for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5446     if (REG_NOTE_KIND (link) == REG_INC
5447         && (int) REGNO (XEXP (link, 0)) == regno)
5448       push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5449 }
5450 
5451 /* Record the pseudo registers we must reload into hard registers in a
5452    subexpression of a would-be memory address, X referring to a value
5453    in mode MODE.  (This function is not called if the address we find
5454    is strictly valid.)
5455 
5456    CONTEXT = 1 means we are considering regs as index regs,
5457    = 0 means we are considering them as base regs.
5458    OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5459    or an autoinc code.
5460    If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5461    is the code of the index part of the address.  Otherwise, pass SCRATCH
5462    for this argument.
5463    OPNUM and TYPE specify the purpose of any reloads made.
5464 
5465    IND_LEVELS says how many levels of indirect addressing are
5466    supported at this point in the address.
5467 
5468    INSN, if nonzero, is the insn in which we do the reload.  It is used
5469    to determine if we may generate output reloads.
5470 
5471    We return nonzero if X, as a whole, is reloaded or replaced.  */
5472 
5473 /* Note that we take shortcuts assuming that no multi-reg machine mode
5474    occurs as part of an address.
5475    Also, this is not fully machine-customizable; it works for machines
5476    such as VAXen and 68000's and 32000's, but other possible machines
5477    could have addressing modes that this does not handle right.
5478    If you add push_reload calls here, you need to make sure gen_reload
5479    handles those cases gracefully.  */
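/* For example, in an address such as (plus (reg B) (mult (reg I)
   (const_int 4))), the (reg B) operand is checked as a base register
   (CONTEXT == 0), while the MULT subexpression, and hence (reg I), is
   handled as an index (CONTEXT == 1).  The register names are only
   illustrative.  */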
5480 
5481 static int
5482 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5483 			rtx x, int context,
5484 			enum rtx_code outer_code, enum rtx_code index_code,
5485 			rtx *loc, int opnum, enum reload_type type,
5486 			int ind_levels, rtx_insn *insn)
5487 {
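/* Nonzero if register REGNO may be used in the role selected by CONTEXT:
   as a base register when CONTEXT is 0, or as an index register when
   CONTEXT is 1.  */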
5488 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5489   ((CONTEXT) == 0							\
5490    ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5491    : REGNO_OK_FOR_INDEX_P (REGNO))
5492 
5493   enum reg_class context_reg_class;
5494   RTX_CODE code = GET_CODE (x);
5495   bool reloaded_inner_of_autoinc = false;
5496 
5497   if (context == 1)
5498     context_reg_class = INDEX_REG_CLASS;
5499   else
5500     context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5501 
5502   switch (code)
5503     {
5504     case PLUS:
5505       {
5506 	rtx orig_op0 = XEXP (x, 0);
5507 	rtx orig_op1 = XEXP (x, 1);
5508 	RTX_CODE code0 = GET_CODE (orig_op0);
5509 	RTX_CODE code1 = GET_CODE (orig_op1);
5510 	rtx op0 = orig_op0;
5511 	rtx op1 = orig_op1;
5512 
5513 	if (GET_CODE (op0) == SUBREG)
5514 	  {
5515 	    op0 = SUBREG_REG (op0);
5516 	    code0 = GET_CODE (op0);
5517 	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5518 	      op0 = gen_rtx_REG (word_mode,
5519 				 (REGNO (op0) +
5520 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5521 						       GET_MODE (SUBREG_REG (orig_op0)),
5522 						       SUBREG_BYTE (orig_op0),
5523 						       GET_MODE (orig_op0))));
5524 	  }
5525 
5526 	if (GET_CODE (op1) == SUBREG)
5527 	  {
5528 	    op1 = SUBREG_REG (op1);
5529 	    code1 = GET_CODE (op1);
5530 	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5531 	      /* ??? Why is this given op1's mode, while above for
5532 		 ??? op0 SUBREGs we use word_mode?  */
5533 	      op1 = gen_rtx_REG (GET_MODE (op1),
5534 				 (REGNO (op1) +
5535 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5536 						       GET_MODE (SUBREG_REG (orig_op1)),
5537 						       SUBREG_BYTE (orig_op1),
5538 						       GET_MODE (orig_op1))));
5539 	  }
5540 	/* A PLUS in the index register may be created only as a result of
5541 	   register rematerialization for an expression like &localvar*4.  Reload it.
5542 	   It may be possible to combine the displacement on the outer level,
5543 	   but it is probably not worthwhile to do so.  */
5544 	if (context == 1)
5545 	  {
5546 	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5547 				  opnum, ADDR_TYPE (type), ind_levels, insn);
5548 	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5549 			 context_reg_class,
5550 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5551 	    return 1;
5552 	  }
5553 
5554 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5555 	    || code0 == ZERO_EXTEND || code1 == MEM)
5556 	  {
5557 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5558 				    &XEXP (x, 0), opnum, type, ind_levels,
5559 				    insn);
5560 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5561 				    &XEXP (x, 1), opnum, type, ind_levels,
5562 				    insn);
5563 	  }
5564 
5565 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5566 		 || code1 == ZERO_EXTEND || code0 == MEM)
5567 	  {
5568 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5569 				    &XEXP (x, 0), opnum, type, ind_levels,
5570 				    insn);
5571 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5572 				    &XEXP (x, 1), opnum, type, ind_levels,
5573 				    insn);
5574 	  }
5575 
5576 	else if (code0 == CONST_INT || code0 == CONST
5577 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5578 	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5579 				  &XEXP (x, 1), opnum, type, ind_levels,
5580 				  insn);
5581 
5582 	else if (code1 == CONST_INT || code1 == CONST
5583 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5584 	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5585 				  &XEXP (x, 0), opnum, type, ind_levels,
5586 				  insn);
5587 
5588 	else if (code0 == REG && code1 == REG)
5589 	  {
5590 	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5591 		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5592 	      return 0;
5593 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5594 		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5595 	      return 0;
5596 	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5597 	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5598 				      &XEXP (x, 1), opnum, type, ind_levels,
5599 				      insn);
5600 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5601 	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5602 				      &XEXP (x, 0), opnum, type, ind_levels,
5603 				      insn);
5604 	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5605 	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5606 				      &XEXP (x, 0), opnum, type, ind_levels,
5607 				      insn);
5608 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5609 	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5610 				      &XEXP (x, 1), opnum, type, ind_levels,
5611 				      insn);
5612 	    else
5613 	      {
5614 		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5615 					&XEXP (x, 0), opnum, type, ind_levels,
5616 					insn);
5617 		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5618 					&XEXP (x, 1), opnum, type, ind_levels,
5619 					insn);
5620 	      }
5621 	  }
5622 
5623 	else if (code0 == REG)
5624 	  {
5625 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5626 				    &XEXP (x, 0), opnum, type, ind_levels,
5627 				    insn);
5628 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5629 				    &XEXP (x, 1), opnum, type, ind_levels,
5630 				    insn);
5631 	  }
5632 
5633 	else if (code1 == REG)
5634 	  {
5635 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5636 				    &XEXP (x, 1), opnum, type, ind_levels,
5637 				    insn);
5638 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5639 				    &XEXP (x, 0), opnum, type, ind_levels,
5640 				    insn);
5641 	  }
5642       }
5643 
5644       return 0;
5645 
5646     case POST_MODIFY:
5647     case PRE_MODIFY:
5648       {
5649 	rtx op0 = XEXP (x, 0);
5650 	rtx op1 = XEXP (x, 1);
5651 	enum rtx_code index_code;
5652 	int regno;
5653 	int reloadnum;
5654 
5655 	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5656 	  return 0;
5657 
5658 	/* Currently, we only support {PRE,POST}_MODIFY constructs
5659 	   where a base register is {inc,dec}remented by the contents
5660 	   of another register or by a constant value.  Thus, these
5661 	   operands must match.  */
5662 	gcc_assert (op0 == XEXP (op1, 0));
5663 
5664 	/* Require index register (or constant).  Let's just handle the
5665 	   register case in the meantime... If the target allows
5666 	   auto-modify by a constant then we could try replacing a pseudo
5667 	   register with its equivalent constant where applicable.
5668 
5669 	   We also handle the case where the register was eliminated
5670 	   resulting in a PLUS subexpression.
5671 
5672 	   If we later decide to reload the whole PRE_MODIFY or
5673 	   POST_MODIFY, inc_for_reload might clobber the reload register
5674 	   before reading the index.  The index register might therefore
5675 	   need to live longer than a TYPE reload normally would, so be
5676 	   conservative and class it as RELOAD_OTHER.  */
5677 	if ((REG_P (XEXP (op1, 1))
5678 	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5679 	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5680 	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5681 				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5682 				  ind_levels, insn);
5683 
5684 	gcc_assert (REG_P (XEXP (op1, 0)));
5685 
5686 	regno = REGNO (XEXP (op1, 0));
5687 	index_code = GET_CODE (XEXP (op1, 1));
5688 
5689 	/* A register that is incremented cannot be constant!  */
5690 	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5691 		    || reg_equiv_constant (regno) == 0);
5692 
5693 	/* Handle a register that is equivalent to a memory location
5694 	    which cannot be addressed directly.  */
5695 	if (reg_equiv_memory_loc (regno) != 0
5696 	    && (reg_equiv_address (regno) != 0
5697 		|| num_not_at_initial_offset))
5698 	  {
5699 	    rtx tem = make_memloc (XEXP (x, 0), regno);
5700 
5701 	    if (reg_equiv_address (regno)
5702 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5703 	      {
5704 		rtx orig = tem;
5705 
5706 		/* First reload the memory location's address.
5707 		    We can't use ADDR_TYPE (type) here, because we need to
5708 		    write back the value after reading it, hence we actually
5709 		    need two registers.  */
5710 		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5711 				      &XEXP (tem, 0), opnum,
5712 				      RELOAD_OTHER,
5713 				      ind_levels, insn);
5714 
5715 		if (!rtx_equal_p (tem, orig))
5716 		  push_reg_equiv_alt_mem (regno, tem);
5717 
5718 		/* Then reload the memory location into a base
5719 		   register.  */
5720 		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5721 					 &XEXP (op1, 0),
5722 					 base_reg_class (mode, as,
5723 							 code, index_code),
5724 					 GET_MODE (x), GET_MODE (x), 0,
5725 					 0, opnum, RELOAD_OTHER);
5726 
5727 		update_auto_inc_notes (this_insn, regno, reloadnum);
5728 		return 0;
5729 	      }
5730 	  }
5731 
5732 	if (reg_renumber[regno] >= 0)
5733 	  regno = reg_renumber[regno];
5734 
5735 	/* We require a base register here...  */
5736 	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5737 	  {
5738 	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5739 				     &XEXP (op1, 0), &XEXP (x, 0),
5740 				     base_reg_class (mode, as,
5741 						     code, index_code),
5742 				     GET_MODE (x), GET_MODE (x), 0, 0,
5743 				     opnum, RELOAD_OTHER);
5744 
5745 	    update_auto_inc_notes (this_insn, regno, reloadnum);
5746 	    return 0;
5747 	  }
5748       }
5749       return 0;
5750 
5751     case POST_INC:
5752     case POST_DEC:
5753     case PRE_INC:
5754     case PRE_DEC:
5755       if (REG_P (XEXP (x, 0)))
5756 	{
5757 	  int regno = REGNO (XEXP (x, 0));
5758 	  int value = 0;
5759 	  rtx x_orig = x;
5760 
5761 	  /* A register that is incremented cannot be constant!  */
5762 	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5763 		      || reg_equiv_constant (regno) == 0);
5764 
5765 	  /* Handle a register that is equivalent to a memory location
5766 	     which cannot be addressed directly.  */
5767 	  if (reg_equiv_memory_loc (regno) != 0
5768 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5769 	    {
5770 	      rtx tem = make_memloc (XEXP (x, 0), regno);
5771 	      if (reg_equiv_address (regno)
5772 		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5773 		{
5774 		  rtx orig = tem;
5775 
5776 		  /* First reload the memory location's address.
5777 		     We can't use ADDR_TYPE (type) here, because we need to
5778 		     write back the value after reading it, hence we actually
5779 		     need two registers.  */
5780 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5781 					&XEXP (tem, 0), opnum, type,
5782 					ind_levels, insn);
5783 		  reloaded_inner_of_autoinc = true;
5784 		  if (!rtx_equal_p (tem, orig))
5785 		    push_reg_equiv_alt_mem (regno, tem);
5786 		  /* Put this inside a new increment-expression.  */
5787 		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5788 		  /* Proceed to reload that, as if it contained a register.  */
5789 		}
5790 	    }
5791 
5792 	  /* If we have a hard register that is valid in this incdec
5793 	     context, don't make a reload.  If the register isn't
5794 	     suitable for autoinc/autodec, we can reload it.  But if an
5795 	     autoincrement of a register that we have just verified as
5796 	     suitable is still not "valid" to the target, then no
5797 	     autoincrement can be "valid".  If that is so and something
5798 	     made an autoincrement anyway, this must be a special context
5799 	     where one is allowed (for example, a "push" instruction).
5800 	     We can't improve this address, so leave it alone.  */
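	  /* For illustration, one such special context is a push like
	       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI R))
	     which may be valid even when PRE_DEC is not a generally
	     recognized addressing mode on the target.  */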
5801 
5802 	  /* Otherwise, reload the autoincrement into a suitable hard reg
5803 	     and record how much to increment by.  */
5804 
5805 	  if (reg_renumber[regno] >= 0)
5806 	    regno = reg_renumber[regno];
5807 	  if (regno >= FIRST_PSEUDO_REGISTER
5808 	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5809 				      index_code))
5810 	    {
5811 	      int reloadnum;
5812 
5813 	      /* If we can output the register afterwards, do so, this
5814 		 saves the extra update.
5815 		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5816 		 CALL_INSN - and it does not set CC0.
5817 		 But don't do this if we cannot directly address the
5818 		 memory location, since this will make it harder to
5819 		 reuse address reloads, and increases register pressure.
5820 		 Also don't do this if we can probably update x directly.  */
5821 	      rtx equiv = (MEM_P (XEXP (x, 0))
5822 			   ? XEXP (x, 0)
5823 			   : reg_equiv_mem (regno));
5824 	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5825 	      if (insn && NONJUMP_INSN_P (insn)
5826 #if HAVE_cc0
5827 		  && ! sets_cc0_p (PATTERN (insn))
5828 #endif
5829 		  && (regno < FIRST_PSEUDO_REGISTER
5830 		      || (equiv
5831 			  && memory_operand (equiv, GET_MODE (equiv))
5832 			  && ! (icode != CODE_FOR_nothing
5833 				&& insn_operand_matches (icode, 0, equiv)
5834 				&& insn_operand_matches (icode, 1, equiv))))
5835 		  /* Using RELOAD_OTHER means we emit this and the reload we
5836 		     made earlier in the wrong order.  */
5837 		  && !reloaded_inner_of_autoinc)
5838 		{
5839 		  /* We use the original pseudo for loc, so that
5840 		     emit_reload_insns() knows which pseudo this
5841 		     reload refers to and updates the pseudo rtx, not
5842 		     its equivalent memory location, as well as the
5843 		     corresponding entry in reg_last_reload_reg.  */
5844 		  loc = &XEXP (x_orig, 0);
5845 		  x = XEXP (x, 0);
5846 		  reloadnum
5847 		    = push_reload (x, x, loc, loc,
5848 				   context_reg_class,
5849 				   GET_MODE (x), GET_MODE (x), 0, 0,
5850 				   opnum, RELOAD_OTHER);
5851 		}
5852 	      else
5853 		{
5854 		  reloadnum
5855 		    = push_reload (x, x, loc, (rtx*) 0,
5856 				   context_reg_class,
5857 				   GET_MODE (x), GET_MODE (x), 0, 0,
5858 				   opnum, type);
5859 		  rld[reloadnum].inc
5860 		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5861 
5862 		  value = 1;
5863 		}
5864 
5865 	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5866 				     reloadnum);
5867 	    }
5868 	  return value;
5869 	}
5870       return 0;
5871 
5872     case TRUNCATE:
5873     case SIGN_EXTEND:
5874     case ZERO_EXTEND:
5875       /* Look for parts to reload in the inner expression and reload them
5876 	 too, in addition to this operation.  Reloading all inner parts in
5877 	 addition to this one shouldn't be necessary, but at this point,
5878 	 we don't know if we can possibly omit any part that *can* be
5879 	 reloaded.  Targets that are better off reloading just either part
5880 	 (or perhaps even a different part of an outer expression), should
5881 	 define LEGITIMIZE_RELOAD_ADDRESS.  */
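      /* Such a code can occur, for example, when an address contains
	 (zero_extend:DI (reg:SI R)) on a 64-bit target; both the inner
	 register and the whole extension may then need reloading.  */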
5882       find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5883 			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5884 			      type, ind_levels, insn);
5885       push_reload (x, NULL_RTX, loc, (rtx*) 0,
5886 		   context_reg_class,
5887 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5888       return 1;
5889 
5890     case MEM:
5891       /* This is probably the result of a substitution, by eliminate_regs, of
5892 	 an equivalent address for a pseudo that was not allocated to a hard
5893 	 register.  Verify that the specified address is valid and reload it
5894 	 into a register.
5895 
5896 	 Since we know we are going to reload this item, don't decrement for
5897 	 the indirection level.
5898 
5899 	 Note that this is actually conservative:  it would be slightly more
5900 	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5901 	 reload1.c here.  */
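      /* For illustration, a pseudo whose stack-slot equivalent is
	 (mem:SI (plus:SI (reg:SI fp) (const_int -8))) can leave such a
	 MEM nested inside another address after elimination; that MEM
	 is then reloaded into a register here.  */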
5902 
5903       find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5904 			    opnum, ADDR_TYPE (type), ind_levels, insn);
5905       push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5906 		   context_reg_class,
5907 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5908       return 1;
5909 
5910     case REG:
5911       {
5912 	int regno = REGNO (x);
5913 
5914 	if (reg_equiv_constant (regno) != 0)
5915 	  {
5916 	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5917 				       context_reg_class,
5918 				       GET_MODE (x), opnum, type, ind_levels);
5919 	    return 1;
5920 	  }
5921 
5922 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5923 	 that feeds this insn.  */
5924 	if (reg_equiv_mem (regno) != 0)
5925 	  {
5926 	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5927 			 context_reg_class,
5928 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5929 	    return 1;
5930 	  }
5931 #endif
5932 
5933 	if (reg_equiv_memory_loc (regno)
5934 	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5935 	  {
5936 	    rtx tem = make_memloc (x, regno);
5937 	    if (reg_equiv_address (regno) != 0
5938 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5939 	      {
5940 		x = tem;
5941 		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5942 				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
5943 				      ind_levels, insn);
5944 		if (!rtx_equal_p (x, tem))
5945 		  push_reg_equiv_alt_mem (regno, x);
5946 	      }
5947 	  }
5948 
5949 	if (reg_renumber[regno] >= 0)
5950 	  regno = reg_renumber[regno];
5951 
5952 	if (regno >= FIRST_PSEUDO_REGISTER
5953 	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5954 				    index_code))
5955 	  {
5956 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5957 			 context_reg_class,
5958 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5959 	    return 1;
5960 	  }
5961 
5962 	/* If a register appearing in an address is the subject of a CLOBBER
5963 	   in this insn, reload it into some other register to be safe.
5964 	   The CLOBBER is supposed to make the register unavailable
5965 	   from before this insn to after it.  */
5966 	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5967 	  {
5968 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5969 			 context_reg_class,
5970 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5971 	    return 1;
5972 	  }
5973       }
5974       return 0;
5975 
5976     case SUBREG:
5977       if (REG_P (SUBREG_REG (x)))
5978 	{
5979 	  /* If this is a SUBREG of a hard register and the resulting register
5980 	     is of the wrong class, reload the whole SUBREG.  This avoids
5981 	     needless copies if SUBREG_REG is multi-word.  */
5982 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5983 	    {
5984 	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5985 
5986 	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5987 				       index_code))
5988 		{
5989 		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
5990 			       context_reg_class,
5991 			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5992 		  return 1;
5993 		}
5994 	    }
5995 	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5996 	     is larger than the class size, then reload the whole SUBREG.  */
5997 	  else
5998 	    {
5999 	      enum reg_class rclass = context_reg_class;
6000 	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6001 		  > reg_class_size[(int) rclass])
6002 		{
6003 		  /* If the inner register will be replaced by a memory
6004 		     reference, we can do this only if we can replace the
6005 		     whole subreg by a (narrower) memory reference.  If
6006 		     this is not possible, fall through and reload just
6007 		     the inner register (including address reloads).  */
6008 		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6009 		    {
6010 		      rtx tem = find_reloads_subreg_address (x, opnum,
6011 							     ADDR_TYPE (type),
6012 							     ind_levels, insn,
6013 							     NULL);
6014 		      if (tem)
6015 			{
6016 			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6017 				       GET_MODE (tem), VOIDmode, 0, 0,
6018 				       opnum, type);
6019 			  return 1;
6020 			}
6021 		    }
6022 		  else
6023 		    {
6024 		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6025 				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6026 		      return 1;
6027 		    }
6028 		}
6029 	    }
6030 	}
6031       break;
6032 
6033     default:
6034       break;
6035     }
6036 
6037   {
6038     const char *fmt = GET_RTX_FORMAT (code);
6039     int i;
6040 
6041     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6042       {
6043 	if (fmt[i] == 'e')
6044 	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6045 	     we get here.  */
6046 	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6047 				  code, SCRATCH, &XEXP (x, i),
6048 				  opnum, type, ind_levels, insn);
6049       }
6050   }
6051 
6052 #undef REG_OK_FOR_CONTEXT
6053   return 0;
6054 }
6055 
6056 /* X, which is found at *LOC, is a part of an address that needs to be
6057    reloaded into a register of class RCLASS.  If X is a constant, or if
6058    X is a PLUS that contains a constant, check that the constant is a
6059    legitimate operand and that we are supposed to be able to load
6060    it into the register.
6061 
6062    If not, force the constant into memory and reload the MEM instead.
6063 
6064    MODE is the mode to use, in case X is an integer constant.
6065 
6066    OPNUM and TYPE describe the purpose of any reloads made.
6067 
6068    IND_LEVELS says how many levels of indirect addressing this machine
6069    supports.  */
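/* For example, if X is a floating-point CONST_DOUBLE that the target
   cannot load as an immediate, it is forced into the constant pool and
   the resulting MEM (after reloading its address if necessary) is
   pushed as the reload instead.  */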
6070 
6071 static void
6072 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6073 			   machine_mode mode, int opnum,
6074 			   enum reload_type type, int ind_levels)
6075 {
6076   if (CONSTANT_P (x)
6077       && (!targetm.legitimate_constant_p (mode, x)
6078 	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6079     {
6080       x = force_const_mem (mode, x);
6081       find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6082 			    opnum, type, ind_levels, 0);
6083     }
6084 
6085   else if (GET_CODE (x) == PLUS
6086 	   && CONSTANT_P (XEXP (x, 1))
6087 	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6088 	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6089 		   == NO_REGS))
6090     {
6091       rtx tem;
6092 
6093       tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6094       x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6095       find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6096 			    opnum, type, ind_levels, 0);
6097     }
6098 
6099   push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6100 	       mode, VOIDmode, 0, 0, opnum, type);
6101 }
6102 
6103 /* X, a subreg of a pseudo, is a part of an address that needs to be
6104    reloaded, and the pseudo is equivalent to a memory location.
6105 
6106    Attempt to replace the whole subreg by a (possibly narrower or wider)
6107    memory reference.  If this is possible, return this new memory
6108    reference, and push all required address reloads.  Otherwise,
6109    return NULL.
6110 
6111    OPNUM and TYPE identify the purpose of the reload.
6112 
6113    IND_LEVELS says how many levels of indirect addressing are
6114    supported at this point in the address.
6115 
6116    INSN, if nonzero, is the insn in which we do the reload.  It is used
6117    to determine where to put USEs for pseudos that we have to replace with
6118    stack slots.  */
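/* For example, (subreg:QI (reg:SI P) 0), where pseudo P is equivalent
   to a stack slot, can often be rewritten as a QImode MEM referencing
   the corresponding byte of that slot.  */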
6119 
6120 static rtx
6121 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6122 			     int ind_levels, rtx_insn *insn,
6123 			     int *address_reloaded)
6124 {
6125   machine_mode outer_mode = GET_MODE (x);
6126   machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6127   int regno = REGNO (SUBREG_REG (x));
6128   int reloaded = 0;
6129   rtx tem, orig;
6130   poly_int64 offset;
6131 
6132   gcc_assert (reg_equiv_memory_loc (regno) != 0);
6133 
6134   /* We cannot replace the subreg with a modified memory reference if:
6135 
6136      - we have a paradoxical subreg that implicitly acts as a zero or
6137        sign extension operation due to LOAD_EXTEND_OP;
6138 
6139      - we have a subreg that is implicitly supposed to act on the full
6140        register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6141 
6142      - the address of the equivalent memory location is mode-dependent;  or
6143 
6144      - we have a paradoxical subreg and the resulting memory is not
6145        sufficiently aligned to allow access in the wider mode.
6146 
6147     In addition, we choose not to perform the replacement for *any*
6148     paradoxical subreg, even if it were possible in principle.  This
6149     is to avoid generating wider memory references than necessary.
6150 
6151     This corresponds to how previous versions of reload used to handle
6152     paradoxical subregs where no address reload was required.  */
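  /* For illustration, (subreg:DI (reg:SI P) 0) is such a paradoxical
     subreg: the outer mode is wider than the inner mode, so no
     replacement MEM is generated for it here.  */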
6153 
6154   if (paradoxical_subreg_p (x))
6155     return NULL;
6156 
6157   if (WORD_REGISTER_OPERATIONS
6158       && partial_subreg_p (outer_mode, inner_mode)
6159       && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6160 				       GET_MODE_SIZE (inner_mode) - 1,
6161 				       UNITS_PER_WORD))
6162     return NULL;
6163 
6164   /* Since we don't attempt to handle paradoxical subregs, we can just
6165      call into simplify_subreg, which will handle all remaining checks
6166      for us.  */
6167   orig = make_memloc (SUBREG_REG (x), regno);
6168   offset = SUBREG_BYTE (x);
6169   tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6170   if (!tem || !MEM_P (tem))
6171     return NULL;
6172 
6173   /* Now push all required address reloads, if any.  */
6174   reloaded = find_reloads_address (GET_MODE (tem), &tem,
6175 				   XEXP (tem, 0), &XEXP (tem, 0),
6176 				   opnum, type, ind_levels, insn);
6177   /* ??? Do we need to handle nonzero offsets somehow?  */
6178   if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6179     push_reg_equiv_alt_mem (regno, tem);
6180 
6181   /* For some processors an address may be valid in the original mode but
6182      not in a smaller mode.  For example, ARM accepts a scaled index register
6183      in SImode but not in HImode.  Note that this is only a problem if the
6184      address in reg_equiv_mem is already invalid in the new mode; other
6185      cases would be fixed by find_reloads_address as usual.
6186 
6187      ??? We attempt to handle such cases here by doing an additional reload
6188      of the full address after the usual processing by find_reloads_address.
6189      Note that this may not work in the general case, but it seems to cover
6190      the cases where this situation currently occurs.  A more general fix
6191      might be to reload the *value* instead of the address, but this would
6192      not be expected by the callers of this routine as-is.
6193 
6194      If find_reloads_address has already completely replaced the address, there
6195      is nothing further to do.  */
6196   if (reloaded == 0
6197       && reg_equiv_mem (regno) != 0
6198       && !strict_memory_address_addr_space_p
6199 		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6200 		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6201     {
6202       push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6203 		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6204 				   MEM, SCRATCH),
6205 		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6206       reloaded = 1;
6207     }
6208 
6209   /* If this is not a toplevel operand, find_reloads doesn't see this
6210      substitution.  We have to emit a USE of the pseudo so that
6211      delete_output_reload can see it.  */
6212   if (replace_reloads && recog_data.operand[opnum] != x)
6213     /* We mark the USE with QImode so that we recognize it as one that
6214        can be safely deleted at the end of reload.  */
6215     PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6216 	      QImode);
6217 
6218   if (address_reloaded)
6219     *address_reloaded = reloaded;
6220 
6221   return tem;
6222 }
6223 
6224 /* Substitute into the current INSN the registers into which we have reloaded
6225    the things that need reloading.  The array `replacements'
6226    contains the locations of all pointers that must be changed
6227    and says what to replace them with.
6228 
6229    Each substitution is performed in place at the recorded location.  */
6230 
6231 void
6232 subst_reloads (rtx_insn *insn)
6233 {
6234   int i;
6235 
6236   for (i = 0; i < n_replacements; i++)
6237     {
6238       struct replacement *r = &replacements[i];
6239       rtx reloadreg = rld[r->what].reg_rtx;
6240       if (reloadreg)
6241 	{
6242 #ifdef DEBUG_RELOAD
6243 	  /* This checking takes a very long time on some platforms
6244 	     causing the gcc.c-torture/compile/limits-fnargs.c test
6245 	     to time out during testing.  See PR 31850.
6246 
6247 	     Internal consistency test.  Check that we don't modify
6248 	     anything in the equivalence arrays.  Whenever something from
6249 	     those arrays needs to be reloaded, it must be unshared before
6250 	     being substituted into; the equivalence must not be modified.
6251 	     Otherwise, if the equivalence is used after that, it will
6252 	     have been modified, and the thing substituted (probably a
6253 	     register) is likely overwritten and not a usable equivalence.  */
6254 	  int check_regno;
6255 
6256 	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6257 	    {
6258 #define CHECK_MODF(ARRAY)						\
6259 	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6260 			  || !loc_mentioned_in_p (r->where,		\
6261 						  (*reg_equivs)[check_regno].ARRAY))
6262 
6263 	      CHECK_MODF (constant);
6264 	      CHECK_MODF (memory_loc);
6265 	      CHECK_MODF (address);
6266 	      CHECK_MODF (mem);
6267 #undef CHECK_MODF
6268 	    }
6269 #endif /* DEBUG_RELOAD */
6270 
6271 	  /* If we're replacing a LABEL_REF with a register, there must
6272 	     already be an indication (to e.g. flow) which label this
6273 	     register refers to.  */
6274 	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6275 		      || !JUMP_P (insn)
6276 		      || find_reg_note (insn,
6277 					REG_LABEL_OPERAND,
6278 					XEXP (*r->where, 0))
6279 		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6280 
6281 	  /* Encapsulate RELOADREG so its machine mode matches what
6282 	     used to be there.  Note that gen_lowpart_common will
6283 	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6284 	     will always be a REG here.  */
6285 	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6286 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6287 
6288 	  *r->where = reloadreg;
6289 	}
6290       /* If reload got no reg and isn't optional, something's wrong.  */
6291       else
6292 	gcc_assert (rld[r->what].optional);
6293     }
6294 }
6295 
6296 /* Make a copy of any replacements being done into X and move those
6297    copies to locations in Y, a copy of X.  */
6298 
6299 void
6300 copy_replacements (rtx x, rtx y)
6301 {
6302   copy_replacements_1 (&x, &y, n_replacements);
6303 }
6304 
6305 static void
6306 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6307 {
6308   int i, j;
6309   rtx x, y;
6310   struct replacement *r;
6311   enum rtx_code code;
6312   const char *fmt;
6313 
6314   for (j = 0; j < orig_replacements; j++)
6315     if (replacements[j].where == px)
6316       {
6317 	r = &replacements[n_replacements++];
6318 	r->where = py;
6319 	r->what = replacements[j].what;
6320 	r->mode = replacements[j].mode;
6321       }
6322 
6323   x = *px;
6324   y = *py;
6325   code = GET_CODE (x);
6326   fmt = GET_RTX_FORMAT (code);
6327 
6328   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6329     {
6330       if (fmt[i] == 'e')
6331 	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6332       else if (fmt[i] == 'E')
6333 	for (j = XVECLEN (x, i); --j >= 0; )
6334 	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6335 			       orig_replacements);
6336     }
6337 }
6338 
6339 /* Change any replacements being done to *X to be done to *Y.  */
6340 
6341 void
6342 move_replacements (rtx *x, rtx *y)
6343 {
6344   int i;
6345 
6346   for (i = 0; i < n_replacements; i++)
6347     if (replacements[i].where == x)
6348       replacements[i].where = y;
6349 }
6350 
6351 /* If LOC was scheduled to be replaced by something, return the replacement.
6352    Otherwise, return *LOC.  */
6353 
6354 rtx
6355 find_replacement (rtx *loc)
6356 {
6357   struct replacement *r;
6358 
6359   for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6360     {
6361       rtx reloadreg = rld[r->what].reg_rtx;
6362 
6363       if (reloadreg && r->where == loc)
6364 	{
6365 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6366 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6367 
6368 	  return reloadreg;
6369 	}
6370       else if (reloadreg && GET_CODE (*loc) == SUBREG
6371 	       && r->where == &SUBREG_REG (*loc))
6372 	{
6373 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6374 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6375 
6376 	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6377 				      GET_MODE (SUBREG_REG (*loc)),
6378 				      SUBREG_BYTE (*loc));
6379 	}
6380     }
6381 
6382   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6383      what's inside and make a new rtl if so.  */
6384   if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6385       || GET_CODE (*loc) == MULT)
6386     {
6387       rtx x = find_replacement (&XEXP (*loc, 0));
6388       rtx y = find_replacement (&XEXP (*loc, 1));
6389 
6390       if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6391 	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6392     }
6393 
6394   return *loc;
6395 }
6396 
6397 /* Return nonzero if register in range [REGNO, ENDREGNO)
6398    appears either explicitly or implicitly in X
6399    other than being stored into (except for earlyclobber operands).
6400 
6401    References contained within the substructure at LOC do not count.
6402    LOC may be zero, meaning don't ignore anything.
6403 
6404    This is similar to refers_to_regno_p in rtlanal.c except that we
6405    look at equivalences for pseudos that didn't get hard registers.  */
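/* For example, if pseudo P did not get a hard register and is
   equivalent to (mem:SI (plus:SI (reg:SI fp) (const_int -4))), then a
   use of P counts as a reference to the frame pointer's register
   number.  */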
6406 
6407 static int
6408 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6409 			      rtx x, rtx *loc)
6410 {
6411   int i;
6412   unsigned int r;
6413   RTX_CODE code;
6414   const char *fmt;
6415 
6416   if (x == 0)
6417     return 0;
6418 
6419  repeat:
6420   code = GET_CODE (x);
6421 
6422   switch (code)
6423     {
6424     case REG:
6425       r = REGNO (x);
6426 
6427       /* If this is a pseudo, a hard register must not have been allocated.
6428 	 X must therefore either be a constant or be in memory.  */
6429       if (r >= FIRST_PSEUDO_REGISTER)
6430 	{
6431 	  if (reg_equiv_memory_loc (r))
6432 	    return refers_to_regno_for_reload_p (regno, endregno,
6433 						 reg_equiv_memory_loc (r),
6434 						 (rtx*) 0);
6435 
6436 	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6437 	  return 0;
6438 	}
6439 
6440       return endregno > r && regno < END_REGNO (x);
6441 
6442     case SUBREG:
6443       /* If this is a SUBREG of a hard reg, we can see exactly which
6444 	 registers are being modified.  Otherwise, handle normally.  */
6445       if (REG_P (SUBREG_REG (x))
6446 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6447 	{
6448 	  unsigned int inner_regno = subreg_regno (x);
6449 	  unsigned int inner_endregno
6450 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6451 			     ? subreg_nregs (x) : 1);
6452 
6453 	  return endregno > inner_regno && regno < inner_endregno;
6454 	}
6455       break;
6456 
6457     case CLOBBER:
6458     case SET:
6459       if (&SET_DEST (x) != loc
6460 	  /* Note setting a SUBREG counts as referring to the REG it is in for
6461 	     a pseudo but not for hard registers since we can
6462 	     treat each word individually.  */
6463 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6464 	       && loc != &SUBREG_REG (SET_DEST (x))
6465 	       && REG_P (SUBREG_REG (SET_DEST (x)))
6466 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6467 	       && refers_to_regno_for_reload_p (regno, endregno,
6468 						SUBREG_REG (SET_DEST (x)),
6469 						loc))
6470 	      /* If the output is an earlyclobber operand, this is
6471 		 a conflict.  */
6472 	      || ((!REG_P (SET_DEST (x))
6473 		   || earlyclobber_operand_p (SET_DEST (x)))
6474 		  && refers_to_regno_for_reload_p (regno, endregno,
6475 						   SET_DEST (x), loc))))
6476 	return 1;
6477 
6478       if (code == CLOBBER || loc == &SET_SRC (x))
6479 	return 0;
6480       x = SET_SRC (x);
6481       goto repeat;
6482 
6483     default:
6484       break;
6485     }
6486 
6487   /* X does not match, so try its subexpressions.  */
6488 
6489   fmt = GET_RTX_FORMAT (code);
6490   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6491     {
6492       if (fmt[i] == 'e' && loc != &XEXP (x, i))
6493 	{
6494 	  if (i == 0)
6495 	    {
6496 	      x = XEXP (x, 0);
6497 	      goto repeat;
6498 	    }
6499 	  else
6500 	    if (refers_to_regno_for_reload_p (regno, endregno,
6501 					      XEXP (x, i), loc))
6502 	      return 1;
6503 	}
6504       else if (fmt[i] == 'E')
6505 	{
6506 	  int j;
6507 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6508 	    if (loc != &XVECEXP (x, i, j)
6509 		&& refers_to_regno_for_reload_p (regno, endregno,
6510 						 XVECEXP (x, i, j), loc))
6511 	      return 1;
6512 	}
6513     }
6514   return 0;
6515 }
6516 
6517 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6518    we check if any register number in X conflicts with the relevant register
6519    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6520    contains a MEM (we don't bother checking for memory addresses that can't
6521    conflict because we expect this to be a rare case).
6522 
6523    This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6524    that we look at equivalences for pseudos that didn't get hard registers.  */
6525 
6526 int
6527 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6528 {
6529   int regno, endregno;
6530 
6531   /* Overly conservative.  */
6532   if (GET_CODE (x) == STRICT_LOW_PART
6533       || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6534     x = XEXP (x, 0);
6535 
6536   /* If either argument is a constant, then modifying X cannot affect IN.  */
6537   if (CONSTANT_P (x) || CONSTANT_P (in))
6538     return 0;
6539   else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6540     return refers_to_mem_for_reload_p (in);
6541   else if (GET_CODE (x) == SUBREG)
6542     {
6543       regno = REGNO (SUBREG_REG (x));
6544       if (regno < FIRST_PSEUDO_REGISTER)
6545 	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6546 				      GET_MODE (SUBREG_REG (x)),
6547 				      SUBREG_BYTE (x),
6548 				      GET_MODE (x));
6549       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6550 			  ? subreg_nregs (x) : 1);
6551 
6552       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6553     }
6554   else if (REG_P (x))
6555     {
6556       regno = REGNO (x);
6557 
6558       /* If this is a pseudo, it must not have been assigned a hard register.
6559 	 Therefore, it must either be in memory or be a constant.  */
6560 
6561       if (regno >= FIRST_PSEUDO_REGISTER)
6562 	{
6563 	  if (reg_equiv_memory_loc (regno))
6564 	    return refers_to_mem_for_reload_p (in);
6565 	  gcc_assert (reg_equiv_constant (regno));
6566 	  return 0;
6567 	}
6568 
6569       endregno = END_REGNO (x);
6570 
6571       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6572     }
6573   else if (MEM_P (x))
6574     return refers_to_mem_for_reload_p (in);
6575   else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6576 	   || GET_CODE (x) == CC0)
6577     return reg_mentioned_p (x, in);
6578   else
6579     {
6580       gcc_assert (GET_CODE (x) == PLUS);
6581 
6582       /* We actually want to know if X is mentioned somewhere inside IN.
6583 	 We must not say that (plus (sp) (const_int 124)) is in
6584 	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6585 	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6586 	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6587       while (MEM_P (in))
6588 	in = XEXP (in, 0);
6589       if (REG_P (in))
6590 	return 0;
6591       else if (GET_CODE (in) == PLUS)
6592 	return (rtx_equal_p (x, in)
6593 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6594 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6595       else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6596 		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6597     }
6598 
6599   gcc_unreachable ();
6600 }
6601 
6602 /* Return nonzero if anything in X contains a MEM.  Look also for pseudo
6603    registers.  */
6604 
6605 static int
6606 refers_to_mem_for_reload_p (rtx x)
6607 {
6608   const char *fmt;
6609   int i;
6610 
6611   if (MEM_P (x))
6612     return 1;
6613 
6614   if (REG_P (x))
6615     return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6616 	    && reg_equiv_memory_loc (REGNO (x)));
6617 
6618   fmt = GET_RTX_FORMAT (GET_CODE (x));
6619   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6620     if (fmt[i] == 'e'
6621 	&& (MEM_P (XEXP (x, i))
6622 	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6623       return 1;
6624 
6625   return 0;
6626 }
6627 
6628 /* Check the insns before INSN to see if there is a suitable register
6629    containing the same value as GOAL.
6630    If OTHER is -1, look for a register in class RCLASS.
6631    Otherwise, just see if register number OTHER shares GOAL's value.
6632 
6633    Return an rtx for the register found, or zero if none is found.
6634 
6635    If RELOAD_REG_P is (short *)1,
6636    we reject any hard reg that appears in reload_reg_rtx
6637    because such a hard reg is also needed coming into this insn.
6638 
6639    If RELOAD_REG_P is any other nonzero value,
6640    it is a vector indexed by hard reg number
6641    and we reject any hard reg whose element in the vector is nonnegative
6642    as well as any that appears in reload_reg_rtx.
6643 
6644    If GOAL is zero, then GOALREG is a register number; we look
6645    for an equivalent for that register.
6646 
6647    MODE is the machine mode of the value we want an equivalence for.
6648    If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6649 
6650    This function is used by jump.c as well as in the reload pass.
6651 
6652    If GOAL is the sum of the stack pointer and a constant, we treat it
6653    as if it were a constant except that sp is required to be unchanging.  */
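/* For example, if an earlier insn was (set (reg:SI 3) (mem:SI (reg:SI 4)))
   and GOAL is that same MEM, then (reg:SI 3) may be returned, provided
   it satisfies RCLASS and neither the register nor the memory location
   has been altered in between.  */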
6654 
6655 rtx
6656 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6657 		short *reload_reg_p, int goalreg, machine_mode mode)
6658 {
6659   rtx_insn *p = insn;
6660   rtx goaltry, valtry, value;
6661   rtx_insn *where;
6662   rtx pat;
6663   int regno = -1;
6664   int valueno;
6665   int goal_mem = 0;
6666   int goal_const = 0;
6667   int goal_mem_addr_varies = 0;
6668   int need_stable_sp = 0;
6669   int nregs;
6670   int valuenregs;
6671   int num = 0;
6672 
6673   if (goal == 0)
6674     regno = goalreg;
6675   else if (REG_P (goal))
6676     regno = REGNO (goal);
6677   else if (MEM_P (goal))
6678     {
6679       enum rtx_code code = GET_CODE (XEXP (goal, 0));
6680       if (MEM_VOLATILE_P (goal))
6681 	return 0;
6682       if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6683 	return 0;
6684       /* An address with side effects must be reexecuted.  */
6685       switch (code)
6686 	{
6687 	case POST_INC:
6688 	case PRE_INC:
6689 	case POST_DEC:
6690 	case PRE_DEC:
6691 	case POST_MODIFY:
6692 	case PRE_MODIFY:
6693 	  return 0;
6694 	default:
6695 	  break;
6696 	}
6697       goal_mem = 1;
6698     }
6699   else if (CONSTANT_P (goal))
6700     goal_const = 1;
6701   else if (GET_CODE (goal) == PLUS
6702 	   && XEXP (goal, 0) == stack_pointer_rtx
6703 	   && CONSTANT_P (XEXP (goal, 1)))
6704     goal_const = need_stable_sp = 1;
6705   else if (GET_CODE (goal) == PLUS
6706 	   && XEXP (goal, 0) == frame_pointer_rtx
6707 	   && CONSTANT_P (XEXP (goal, 1)))
6708     goal_const = 1;
6709   else
6710     return 0;
6711 
6712   num = 0;
6713   /* Scan insns back from INSN, looking for one that copies
6714      a value into or out of GOAL.
6715      Stop and give up if we reach a label.  */
6716 
6717   while (1)
6718     {
6719       p = PREV_INSN (p);
6720       if (p && DEBUG_INSN_P (p))
6721 	continue;
6722       num++;
6723       if (p == 0 || LABEL_P (p)
6724 	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6725 	return 0;
6726 
6727       /* Don't reuse register contents from before a setjmp-type
6728 	 function call; on the second return (from the longjmp) it
6729 	 might have been clobbered by a later reuse.  It doesn't
6730 	 seem worthwhile to go and check whether it really is
6731 	 reused, even if that information were readily available;
6732 	 just don't reuse it across the setjmp call.  */
6733       if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6734 	return 0;
6735 
6736       if (NONJUMP_INSN_P (p)
6737 	  /* If we don't want spill regs ...  */
6738 	  && (! (reload_reg_p != 0
6739 		 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6740 	      /* ... then ignore insns introduced by reload; they aren't
6741 		 useful and can cause results in reload_as_needed to be
6742 		 different from what they were when calculating the need for
6743 		 spills.  If we notice an input-reload insn here, we will
6744 		 reject it below, but it might hide a usable equivalent.
6745 		 That makes bad code.  It may even fail: perhaps no reg was
6746 		 spilled for this insn because it was assumed we would find
6747 		 that equivalent.  */
6748 	      || INSN_UID (p) < reload_first_uid))
6749 	{
6750 	  rtx tem;
6751 	  pat = single_set (p);
6752 
6753 	  /* First check for something that sets some reg equal to GOAL.  */
6754 	  if (pat != 0
6755 	      && ((regno >= 0
6756 		   && true_regnum (SET_SRC (pat)) == regno
6757 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6758 		  ||
6759 		  (regno >= 0
6760 		   && true_regnum (SET_DEST (pat)) == regno
6761 		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6762 		  ||
6763 		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6764 		   /* When looking for stack pointer + const,
6765 		      make sure we don't use a stack adjust.  */
6766 		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6767 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6768 		  || (goal_mem
6769 		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6770 		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6771 		  || (goal_mem
6772 		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6773 		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6774 		  /* If we are looking for a constant,
6775 		     and something equivalent to that constant was copied
6776 		     into a reg, we can use that reg.  */
6777 		  || (goal_const && REG_NOTES (p) != 0
6778 		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6779 		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6780 			   && (valueno
6781 			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6782 			  || (REG_P (SET_DEST (pat))
6783 			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6784 			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6785 			      && CONST_INT_P (goal)
6786 			      && (goaltry = operand_subword (XEXP (tem, 0), 0,
6787 							     0, VOIDmode)) != 0
6788 			      && rtx_equal_p (goal, goaltry)
6789 			      && (valtry
6790 				  = operand_subword (SET_DEST (pat), 0, 0,
6791 						     VOIDmode))
6792 			      && (valueno = true_regnum (valtry)) >= 0)))
6793 		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6794 							  NULL_RTX))
6795 		      && REG_P (SET_DEST (pat))
6796 		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6797 		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6798 		      && CONST_INT_P (goal)
6799 		      && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6800 						     VOIDmode)) != 0
6801 		      && rtx_equal_p (goal, goaltry)
6802 		      && (valtry
6803 			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6804 		      && (valueno = true_regnum (valtry)) >= 0)))
6805 	    {
6806 	      if (other >= 0)
6807 		{
6808 		  if (valueno != other)
6809 		    continue;
6810 		}
6811 	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6812 		continue;
6813 	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6814 					  mode, valueno))
6815 		continue;
6816 	      value = valtry;
6817 	      where = p;
6818 	      break;
6819 	    }
6820 	}
6821     }
6822 
6823   /* We found a previous insn copying GOAL into a suitable other reg VALUE
6824      (or copying VALUE into GOAL, if GOAL is also a register).
6825      Now verify that VALUE is really valid.  */
6826 
6827   /* VALUENO is the register number of VALUE; a hard register.  */
6828 
6829   /* Don't try to re-use something that is killed in this insn.  We want
6830      to be able to trust REG_UNUSED notes.  */
6831   if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6832     return 0;
6833 
6834   /* If we propose to get the value from the stack pointer or if GOAL is
6835      a MEM based on the stack pointer, we need a stable SP.  */
6836   if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6837       || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6838 							  goal)))
6839     need_stable_sp = 1;
6840 
6841   /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6842   if (GET_MODE (value) != mode)
6843     return 0;
6844 
6845   /* Reject VALUE if it was loaded from GOAL
6846      and is also a register that appears in the address of GOAL.  */
6847 
6848   if (goal_mem && value == SET_DEST (single_set (where))
6849       && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6850 				       goal, (rtx*) 0))
6851     return 0;
6852 
6853   /* Reject registers that overlap GOAL.  */
6854 
6855   if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6856     nregs = hard_regno_nregs (regno, mode);
6857   else
6858     nregs = 1;
6859   valuenregs = hard_regno_nregs (valueno, mode);
6860 
6861   if (!goal_mem && !goal_const
6862       && regno + nregs > valueno && regno < valueno + valuenregs)
6863     return 0;
6864 
6865   /* Reject VALUE if it is one of the regs reserved for reloads.
6866      Reload1 knows how to reuse them anyway, and it would get
6867      confused if we allocated one without its knowledge.
6868      (Now that insns introduced by reload are ignored above,
6869      this case shouldn't happen, but I'm not positive.)  */
6870 
6871   if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6872     {
6873       int i;
6874       for (i = 0; i < valuenregs; ++i)
6875 	if (reload_reg_p[valueno + i] >= 0)
6876 	  return 0;
6877     }
6878 
6879   /* Reject VALUE if it is a register being used for an input reload
6880      even if it is not one of those reserved.  */
6881 
6882   if (reload_reg_p != 0)
6883     {
6884       int i;
6885       for (i = 0; i < n_reloads; i++)
6886 	if (rld[i].reg_rtx != 0
6887 	    && rld[i].in
6888 	    && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6889 	    && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6890 	  return 0;
6891     }
6892 
6893   if (goal_mem)
6894     /* We must treat frame pointer as varying here,
6895     /* We must treat the frame pointer as varying here,
6896     goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6897 
6898   /* Now verify that the values of GOAL and VALUE remain unaltered
6899      until INSN is reached.  */
6900 
6901   p = insn;
6902   while (1)
6903     {
6904       p = PREV_INSN (p);
6905       if (p == where)
6906 	return value;
6907 
6908       /* Don't trust the conversion past a function call
6909 	 if either of the two is in a call-clobbered register, or memory.  */
6910       if (CALL_P (p))
6911 	{
6912 	  int i;
6913 
6914 	  if (goal_mem || need_stable_sp)
6915 	    return 0;
6916 
6917 	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6918 	    for (i = 0; i < nregs; ++i)
6919 	      if (call_used_regs[regno + i]
6920 		  || targetm.hard_regno_call_part_clobbered (NULL, regno + i,
6921 							     mode))
6922 		return 0;
6923 
6924 	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6925 	    for (i = 0; i < valuenregs; ++i)
6926 	      if (call_used_regs[valueno + i]
6927 		  || targetm.hard_regno_call_part_clobbered (NULL, valueno + i,
6928 							     mode))
6929 		return 0;
6930 	}
6931 
6932       if (INSN_P (p))
6933 	{
6934 	  pat = PATTERN (p);
6935 
6936 	  /* Watch out for unspec_volatile, and volatile asms.  */
6937 	  if (volatile_insn_p (pat))
6938 	    return 0;
6939 
6940 	  /* If this insn P stores in either GOAL or VALUE, return 0.
6941 	     If GOAL is a memory ref and this insn writes memory, return 0.
6942 	     If GOAL is a memory ref and its address is not constant,
6943 	     and this insn P changes a register used in GOAL, return 0.  */
6944 
6945 	  if (GET_CODE (pat) == COND_EXEC)
6946 	    pat = COND_EXEC_CODE (pat);
6947 	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6948 	    {
6949 	      rtx dest = SET_DEST (pat);
6950 	      while (GET_CODE (dest) == SUBREG
6951 		     || GET_CODE (dest) == ZERO_EXTRACT
6952 		     || GET_CODE (dest) == STRICT_LOW_PART)
6953 		dest = XEXP (dest, 0);
6954 	      if (REG_P (dest))
6955 		{
6956 		  int xregno = REGNO (dest);
6957 		  int end_xregno = END_REGNO (dest);
6958 		  if (xregno < regno + nregs && end_xregno > regno)
6959 		    return 0;
6960 		  if (xregno < valueno + valuenregs
6961 		      && end_xregno > valueno)
6962 		    return 0;
6963 		  if (goal_mem_addr_varies
6964 		      && reg_overlap_mentioned_for_reload_p (dest, goal))
6965 		    return 0;
6966 		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6967 		    return 0;
6968 		}
6969 	      else if (goal_mem && MEM_P (dest)
6970 		       && ! push_operand (dest, GET_MODE (dest)))
6971 		return 0;
6972 	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6973 		       && reg_equiv_memory_loc (regno) != 0)
6974 		return 0;
6975 	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6976 		return 0;
6977 	    }
6978 	  else if (GET_CODE (pat) == PARALLEL)
6979 	    {
6980 	      int i;
6981 	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6982 		{
6983 		  rtx v1 = XVECEXP (pat, 0, i);
6984 		  if (GET_CODE (v1) == COND_EXEC)
6985 		    v1 = COND_EXEC_CODE (v1);
6986 		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6987 		    {
6988 		      rtx dest = SET_DEST (v1);
6989 		      while (GET_CODE (dest) == SUBREG
6990 			     || GET_CODE (dest) == ZERO_EXTRACT
6991 			     || GET_CODE (dest) == STRICT_LOW_PART)
6992 			dest = XEXP (dest, 0);
6993 		      if (REG_P (dest))
6994 			{
6995 			  int xregno = REGNO (dest);
6996 			  int end_xregno = END_REGNO (dest);
6997 			  if (xregno < regno + nregs
6998 			      && end_xregno > regno)
6999 			    return 0;
7000 			  if (xregno < valueno + valuenregs
7001 			      && end_xregno > valueno)
7002 			    return 0;
7003 			  if (goal_mem_addr_varies
7004 			      && reg_overlap_mentioned_for_reload_p (dest,
7005 								     goal))
7006 			    return 0;
7007 			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7008 			    return 0;
7009 			}
7010 		      else if (goal_mem && MEM_P (dest)
7011 			       && ! push_operand (dest, GET_MODE (dest)))
7012 			return 0;
7013 		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7014 			       && reg_equiv_memory_loc (regno) != 0)
7015 			return 0;
7016 		      else if (need_stable_sp
7017 			       && push_operand (dest, GET_MODE (dest)))
7018 			return 0;
7019 		    }
7020 		}
7021 	    }
7022 
7023 	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7024 	    {
7025 	      rtx link;
7026 
7027 	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7028 		   link = XEXP (link, 1))
7029 		{
7030 		  pat = XEXP (link, 0);
7031 		  if (GET_CODE (pat) == CLOBBER)
7032 		    {
7033 		      rtx dest = SET_DEST (pat);
7034 
7035 		      if (REG_P (dest))
7036 			{
7037 			  int xregno = REGNO (dest);
7038 			  int end_xregno = END_REGNO (dest);
7039 
7040 			  if (xregno < regno + nregs
7041 			      && end_xregno > regno)
7042 			    return 0;
7043 			  else if (xregno < valueno + valuenregs
7044 				   && end_xregno > valueno)
7045 			    return 0;
7046 			  else if (goal_mem_addr_varies
7047 				   && reg_overlap_mentioned_for_reload_p (dest,
7048 								     goal))
7049 			    return 0;
7050 			}
7051 
7052 		      else if (goal_mem && MEM_P (dest)
7053 			       && ! push_operand (dest, GET_MODE (dest)))
7054 			return 0;
7055 		      else if (need_stable_sp
7056 			       && push_operand (dest, GET_MODE (dest)))
7057 			return 0;
7058 		    }
7059 		}
7060 	    }
7061 
7062 #if AUTO_INC_DEC
7063 	  /* If this insn auto-increments or auto-decrements
7064 	     either regno or valueno, return 0 now.
7065 	     If GOAL is a memory ref and its address is not constant,
7066 	     and this insn P increments a register used in GOAL, return 0.  */
7067 	  {
7068 	    rtx link;
7069 
7070 	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7071 	      if (REG_NOTE_KIND (link) == REG_INC
7072 		  && REG_P (XEXP (link, 0)))
7073 		{
7074 		  int incno = REGNO (XEXP (link, 0));
7075 		  if (incno < regno + nregs && incno >= regno)
7076 		    return 0;
7077 		  if (incno < valueno + valuenregs && incno >= valueno)
7078 		    return 0;
7079 		  if (goal_mem_addr_varies
7080 		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7081 							     goal))
7082 		    return 0;
7083 		}
7084 	  }
7085 #endif
7086 	}
7087     }
7088 }
7089 
7090 /* Find a place where INCED appears in an increment or decrement operator
7091    within X, and return the amount INCED is incremented or decremented by.
7092    The value is always positive.  */
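/* For example, for (mem:SI (post_inc:SI (reg:SI R))) the amount is
   GET_MODE_SIZE (SImode), typically 4; for a {PRE,POST}_MODIFY that
   adds (const_int -16) the value returned is 16.  */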
7093 
7094 static poly_int64
7095 find_inc_amount (rtx x, rtx inced)
7096 {
7097   enum rtx_code code = GET_CODE (x);
7098   const char *fmt;
7099   int i;
7100 
7101   if (code == MEM)
7102     {
7103       rtx addr = XEXP (x, 0);
7104       if ((GET_CODE (addr) == PRE_DEC
7105 	   || GET_CODE (addr) == POST_DEC
7106 	   || GET_CODE (addr) == PRE_INC
7107 	   || GET_CODE (addr) == POST_INC)
7108 	  && XEXP (addr, 0) == inced)
7109 	return GET_MODE_SIZE (GET_MODE (x));
7110       else if ((GET_CODE (addr) == PRE_MODIFY
7111 		|| GET_CODE (addr) == POST_MODIFY)
7112 	       && GET_CODE (XEXP (addr, 1)) == PLUS
7113 	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7114 	       && XEXP (addr, 0) == inced
7115 	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7116 	{
7117 	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7118 	  return i < 0 ? -i : i;
7119 	}
7120     }
7121 
7122   fmt = GET_RTX_FORMAT (code);
7123   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7124     {
7125       if (fmt[i] == 'e')
7126 	{
7127 	  poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7128 	  if (maybe_ne (tem, 0))
7129 	    return tem;
7130 	}
7131       if (fmt[i] == 'E')
7132 	{
7133 	  int j;
7134 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7135 	    {
7136 	      poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7137 	      if (maybe_ne (tem, 0))
7138 		return tem;
7139 	    }
7140 	}
7141     }
7142 
7143   return 0;
7144 }
7145 
7146 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7147    REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7148 
7149 static int
7150 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7151 			   rtx insn)
7152 {
7153   rtx link;
7154 
7155   if (!AUTO_INC_DEC)
7156     return 0;
7157 
7158   gcc_assert (insn);
7159 
7160   if (! INSN_P (insn))
7161     return 0;
7162 
7163   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7164     if (REG_NOTE_KIND (link) == REG_INC)
7165       {
7166 	unsigned int test = (int) REGNO (XEXP (link, 0));
7167 	if (test >= regno && test < endregno)
7168 	  return 1;
7169       }
7170   return 0;
7171 }
7172 
7173 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7174    If SETS is 1, also consider SETs.  If SETS is 2, enable checking
7175    REG_INC.  REGNO must refer to a hard register.  */
7176 
7177 int
7178 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7179 		   int sets)
7180 {
7181   /* regno must be a hard register.  */
7182   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7183 
7184   unsigned int endregno = end_hard_regno (mode, regno);
7185 
7186   if ((GET_CODE (PATTERN (insn)) == CLOBBER
7187        || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7188       && REG_P (XEXP (PATTERN (insn), 0)))
7189     {
7190       unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7191 
7192       return test >= regno && test < endregno;
7193     }
7194 
7195   if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7196     return 1;
7197 
7198   if (GET_CODE (PATTERN (insn)) == PARALLEL)
7199     {
7200       int i = XVECLEN (PATTERN (insn), 0) - 1;
7201 
7202       for (; i >= 0; i--)
7203 	{
7204 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7205 	  if ((GET_CODE (elt) == CLOBBER
7206 	       || (sets == 1 && GET_CODE (elt) == SET))
7207 	      && REG_P (XEXP (elt, 0)))
7208 	    {
7209 	      unsigned int test = REGNO (XEXP (elt, 0));
7210 
7211 	      if (test >= regno && test < endregno)
7212 		return 1;
7213 	    }
7214 	  if (sets == 2
7215 	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7216 	    return 1;
7217 	}
7218     }
7219 
7220   return 0;
7221 }
7222 
7223 /* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
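/* For example, asking for the SImode low part of a DImode register pair
   yields the first register of the pair, or the second one if
   REG_WORDS_BIG_ENDIAN, since the low-order word then lives in the
   higher-numbered register.  */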
7224 rtx
7225 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7226 {
7227   int regno;
7228 
7229   if (GET_MODE (reloadreg) == mode)
7230     return reloadreg;
7231 
7232   regno = REGNO (reloadreg);
7233 
7234   if (REG_WORDS_BIG_ENDIAN)
7235     regno += ((int) REG_NREGS (reloadreg)
7236 	      - (int) hard_regno_nregs (regno, mode));
7237 
7238   return gen_rtx_REG (mode, regno);
7239 }
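/* For example, on a target where REG_WORDS_BIG_ENDIAN holds, where DImode
   occupies two hard registers and SImode occupies one (the register
   numbers below are illustrative), reducing (reg:DI 8) to SImode yields
   (reg:SI 9): the register number is advanced by REG_NREGS minus the
   number of registers needed for MODE, i.e. by one, to reach the low
   word.  */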

static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};
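
/* The table above is indexed by (int) rld[r].when_needed in
   debug_reload_to_stream below, so its entries are assumed to follow
   the order of the reload_when_needed enumeration.  */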

/* These functions are used to print the variables set by 'find_reloads'.  */

DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
	{
	  fprintf (f, "reload_in (%s) = ",
		   GET_MODE_NAME (rld[r].inmode));
	  print_inline_rtx (f, rld[r].in, 24);
	  fprintf (f, "\n\t");
	}

      if (rld[r].out != 0)
	{
	  fprintf (f, "reload_out (%s) = ",
		   GET_MODE_NAME (rld[r].outmode));
	  print_inline_rtx (f, rld[r].out, 24);
	  fprintf (f, "\n\t");
	}

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
	       reload_when_needed_name[(int) rld[r].when_needed],
	       rld[r].opnum);

      if (rld[r].optional)
	fprintf (f, ", optional");

      if (rld[r].nongroup)
	fprintf (f, ", nongroup");

      if (maybe_ne (rld[r].inc, 0))
	{
	  fprintf (f, ", inc by ");
	  print_dec (rld[r].inc, f, SIGNED);
	}

      if (rld[r].nocombine)
	fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
	fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
	{
	  fprintf (f, "\n\treload_in_reg: ");
	  print_inline_rtx (f, rld[r].in_reg, 24);
	}

      if (rld[r].out_reg != 0)
	{
	  fprintf (f, "\n\treload_out_reg: ");
	  print_inline_rtx (f, rld[r].out_reg, 24);
	}

      if (rld[r].reg_rtx != 0)
	{
	  fprintf (f, "\n\treload_reg_rtx: ");
	  print_inline_rtx (f, rld[r].reg_rtx, 24);
	}

      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
	{
	  fprintf (f, "%ssecondary_in_reload = %d",
		   prefix, rld[r].secondary_in_reload);
	  prefix = ", ";
	}

      if (rld[r].secondary_out_reload != -1)
	fprintf (f, "%ssecondary_out_reload = %d\n",
		 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
	{
	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
		   insn_data[rld[r].secondary_in_icode].name);
	  prefix = ", ";
	}

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
	fprintf (f, "%ssecondary_out_icode = %s", prefix,
		 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}

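/* Convenience wrapper so that the reloads set up for the current insn
   can be dumped from a debugger, e.g. "call debug_reload ()" under gdb;
   it writes to stderr.  */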
DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);
}