1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
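
/* As a rough sketch of the caller protocol described above (reload1.c is
   the real driver; this is illustrative only, not a transcript of its code):

       find_reloads (insn, replace, ind_levels, ...);
       for each reload R in rld[0 .. n_reloads-1]:
         choose a hard reg and set rld[R].reg_rtx, unless find_reloads
         already chose one;
       emit the load insns before INSN and any store insns after it;
       subst_reloads (insn);
*/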
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109
110 /* True if X is a constant that can be forced into the constant pool.
111 MODE is the mode of the operand, or VOIDmode if not known. */
112 #define CONST_POOL_OK_P(MODE, X) \
113 ((MODE) != VOIDmode \
114 && CONSTANT_P (X) \
115 && GET_CODE (X) != HIGH \
116 && !targetm.cannot_force_const_mem (MODE, X))
117
118 /* True if C is a non-empty register class that has too few registers
119 to be safely used as a reload target class. */
120
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
127 }
128
129
130 /* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134
135 /* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139
140 int reload_n_operands;
141
142 /* Replacing reloads.
143
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
148
149 /* Nonzero means record the places to replace. */
150 static int replace_reloads;
151
152 /* Each replacement is recorded with a structure like this. */
153 struct replacement
154 {
155 rtx *where; /* Location to store in */
156 int what; /* which reload this is for */
157 machine_mode mode; /* mode it must have */
158 };
159
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161
162 /* Number of replacements currently recorded. */
163 static int n_replacements;
164
165 /* Used to track what is modified by an operand. */
166 struct decomposition
167 {
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 poly_int64_pod start; /* Starting offset or register number. */
172 poly_int64_pod end; /* Ending offset or register number. */
173 };
174
175 /* Save MEMs needed to copy from one class of registers to another. One MEM
176 is used per mode, but normally only one or two modes are ever used.
177
178 We keep two versions, before and after register elimination. The one
179 after register elimination is recorded separately for each operand. This
180 is done in case the address is not valid, to be sure that we separately
181 reload each one. */
182
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
186
187 /* The instruction we are doing reloads for;
188 so we can test whether a register dies in it. */
189 static rtx_insn *this_insn;
190
191 /* Nonzero if this instruction is a user-specified asm with operands. */
192 static int this_insn_is_asm;
193
194 /* If hard_regs_live_known is nonzero,
195 we can tell which hard regs are currently live,
196 at least enough to succeed in choosing dummy reloads. */
197 static int hard_regs_live_known;
198
199 /* Indexed by hard reg number,
200 element is nonnegative if hard reg has been spilled.
201 This vector is passed to `find_reloads' as an argument
202 and is not changed here. */
203 static short *static_reload_reg_p;
204
205 /* Set to 1 in subst_reg_equivs if it changes anything. */
206 static int subst_reg_equivs_changed;
207
208 /* On return from push_reload, holds the reload-number for the OUT
209 operand, which can be different from the reload-number for the input operand. */
210 static int output_reloadnum;
211
212 /* Compare two RTX's. */
213 #define MATCHES(x, y) \
214 (x == y || (x != 0 && (REG_P (x) \
215 ? REG_P (y) && REGNO (x) == REGNO (y) \
216 : rtx_equal_p (x, y) && ! side_effects_p (x))))
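
/* For example, MATCHES considers (reg:SI 3) and (reg:HI 3) to match, since
   two REGs are compared by register number alone; any other pair of rtx's
   must be structurally equal and free of side effects, so two distinct
   volatile MEMs never match. */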
217
218 /* Indicates whether two reload purposes are for similar enough things that we
219 can merge their reloads. */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
222 || ((when1) == (when2) && (op1) == (op2)) \
223 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
225 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
226 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
227 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
228
229 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231 ((when1) != (when2) \
232 || ! ((op1) == (op2) \
233 || (when1) == RELOAD_FOR_INPUT \
234 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
235 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
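
/* For example, a RELOAD_FOR_INPUT reload may be merged with a RELOAD_OTHER
   reload (first clause of MERGABLE_RELOADS); the types differ, so
   MERGE_TO_OTHER is nonzero and the merged reload becomes RELOAD_OTHER.
   Two RELOAD_FOR_INPUT reloads for different operands also merge, but for
   them MERGE_TO_OTHER is zero and the result stays RELOAD_FOR_INPUT. */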
236
237 /* If we are going to reload an address, compute the reload type to
238 use. */
239 #define ADDR_TYPE(type) \
240 ((type) == RELOAD_FOR_INPUT_ADDRESS \
241 ? RELOAD_FOR_INPADDR_ADDRESS \
242 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
243 ? RELOAD_FOR_OUTADDR_ADDRESS \
244 : (type)))
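
/* Thus a reload of the address used by a RELOAD_FOR_INPUT_ADDRESS reload
   gets type RELOAD_FOR_INPADDR_ADDRESS, the output case maps analogously,
   and every other type is passed through unchanged. */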
245
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 machine_mode, enum reload_type,
248 enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
266 addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 enum rtx_code, enum rtx_code, rtx *,
274 int, enum reload_type,int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 machine_mode, int,
277 enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 rtx, rtx *);
285
286 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287 list yet. */
288
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
291 {
292 rtx it;
293
294 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295 if (rtx_equal_p (XEXP (it, 0), mem))
296 return;
297
298 reg_equiv_alt_mem_list (regno)
299 = alloc_EXPR_LIST (REG_EQUIV, mem,
300 reg_equiv_alt_mem_list (regno));
301 }
302
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304 nonzero) or storing (if IN_P is zero) X to or from a reload register of
305 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
306 are needed, push them.
307
308 Return the reload number of the secondary reload we made, or -1 if
309 we didn't need one. *PICODE is set to the insn_code to use if we do
310 need a secondary reload. */
311
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 enum reg_class reload_class,
315 machine_mode reload_mode, enum reload_type type,
316 enum insn_code *picode, secondary_reload_info *prev_sri)
317 {
318 enum reg_class rclass = NO_REGS;
319 enum reg_class scratch_class;
320 machine_mode mode = reload_mode;
321 enum insn_code icode = CODE_FOR_nothing;
322 enum insn_code t_icode = CODE_FOR_nothing;
323 enum reload_type secondary_type;
324 int s_reload, t_reload = -1;
325 const char *scratch_constraint;
326 secondary_reload_info sri;
327
328 if (type == RELOAD_FOR_INPUT_ADDRESS
329 || type == RELOAD_FOR_OUTPUT_ADDRESS
330 || type == RELOAD_FOR_INPADDR_ADDRESS
331 || type == RELOAD_FOR_OUTADDR_ADDRESS)
332 secondary_type = type;
333 else
334 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335
336 *picode = CODE_FOR_nothing;
337
338 /* If X is a paradoxical SUBREG, use the inner value to determine both the
339 mode and object being reloaded. */
340 if (paradoxical_subreg_p (x))
341 {
342 x = SUBREG_REG (x);
343 reload_mode = GET_MODE (x);
344 }
345
346 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347 is still a pseudo-register by now, it *must* have an equivalent MEM
348 but we don't want to assume that), use that equivalent when seeing if
349 a secondary reload is needed since whether or not a reload is needed
350 might be sensitive to the form of the MEM. */
351
352 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353 && reg_equiv_mem (REGNO (x)))
354 x = reg_equiv_mem (REGNO (x));
355
356 sri.icode = CODE_FOR_nothing;
357 sri.prev_sri = prev_sri;
358 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 reload_mode, &sri);
360 icode = (enum insn_code) sri.icode;
361
362 /* If we don't need any secondary registers, done. */
363 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364 return -1;
365
366 if (rclass != NO_REGS)
367 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 reload_mode, type, &t_icode, &sri);
369
370 /* If we will be using an insn, the secondary reload is for a
371 scratch register. */
372
373 if (icode != CODE_FOR_nothing)
374 {
375 /* If IN_P is nonzero, the reload register will be the output in
376 operand 0. If IN_P is zero, the reload register will be the input
377 in operand 1. Outputs should have an initial "=", which we must
378 skip. */
379
380 /* ??? It would be useful to be able to handle only two, or more than
381 three, operands, but for now we can only handle the case of having
382 exactly three: output, input and one temp/scratch. */
383 gcc_assert (insn_data[(int) icode].n_operands == 3);
384
385 /* ??? We currently have no way to represent a reload that needs
386 an icode to reload from an intermediate tertiary reload register.
387 We should probably have a new field in struct reload to tag a
388 chain of scratch operand reloads onto. */
389 gcc_assert (rclass == NO_REGS);
390
391 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392 gcc_assert (*scratch_constraint == '=');
393 scratch_constraint++;
394 if (*scratch_constraint == '&')
395 scratch_constraint++;
396 scratch_class = (reg_class_for_constraint
397 (lookup_constraint (scratch_constraint)));
398
399 rclass = scratch_class;
400 mode = insn_data[(int) icode].operand[2].mode;
401 }
402
403 /* This case isn't valid, so fail. Reload is allowed to use the same
404 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405 in the case of a secondary register, we actually need two different
406 registers for correct code. We fail here to prevent the possibility of
407 silently generating incorrect code later.
408
409 The convention is that secondary input reloads are valid only if the
410 secondary_class is different from class. If you have such a case, you
411 cannot use secondary reloads; you must work around the problem some
412 other way.
413
414 Allow this when a reload_in/out pattern is being used. I.e. assume
415 that the generated code handles this case. */
416
417 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 || t_icode != CODE_FOR_nothing);
419
420 /* See if we can reuse an existing secondary reload. */
421 for (s_reload = 0; s_reload < n_reloads; s_reload++)
422 if (rld[s_reload].secondary_p
423 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 && ((in_p && rld[s_reload].inmode == mode)
426 || (! in_p && rld[s_reload].outmode == mode))
427 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 && (small_register_class_p (rclass)
432 || targetm.small_register_classes_for_mode_p (VOIDmode))
433 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 opnum, rld[s_reload].opnum))
435 {
436 if (in_p)
437 rld[s_reload].inmode = mode;
438 if (! in_p)
439 rld[s_reload].outmode = mode;
440
441 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 rld[s_reload].rclass = rclass;
443
444 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 rld[s_reload].optional &= optional;
446 rld[s_reload].secondary_p = 1;
447 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 opnum, rld[s_reload].opnum))
449 rld[s_reload].when_needed = RELOAD_OTHER;
450
451 break;
452 }
453
454 if (s_reload == n_reloads)
455 {
456 /* If we need a memory location to copy between the two reload regs,
457 set it up now. Note that we do the input case before making
458 the reload and the output case after. This is due to the
459 way reloads are output. */
460
461 if (in_p && icode == CODE_FOR_nothing
462 && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 {
464 get_secondary_mem (x, reload_mode, opnum, type);
465
466 /* We may have just added new reloads. Make sure we add
467 the new reload at the end. */
468 s_reload = n_reloads;
469 }
470
471 /* We need to make a new secondary reload for this register class. */
472 rld[s_reload].in = rld[s_reload].out = 0;
473 rld[s_reload].rclass = rclass;
474
475 rld[s_reload].inmode = in_p ? mode : VOIDmode;
476 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477 rld[s_reload].reg_rtx = 0;
478 rld[s_reload].optional = optional;
479 rld[s_reload].inc = 0;
480 /* Maybe we could combine these, but it seems too tricky. */
481 rld[s_reload].nocombine = 1;
482 rld[s_reload].in_reg = 0;
483 rld[s_reload].out_reg = 0;
484 rld[s_reload].opnum = opnum;
485 rld[s_reload].when_needed = secondary_type;
486 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489 rld[s_reload].secondary_out_icode
490 = ! in_p ? t_icode : CODE_FOR_nothing;
491 rld[s_reload].secondary_p = 1;
492
493 n_reloads++;
494
495 if (! in_p && icode == CODE_FOR_nothing
496 && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 get_secondary_mem (x, mode, opnum, type);
498 }
499
500 *picode = icode;
501 return s_reload;
502 }
503
504 /* If a secondary reload is needed, return its class. If both an intermediate
505 register and a scratch register are needed, we return the class of the
506 intermediate register. */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 rtx x)
510 {
511 enum insn_code icode;
512 secondary_reload_info sri;
513
514 sri.icode = CODE_FOR_nothing;
515 sri.prev_sri = NULL;
516 rclass
517 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518 icode = (enum insn_code) sri.icode;
519
520 /* If there are no secondary reloads at all, we return NO_REGS.
521 If an intermediate register is needed, we return its class. */
522 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523 return rclass;
524
525 /* No intermediate register is needed, but we have a special reload
526 pattern, which we assume for now needs a scratch register. */
527 return scratch_reload_class (icode);
528 }
529
530 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
531 three operands, verify that operand 2 is an output operand, and return
532 its register class.
533 ??? We'd like to be able to handle any pattern with at least 2 operands,
534 for zero or more scratch registers, but that needs more infrastructure. */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
537 {
538 const char *scratch_constraint;
539 enum reg_class rclass;
540
541 gcc_assert (insn_data[(int) icode].n_operands == 3);
542 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543 gcc_assert (*scratch_constraint == '=');
544 scratch_constraint++;
545 if (*scratch_constraint == '&')
546 scratch_constraint++;
547 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548 gcc_assert (rclass != NO_REGS);
549 return rclass;
550 }
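
/* For instance, with a scratch operand constraint of "=&r" the '=' and '&'
   are skipped above and the class associated with 'r' (GENERAL_REGS on
   typical targets) is returned. */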
551
552 /* Return a memory location that will be used to copy X in mode MODE.
553 If we haven't already made a location for this mode in this insn,
554 call find_reloads_address on the location being returned. */
555
556 rtx
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 int opnum, enum reload_type type)
559 {
560 rtx loc;
561 int mem_valid;
562
563 /* By default, if MODE is narrower than a word, widen it to a word.
564 This is required because most machines that require these memory
565 locations do not support short loads and stores from all registers
566 (e.g., FP registers). */
567
568 mode = targetm.secondary_memory_needed_mode (mode);
569
570 /* If we already have made a MEM for this operand in MODE, return it. */
571 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572 return secondary_memlocs_elim[(int) mode][opnum];
573
574 /* If this is the first time we've tried to get a MEM for this mode,
575 allocate a new one. `something_changed' in reload will get set
576 by noticing that the frame size has changed. */
577
578 if (secondary_memlocs[(int) mode] == 0)
579 {
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583 secondary_memlocs[(int) mode]
584 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
586 }
587
588 /* Get a version of the address doing any eliminations needed. If that
589 didn't give us a new MEM, make a new one if it isn't valid. */
590
591 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 MEM_ADDR_SPACE (loc));
594
595 if (! mem_valid && loc == secondary_memlocs[(int) mode])
596 loc = copy_rtx (loc);
597
598 /* The only time the call below will do anything is if the stack
599 offset is too large. In that case IND_LEVELS doesn't matter, so we
600 can just pass a zero. Adjust the type to be the address of the
601 corresponding object. If the address was valid, save the eliminated
602 address. If it wasn't valid, we need to make a reload each time, so
603 don't save it. */
604
605 if (! mem_valid)
606 {
607 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 : RELOAD_OTHER);
610
611 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 opnum, type, 0, 0);
613 }
614
615 secondary_memlocs_elim[(int) mode][opnum] = loc;
616 if (secondary_memlocs_elim_used <= (int)mode)
617 secondary_memlocs_elim_used = (int)mode + 1;
618 return loc;
619 }
620
621 /* Clear any secondary memory locations we've made. */
622
623 void
624 clear_secondary_mem (void)
625 {
626 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627 }
628
629
630 /* Find the largest class which has at least one register valid in
631 mode INNER, and which for every such register, that register number
632 plus N is also valid in OUTER (if in range) and is cheap to move
633 into REGNO. Such a class must exist. */
634
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 machine_mode inner ATTRIBUTE_UNUSED, int n,
638 unsigned int dest_regno ATTRIBUTE_UNUSED)
639 {
640 int best_cost = -1;
641 int rclass;
642 int regno;
643 enum reg_class best_class = NO_REGS;
644 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645 unsigned int best_size = 0;
646 int cost;
647
648 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649 {
650 int bad = 0;
651 int good = 0;
652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 {
655 if (targetm.hard_regno_mode_ok (regno, inner))
656 {
657 good = 1;
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 && !targetm.hard_regno_mode_ok (regno + n, outer))
660 bad = 1;
661 }
662 }
663
664 if (bad || !good)
665 continue;
666 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667
668 if ((reg_class_size[rclass] > best_size
669 && (best_cost < 0 || best_cost >= cost))
670 || best_cost > cost)
671 {
672 best_class = (enum reg_class) rclass;
673 best_size = reg_class_size[rclass];
674 best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 dest_class);
676 }
677 }
678
679 gcc_assert (best_size != 0);
680
681 return best_class;
682 }
683
684 /* We are trying to reload a subreg of something that is not a register.
685 Find the largest class which contains only registers valid in
686 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
687 which we would eventually like to obtain the object. */
688
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 machine_mode mode ATTRIBUTE_UNUSED,
692 enum reg_class dest_class ATTRIBUTE_UNUSED)
693 {
694 int best_cost = -1;
695 int rclass;
696 int regno;
697 enum reg_class best_class = NO_REGS;
698 unsigned int best_size = 0;
699 int cost;
700
701 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702 {
703 unsigned int computed_rclass_size = 0;
704
705 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706 {
707 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 && targetm.hard_regno_mode_ok (regno, mode))
709 computed_rclass_size++;
710 }
711
712 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713
714 if ((computed_rclass_size > best_size
715 && (best_cost < 0 || best_cost >= cost))
716 || best_cost > cost)
717 {
718 best_class = (enum reg_class) rclass;
719 best_size = computed_rclass_size;
720 best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 dest_class);
722 }
723 }
724
725 gcc_assert (best_size != 0);
726
727 #ifdef LIMIT_RELOAD_CLASS
728 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730 return best_class;
731 }
732
733 /* Return the number of a previously made reload that can be combined with
734 a new one, or n_reloads if none of the existing reloads can be used.
735 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736 push_reload, they determine the kind of the new reload that we try to
737 combine. P_IN points to the corresponding value of IN, which can be
738 modified by this function.
739 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
740
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 enum reload_type type, int opnum, int dont_share)
744 {
745 rtx in = *p_in;
746 int i;
747 /* We can't merge two reloads if the output of either one is
748 earlyclobbered. */
749
750 if (earlyclobber_operand_p (out))
751 return n_reloads;
752
753 /* We can use an existing reload if the class is right
754 and at least one of IN and OUT is a match
755 and the other is at worst neutral.
756 (A zero compared against anything is neutral.)
757
758 For targets with small register classes, don't use existing reloads
759 unless they are for the same thing since that can cause us to need
760 more reload registers than we otherwise would. */
761
762 for (i = 0; i < n_reloads; i++)
763 if ((reg_class_subset_p (rclass, rld[i].rclass)
764 || reg_class_subset_p (rld[i].rclass, rclass))
765 /* If the existing reload has a register, it must fit our class. */
766 && (rld[i].reg_rtx == 0
767 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 true_regnum (rld[i].reg_rtx)))
769 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 || (out != 0 && MATCHES (rld[i].out, out)
772 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 && (small_register_class_p (rclass)
775 || targetm.small_register_classes_for_mode_p (VOIDmode))
776 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777 return i;
778
779 /* Reloading a plain reg for input can match a reload to postincrement
780 that reg, since the postincrement's value is the right value.
781 Likewise, it can match a preincrement reload, since we regard
782 the preincrementation as happening before any ref in this insn
783 to that register. */
784 for (i = 0; i < n_reloads; i++)
785 if ((reg_class_subset_p (rclass, rld[i].rclass)
786 || reg_class_subset_p (rld[i].rclass, rclass))
787 /* If the existing reload has a register, it must fit our
788 class. */
789 && (rld[i].reg_rtx == 0
790 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 true_regnum (rld[i].reg_rtx)))
792 && out == 0 && rld[i].out == 0 && rld[i].in != 0
793 && ((REG_P (in)
794 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 && MATCHES (XEXP (rld[i].in, 0), in))
796 || (REG_P (rld[i].in)
797 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 && MATCHES (XEXP (in, 0), rld[i].in)))
799 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 && (small_register_class_p (rclass)
801 || targetm.small_register_classes_for_mode_p (VOIDmode))
802 && MERGABLE_RELOADS (type, rld[i].when_needed,
803 opnum, rld[i].opnum))
804 {
805 /* Make sure reload_in ultimately has the increment,
806 not the plain register. */
807 if (REG_P (in))
808 *p_in = rld[i].in;
809 return i;
810 }
811 return n_reloads;
812 }
813
814 /* Return true if:
815
816 (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817 of a multiword value; and
818
819 (b) the number of *words* in REG does not match the number of *registers*
820 in REG. */
821
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824 {
825 machine_mode inner_mode = GET_MODE (reg);
826 poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827 return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
830 reg_words, UNITS_PER_WORD));
831 }
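
/* Illustrative case, assuming a target with 4-byte words where a DFmode
   value fits in one 8-byte hard register: for (subreg:SI (reg:DF R) 0)
   the outer mode fits in a word, the inner mode is wider than a word,
   and REG_NREGS (R) * UNITS_PER_WORD is 4 while the inner mode occupies
   8 bytes (two words), so the word count and register count disagree and
   the function returns true. */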
832
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834 expression. MODE is the mode that X will be used in. OUTPUT is true if
835 the function is invoked for the output part of an enclosing reload. */
836
837 static bool
838 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839 {
840 rtx inner;
841
842 /* Only SUBREGs are problematical. */
843 if (GET_CODE (x) != SUBREG)
844 return false;
845
846 inner = SUBREG_REG (x);
847
848 /* If INNER is a constant or PLUS, then INNER will need reloading. */
849 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850 return true;
851
852 /* If INNER is not a hard register, then INNER will not need reloading. */
853 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854 return false;
855
856 /* If INNER is not ok for MODE, then INNER will need reloading. */
857 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
858 return true;
859
860 /* If this is for an output, and the outer part is a word or smaller,
861 INNER is larger than a word and the number of registers in INNER is
862 not the same as the number of words in INNER, then INNER will need
863 reloading (with an in-out reload). */
864 return output && complex_word_subreg_p (mode, inner);
865 }
866
867 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868 requiring an extra reload register. The caller has already found that
869 IN contains some reference to REGNO, so check that we can produce the
870 new value in a single step. E.g. if we have
871 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872 instruction that adds one to a register, this should succeed.
873 However, if we have something like
874 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875 needs to be loaded into a register first, we need a separate reload
876 register.
877 Such PLUS reloads are generated by find_reloads_address_part.
878 The out-of-range PLUS expressions are usually introduced in the instruction
879 patterns by register elimination and substituting pseudos without a home
880 by their function-invariant equivalences. */
881 static int
882 can_reload_into (rtx in, int regno, machine_mode mode)
883 {
884 rtx dst;
885 rtx_insn *test_insn;
886 int r = 0;
887 struct recog_data_d save_recog_data;
888
889 /* For matching constraints, we often get notional input reloads where
890 we want to use the original register as the reload register. I.e.
891 technically this is a non-optional input-output reload, but IN is
892 already a valid register, and has been chosen as the reload register.
893 Speed this up, since it trivially works. */
894 if (REG_P (in))
895 return 1;
896
897 /* To test MEMs properly, we'd have to take into account all the reloads
898 that are already scheduled, which can become quite complicated.
899 And since we've already handled address reloads for this MEM, it
900 should always succeed anyway. */
901 if (MEM_P (in))
902 return 1;
903
904 /* If we can make a simple SET insn that does the job, everything should
905 be fine. */
906 dst = gen_rtx_REG (mode, regno);
907 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908 save_recog_data = recog_data;
909 if (recog_memoized (test_insn) >= 0)
910 {
911 extract_insn (test_insn);
912 r = constrain_operands (1, get_enabled_alternatives (test_insn));
913 }
914 recog_data = save_recog_data;
915 return r;
916 }
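
/* The probe above works by wrapping IN in a throwaway (set (reg:MODE REGNO)
   IN) insn, asking recog_memoized whether any pattern matches it and
   constrain_operands whether the constraints can be satisfied, and then
   restoring recog_data so the caller's recognition state is untouched. */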
917
918 /* Record one reload that needs to be performed.
919 IN is an rtx saying where the data are to be found before this instruction.
920 OUT says where they must be stored after the instruction.
921 (IN is zero for data not read, and OUT is zero for data not written.)
922 INLOC and OUTLOC point to the places in the instructions where
923 IN and OUT were found.
924 If IN and OUT are both nonzero, it means the same register must be used
925 to reload both IN and OUT.
926
927 RCLASS is a register class required for the reloaded data.
928 INMODE is the machine mode that the instruction requires
929 for the reg that replaces IN and OUTMODE is likewise for OUT.
930
931 If IN is zero, then OUT's location and mode should be passed as
932 INLOC and INMODE.
933
934 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
935
936 OPTIONAL nonzero means this reload does not need to be performed:
937 it can be discarded if that is more convenient.
938
939 OPNUM and TYPE say what the purpose of this reload is.
940
941 The return value is the reload-number for this reload.
942
943 If both IN and OUT are nonzero, in some rare cases we might
944 want to make two separate reloads. (Actually we never do this now.)
945 Therefore, the reload-number for OUT is stored in
946 output_reloadnum when we return; the return value applies to IN.
947 Usually (presently always), when IN and OUT are nonzero,
948 the two reload-numbers are equal, but the caller should be careful to
949 distinguish them. */
950
951 int
952 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 enum reg_class rclass, machine_mode inmode,
954 machine_mode outmode, int strict_low, int optional,
955 int opnum, enum reload_type type)
956 {
957 int i;
958 int dont_share = 0;
959 int dont_remove_subreg = 0;
960 #ifdef LIMIT_RELOAD_CLASS
961 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962 #endif
963 int secondary_in_reload = -1, secondary_out_reload = -1;
964 enum insn_code secondary_in_icode = CODE_FOR_nothing;
965 enum insn_code secondary_out_icode = CODE_FOR_nothing;
966 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967 subreg_in_class = NO_REGS;
968
969 /* INMODE and/or OUTMODE could be VOIDmode if no mode
970 has been specified for the operand. In that case,
971 use the operand's mode as the mode to reload. */
972 if (inmode == VOIDmode && in != 0)
973 inmode = GET_MODE (in);
974 if (outmode == VOIDmode && out != 0)
975 outmode = GET_MODE (out);
976
977 /* If find_reloads and friends have so far failed to replace a pseudo
978 with a constant from reg_equiv_constant, something went wrong
979 beforehand.
980 Note that it can't simply be done here if we missed it earlier
981 since the constant might need to be pushed into the literal pool
982 and the resulting memref would probably need further
983 reloading. */
984 if (in != 0 && REG_P (in))
985 {
986 int regno = REGNO (in);
987
988 gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 || reg_renumber[regno] >= 0
990 || reg_equiv_constant (regno) == NULL_RTX);
991 }
992
993 /* reg_equiv_constant only contains constants which are obviously
994 not appropriate as destination. So if we would need to replace
995 the destination pseudo with a constant we are in real
996 trouble. */
997 if (out != 0 && REG_P (out))
998 {
999 int regno = REGNO (out);
1000
1001 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 || reg_renumber[regno] >= 0
1003 || reg_equiv_constant (regno) == NULL_RTX);
1004 }
1005
1006 /* If we have a read-write operand with an address side-effect,
1007 change either IN or OUT so the side-effect happens only once. */
1008 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009 switch (GET_CODE (XEXP (in, 0)))
1010 {
1011 case POST_INC: case POST_DEC: case POST_MODIFY:
1012 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 break;
1014
1015 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 break;
1018
1019 default:
1020 break;
1021 }
1022
1023 /* If we are reloading a (SUBREG constant ...), really reload just the
1024 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1025 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027 register is a pseudo, also reload the inside expression.
1028 For machines that extend byte loads, do this for any SUBREG of a pseudo
1029 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030 M2 is an integral mode that gets extended when loaded.
1031 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032 where either M1 is not valid for R or M2 is wider than a word but we
1033 only need one register to store an M2-sized quantity in R.
1034 (However, if OUT is nonzero, we need to reload the reg *and*
1035 the subreg, so do nothing here, and let following statement handle it.)
1036
1037 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038 we can't handle it here because CONST_INT does not indicate a mode.
1039
1040 Similarly, we must reload the inside expression if we have a
1041 STRICT_LOW_PART (presumably, in == out in this case).
1042
1043 Also reload the inner expression if it does not require a secondary
1044 reload but the SUBREG does.
1045
1046 Finally, reload the inner expression if it is a register that is in
1047 the class whose registers cannot be referenced in a different size
1048 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1049 cannot reload just the inside since we might end up with the wrong
1050 register class. But if it is inside a STRICT_LOW_PART, we have
1051 no choice, so we hope we do get the right register class there. */
1052
1053 scalar_int_mode inner_mode;
1054 if (in != 0 && GET_CODE (in) == SUBREG
1055 && (subreg_lowpart_p (in) || strict_low)
1056 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1057 inmode, rclass)
1058 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1059 && (CONSTANT_P (SUBREG_REG (in))
1060 || GET_CODE (SUBREG_REG (in)) == PLUS
1061 || strict_low
1062 || (((REG_P (SUBREG_REG (in))
1063 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1064 || MEM_P (SUBREG_REG (in)))
1065 && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1066 || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1067 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1068 &inner_mode)
1069 && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1070 && paradoxical_subreg_p (inmode, inner_mode)
1071 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1072 || (WORD_REGISTER_OPERATIONS
1073 && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1074 && (known_equal_after_align_down
1075 (GET_MODE_SIZE (inmode) - 1,
1076 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1,
1077 UNITS_PER_WORD)))))
1078 || (REG_P (SUBREG_REG (in))
1079 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1080 /* The case where out is nonzero
1081 is handled differently in the following statement. */
1082 && (out == 0 || subreg_lowpart_p (in))
1083 && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1084 || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
1085 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1086 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1087 SUBREG_REG (in))
1088 == NO_REGS))
1089 || (REG_P (SUBREG_REG (in))
1090 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1091 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1092 GET_MODE (SUBREG_REG (in)), inmode))))
1093 {
1094 #ifdef LIMIT_RELOAD_CLASS
1095 in_subreg_loc = inloc;
1096 #endif
1097 inloc = &SUBREG_REG (in);
1098 in = *inloc;
1099
1100 if (!WORD_REGISTER_OPERATIONS
1101 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1102 && MEM_P (in))
1103 /* This is supposed to happen only for paradoxical subregs made by
1104 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1105 gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1106 GET_MODE_SIZE (inmode)));
1107
1108 inmode = GET_MODE (in);
1109 }
1110
1111 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1112 where M1 is not valid for R if it was not handled by the code above.
1113
1114 Similar issue for (SUBREG constant ...) if it was not handled by the
1115 code above. This can happen if SUBREG_BYTE != 0.
1116
1117 However, we must reload the inner reg *as well as* the subreg in
1118 that case. */
1119
1120 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1121 {
1122 if (REG_P (SUBREG_REG (in)))
1123 subreg_in_class
1124 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1125 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1126 GET_MODE (SUBREG_REG (in)),
1127 SUBREG_BYTE (in),
1128 GET_MODE (in)),
1129 REGNO (SUBREG_REG (in)));
1130 else if (CONSTANT_P (SUBREG_REG (in))
1131 || GET_CODE (SUBREG_REG (in)) == PLUS)
1132 subreg_in_class = find_valid_class_1 (inmode,
1133 GET_MODE (SUBREG_REG (in)),
1134 rclass);
1135
1136 /* This relies on the fact that emit_reload_insns outputs the
1137 instructions for input reloads of type RELOAD_OTHER in the same
1138 order as the reloads. Thus if the outer reload is also of type
1139 RELOAD_OTHER, we are guaranteed that this inner reload will be
1140 output before the outer reload. */
1141 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1142 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1143 dont_remove_subreg = 1;
1144 }
1145
1146 /* Similarly for paradoxical and problematical SUBREGs on the output.
1147 Note that there is no reason we need worry about the previous value
1148 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1149 entitled to clobber it all (except in the case of a word mode subreg
1150 or of a STRICT_LOW_PART, in that latter case the constraint should
1151 label it input-output.) */
1152 if (out != 0 && GET_CODE (out) == SUBREG
1153 && (subreg_lowpart_p (out) || strict_low)
1154 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1155 outmode, rclass)
1156 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1157 && (CONSTANT_P (SUBREG_REG (out))
1158 || strict_low
1159 || (((REG_P (SUBREG_REG (out))
1160 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1161 || MEM_P (SUBREG_REG (out)))
1162 && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1163 || (WORD_REGISTER_OPERATIONS
1164 && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1165 && (known_equal_after_align_down
1166 (GET_MODE_SIZE (outmode) - 1,
1167 GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1168 UNITS_PER_WORD)))))
1169 || (REG_P (SUBREG_REG (out))
1170 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1171 /* The case of a word mode subreg
1172 is handled differently in the following statement. */
1173 && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1174 && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1175 UNITS_PER_WORD))
1176 && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1177 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1178 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1179 SUBREG_REG (out))
1180 == NO_REGS))
1181 || (REG_P (SUBREG_REG (out))
1182 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1183 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1184 GET_MODE (SUBREG_REG (out)),
1185 outmode))))
1186 {
1187 #ifdef LIMIT_RELOAD_CLASS
1188 out_subreg_loc = outloc;
1189 #endif
1190 outloc = &SUBREG_REG (out);
1191 out = *outloc;
1192 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1193 || known_le (GET_MODE_SIZE (GET_MODE (out)),
1194 GET_MODE_SIZE (outmode)));
1195 outmode = GET_MODE (out);
1196 }
1197
1198 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1199 where either M1 is not valid for R or M2 is wider than a word but we
1200 only need one register to store an M2-sized quantity in R.
1201
1202 However, we must reload the inner reg *as well as* the subreg in
1203 that case and the inner reg is an in-out reload. */
1204
1205 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1206 {
1207 enum reg_class in_out_class
1208 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1209 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1210 GET_MODE (SUBREG_REG (out)),
1211 SUBREG_BYTE (out),
1212 GET_MODE (out)),
1213 REGNO (SUBREG_REG (out)));
1214
1215 /* This relies on the fact that emit_reload_insns outputs the
1216 instructions for output reloads of type RELOAD_OTHER in reverse
1217 order of the reloads. Thus if the outer reload is also of type
1218 RELOAD_OTHER, we are guaranteed that this inner reload will be
1219 output after the outer reload. */
1220 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1221 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1222 0, 0, opnum, RELOAD_OTHER);
1223 dont_remove_subreg = 1;
1224 }
1225
1226 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1227 if (in != 0 && out != 0 && MEM_P (out)
1228 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1229 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1230 dont_share = 1;
1231
1232 /* If IN is a SUBREG of a hard register, make a new REG. This
1233 simplifies some of the cases below. */
1234
1235 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1236 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1237 && ! dont_remove_subreg)
1238 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1239
1240 /* Similarly for OUT. */
1241 if (out != 0 && GET_CODE (out) == SUBREG
1242 && REG_P (SUBREG_REG (out))
1243 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1244 && ! dont_remove_subreg)
1245 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1246
1247 /* Narrow down the class of register wanted if that is
1248 desirable on this machine for efficiency. */
1249 {
1250 reg_class_t preferred_class = rclass;
1251
1252 if (in != 0)
1253 preferred_class = targetm.preferred_reload_class (in, rclass);
1254
1255 /* Output reloads may need analogous treatment, different in detail. */
1256 if (out != 0)
1257 preferred_class
1258 = targetm.preferred_output_reload_class (out, preferred_class);
1259
1260 /* Discard what the target said if we cannot do it. */
1261 if (preferred_class != NO_REGS
1262 || (optional && type == RELOAD_FOR_OUTPUT))
1263 rclass = (enum reg_class) preferred_class;
1264 }
1265
1266 /* Make sure we use a class that can handle the actual pseudo
1267 inside any subreg. For example, on the 386, QImode regs
1268 can appear within SImode subregs. Although GENERAL_REGS
1269 can handle SImode, QImode needs a smaller class. */
1270 #ifdef LIMIT_RELOAD_CLASS
1271 if (in_subreg_loc)
1272 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1273 else if (in != 0 && GET_CODE (in) == SUBREG)
1274 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1275
1276 if (out_subreg_loc)
1277 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1278 if (out != 0 && GET_CODE (out) == SUBREG)
1279 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1280 #endif
1281
1282 /* Verify that this class is at least possible for the mode that
1283 is specified. */
1284 if (this_insn_is_asm)
1285 {
1286 machine_mode mode;
1287 if (paradoxical_subreg_p (inmode, outmode))
1288 mode = inmode;
1289 else
1290 mode = outmode;
1291 if (mode == VOIDmode)
1292 {
1293 error_for_asm (this_insn, "cannot reload integer constant "
1294 "operand in %<asm%>");
1295 mode = word_mode;
1296 if (in != 0)
1297 inmode = word_mode;
1298 if (out != 0)
1299 outmode = word_mode;
1300 }
1301 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1302 if (targetm.hard_regno_mode_ok (i, mode)
1303 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1304 break;
1305 if (i == FIRST_PSEUDO_REGISTER)
1306 {
1307 error_for_asm (this_insn, "impossible register constraint "
1308 "in %<asm%>");
1309 /* Avoid further trouble with this insn. */
1310 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1311 /* We used to continue here setting class to ALL_REGS, but it triggers
1312 sanity check on i386 for:
1313 void foo(long double d)
1314 {
1315 asm("" :: "a" (d));
1316 }
1317 Returning zero here ought to be safe as we take care in
1318 find_reloads to not process the reloads when instruction was
1319 replaced by USE. */
1320
1321 return 0;
1322 }
1323 }
1324
1325 /* Optional output reloads are always OK even if we have no register class,
1326 since the function of these reloads is only to have spill_reg_store etc.
1327 set, so that the storing insn can be deleted later. */
1328 gcc_assert (rclass != NO_REGS
1329 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1330
1331 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1332
1333 if (i == n_reloads)
1334 {
1335 /* See if we need a secondary reload register to move between CLASS
1336 and IN or CLASS and OUT. Get the icode and push any required reloads
1337 needed for each of them if so. */
1338
1339 if (in != 0)
1340 secondary_in_reload
1341 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1342 &secondary_in_icode, NULL);
1343 if (out != 0 && GET_CODE (out) != SCRATCH)
1344 secondary_out_reload
1345 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1346 type, &secondary_out_icode, NULL);
1347
1348 /* We found no existing reload suitable for re-use.
1349 So add an additional reload. */
1350
1351 if (subreg_in_class == NO_REGS
1352 && in != 0
1353 && (REG_P (in)
1354 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1355 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1356 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1357 /* If a memory location is needed for the copy, make one. */
1358 if (subreg_in_class != NO_REGS
1359 && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1360 get_secondary_mem (in, inmode, opnum, type);
1361
1362 i = n_reloads;
1363 rld[i].in = in;
1364 rld[i].out = out;
1365 rld[i].rclass = rclass;
1366 rld[i].inmode = inmode;
1367 rld[i].outmode = outmode;
1368 rld[i].reg_rtx = 0;
1369 rld[i].optional = optional;
1370 rld[i].inc = 0;
1371 rld[i].nocombine = 0;
1372 rld[i].in_reg = inloc ? *inloc : 0;
1373 rld[i].out_reg = outloc ? *outloc : 0;
1374 rld[i].opnum = opnum;
1375 rld[i].when_needed = type;
1376 rld[i].secondary_in_reload = secondary_in_reload;
1377 rld[i].secondary_out_reload = secondary_out_reload;
1378 rld[i].secondary_in_icode = secondary_in_icode;
1379 rld[i].secondary_out_icode = secondary_out_icode;
1380 rld[i].secondary_p = 0;
1381
1382 n_reloads++;
1383
1384 if (out != 0
1385 && (REG_P (out)
1386 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1387 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1388 && (targetm.secondary_memory_needed
1389 (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1390 get_secondary_mem (out, outmode, opnum, type);
1391 }
1392 else
1393 {
1394 /* We are reusing an existing reload,
1395 but we may have additional information for it.
1396 For example, we may now have both IN and OUT
1397 while the old one may have just one of them. */
1398
1399 /* The modes can be different. If they are, we want to reload in
1400 the larger mode, so that the value is valid for both modes. */
1401 if (inmode != VOIDmode
1402 && partial_subreg_p (rld[i].inmode, inmode))
1403 rld[i].inmode = inmode;
1404 if (outmode != VOIDmode
1405 && partial_subreg_p (rld[i].outmode, outmode))
1406 rld[i].outmode = outmode;
1407 if (in != 0)
1408 {
1409 rtx in_reg = inloc ? *inloc : 0;
1410 /* If we merge reloads for two distinct rtl expressions that
1411 are identical in content, there might be duplicate address
1412 reloads. Remove the extra set now, so that if we later find
1413 that we can inherit this reload, we can get rid of the
1414 address reloads altogether.
1415
1416 Do not do this if both reloads are optional since the result
1417 would be an optional reload which could potentially leave
1418 unresolved address replacements.
1419
1420 It is not sufficient to call transfer_replacements since
1421 choose_reload_regs will remove the replacements for address
1422 reloads of inherited reloads which results in the same
1423 problem. */
1424 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1425 && ! (rld[i].optional && optional))
1426 {
1427 /* We must keep the address reload with the lower operand
1428 number alive. */
1429 if (opnum > rld[i].opnum)
1430 {
1431 remove_address_replacements (in);
1432 in = rld[i].in;
1433 in_reg = rld[i].in_reg;
1434 }
1435 else
1436 remove_address_replacements (rld[i].in);
1437 }
1438 /* When emitting reloads we don't look only at the in-
1439 and outmode, but also directly at the operands (in and out).
1440 So we can't simply overwrite them with whatever we have found
1441 for this (to-be-merged) reload; we have to "merge" that too.
1442 Reusing another reload already verified that we deal with the
1443 same operands, just possibly in different modes. So we
1444 overwrite the operands only when the new mode is larger.
1445 See also PR33613. */
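	  /* For example (purely illustrative): if the reload recorded so
	     far has rld[i].in = (reg:HI 100) and this request supplies the
	     same value as (subreg:SI (reg:HI 100) 0), the wider SImode rtx
	     and mode are kept, so the single reload register is wide enough
	     for both uses.  */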
1446 if (!rld[i].in
1447 || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1448 rld[i].in = in;
1449 if (!rld[i].in_reg
1450 || (in_reg
1451 && partial_subreg_p (GET_MODE (rld[i].in_reg),
1452 GET_MODE (in_reg))))
1453 rld[i].in_reg = in_reg;
1454 }
1455 if (out != 0)
1456 {
1457 if (!rld[i].out
1458 || (out
1459 && partial_subreg_p (GET_MODE (rld[i].out),
1460 GET_MODE (out))))
1461 rld[i].out = out;
1462 if (outloc
1463 && (!rld[i].out_reg
1464 || partial_subreg_p (GET_MODE (rld[i].out_reg),
1465 GET_MODE (*outloc))))
1466 rld[i].out_reg = *outloc;
1467 }
1468 if (reg_class_subset_p (rclass, rld[i].rclass))
1469 rld[i].rclass = rclass;
1470 rld[i].optional &= optional;
1471 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1472 opnum, rld[i].opnum))
1473 rld[i].when_needed = RELOAD_OTHER;
1474 rld[i].opnum = MIN (rld[i].opnum, opnum);
1475 }
1476
1477 /* If the ostensible rtx being reloaded differs from the rtx found
1478 in the location to substitute, this reload is not safe to combine
1479 because we cannot reliably tell whether it appears in the insn. */
1480
1481 if (in != 0 && in != *inloc)
1482 rld[i].nocombine = 1;
1483
1484 #if 0
1485 /* This was replaced by changes in find_reloads_address_1 and the new
1486 function inc_for_reload, which go with a new meaning of reload_inc. */
1487
1488 /* If this is an IN/OUT reload in an insn that sets the CC,
1489 it must be for an autoincrement. It doesn't work to store
1490 the incremented value after the insn because that would clobber the CC.
1491 So we must load the value reloaded from, increment it,
1492 store it back, then decrement it again.
1493 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1494 {
1495 out = 0;
1496 rld[i].out = 0;
1497 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1498 /* If we did not find a nonzero amount-to-increment-by,
1499 that contradicts the belief that IN is being incremented
1500 in an address in this insn. */
1501 gcc_assert (rld[i].inc != 0);
1502 }
1503 #endif
1504
1505 /* If we will replace IN and OUT with the reload-reg,
1506 record where they are located so that substitution need
1507 not do a tree walk. */
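  /* A minimal sketch of what is recorded below: each replacement stores the
     reload number (WHAT), the address of the rtx to overwrite (WHERE) and
     the mode, so the chosen reload register can later be written through
     WHERE without rescanning the insn.  */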
1508
1509 if (replace_reloads)
1510 {
1511 if (inloc != 0)
1512 {
1513 struct replacement *r = &replacements[n_replacements++];
1514 r->what = i;
1515 r->where = inloc;
1516 r->mode = inmode;
1517 }
1518 if (outloc != 0 && outloc != inloc)
1519 {
1520 struct replacement *r = &replacements[n_replacements++];
1521 r->what = i;
1522 r->where = outloc;
1523 r->mode = outmode;
1524 }
1525 }
1526
1527 /* If this reload is just being introduced and it has both
1528 an incoming quantity and an outgoing quantity that are
1529 supposed to be made to match, see if either one of the two
1530 can serve as the place to reload into.
1531
1532 If one of them is acceptable, set rld[i].reg_rtx
1533 to that one. */
1534
1535 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1536 {
1537 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1538 inmode, outmode,
1539 rld[i].rclass, i,
1540 earlyclobber_operand_p (out));
1541
1542 /* If the outgoing register already contains the same value
1543 as the incoming one, we can dispense with loading it.
1544 The easiest way to tell the caller that is to give a phony
1545 value for the incoming operand (same as outgoing one). */
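      /* For instance (illustrative only): if IN is (const_int 0) and
	 find_equiv_reg shows that the hard register chosen for OUT already
	 holds zero at this insn, rld[i].in is set to OUT, so no load has to
	 be emitted for the input side.  */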
1546 if (rld[i].reg_rtx == out
1547 && (REG_P (in) || CONSTANT_P (in))
1548 && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1549 static_reload_reg_p, i, inmode) != 0)
1550 rld[i].in = out;
1551 }
1552
1553 /* If this is an input reload and the operand contains a register that
1554 dies in this insn and is used nowhere else, see if it is the right class
1555 to be used for this reload. Use it if so. (This occurs most commonly
1556 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1557 this if it is also an output reload that mentions the register unless
1558 the output is a SUBREG that clobbers an entire register.
1559
1560 Note that the operand might be one of the spill regs, if it is a
1561 pseudo reg and we are in a block where spilling has not taken place.
1562 But if there is no spilling in this block, that is OK.
1563 An explicitly used hard reg cannot be a spill reg. */
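  /* An illustrative case: if IN is (plus:SI (reg:SI 3) (const_int 8)),
     hard register 3 carries a REG_DEAD note on this insn, belongs to
     RCLASS, and is not otherwise set or used in the pattern, the loop below
     lets register 3 double as the reload register.  */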
1564
1565 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1566 {
1567 rtx note;
1568 int regno;
1569 machine_mode rel_mode = inmode;
1570
1571 if (out && partial_subreg_p (rel_mode, outmode))
1572 rel_mode = outmode;
1573
1574 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1575 if (REG_NOTE_KIND (note) == REG_DEAD
1576 && REG_P (XEXP (note, 0))
1577 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1578 && reg_mentioned_p (XEXP (note, 0), in)
1579 /* Check that a former pseudo is valid; see find_dummy_reload. */
1580 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1581 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1582 ORIGINAL_REGNO (XEXP (note, 0)))
1583 && REG_NREGS (XEXP (note, 0)) == 1))
1584 && ! refers_to_regno_for_reload_p (regno,
1585 end_hard_regno (rel_mode,
1586 regno),
1587 PATTERN (this_insn), inloc)
1588 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1589 /* If this is also an output reload, IN cannot be used as
1590 the reload register if it is set in this insn unless IN
1591 is also OUT. */
1592 && (out == 0 || in == out
1593 || ! hard_reg_set_here_p (regno,
1594 end_hard_regno (rel_mode, regno),
1595 PATTERN (this_insn)))
1596 /* ??? Why is this code so different from the previous?
1597 Is there any simple coherent way to describe the two together?
1598 What's going on here? */
1599 && (in != out
1600 || (GET_CODE (in) == SUBREG
1601 && (known_equal_after_align_up
1602 (GET_MODE_SIZE (GET_MODE (in)),
1603 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1604 UNITS_PER_WORD))))
1605 /* Make sure the operand fits in the reg that dies. */
1606 && known_le (GET_MODE_SIZE (rel_mode),
1607 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1608 && targetm.hard_regno_mode_ok (regno, inmode)
1609 && targetm.hard_regno_mode_ok (regno, outmode))
1610 {
1611 unsigned int offs;
1612 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1613 hard_regno_nregs (regno, outmode));
1614
1615 for (offs = 0; offs < nregs; offs++)
1616 if (fixed_regs[regno + offs]
1617 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1618 regno + offs))
1619 break;
1620
1621 if (offs == nregs
1622 && (! (refers_to_regno_for_reload_p
1623 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1624 || can_reload_into (in, regno, inmode)))
1625 {
1626 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1627 break;
1628 }
1629 }
1630 }
1631
1632 if (out)
1633 output_reloadnum = i;
1634
1635 return i;
1636 }
1637
1638 /* Record an additional place we must replace a value
1639 for which we have already recorded a reload.
1640 RELOADNUM is the value returned by push_reload
1641 when the reload was recorded.
1642 This is used in insn patterns that use match_dup. */
1643
1644 static void
1645 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1646 {
1647 if (replace_reloads)
1648 {
1649 struct replacement *r = &replacements[n_replacements++];
1650 r->what = reloadnum;
1651 r->where = loc;
1652 r->mode = mode;
1653 }
1654 }
1655
1656 /* Duplicate any replacement we have recorded to apply at
1657 location ORIG_LOC to also be performed at DUP_LOC.
1658 This is used in insn patterns that use match_dup. */
1659
1660 static void
1661 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1662 {
1663 int i, n = n_replacements;
1664
1665 for (i = 0; i < n; i++)
1666 {
1667 struct replacement *r = &replacements[i];
1668 if (r->where == orig_loc)
1669 push_replacement (dup_loc, r->what, r->mode);
1670 }
1671 }
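/* An illustrative (hypothetical) pattern showing why duplicate locations
   matter: in
     (set (match_operand:SI 0 "" "") (neg:SI (match_dup 0)))
   operand 0 appears at two locations.  If it is reloaded, both the operand
   location and the match_dup location must receive the reload register, so
   a separate replacement record is pushed for the dup.  */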
1672
1673 /* Transfer all replacements that used to be in reload FROM to be in
1674 reload TO. */
1675
1676 void
1677 transfer_replacements (int to, int from)
1678 {
1679 int i;
1680
1681 for (i = 0; i < n_replacements; i++)
1682 if (replacements[i].what == from)
1683 replacements[i].what = to;
1684 }
1685
1686 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1687 or a subpart of it. If we have any replacements registered for IN_RTX,
1688 cancel the reloads that were supposed to load them.
1689 Return nonzero if we canceled any reloads. */
1690 int
1691 remove_address_replacements (rtx in_rtx)
1692 {
1693 int i, j;
1694 char reload_flags[MAX_RELOADS];
1695 int something_changed = 0;
1696
1697 memset (reload_flags, 0, sizeof reload_flags);
1698 for (i = 0, j = 0; i < n_replacements; i++)
1699 {
1700 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1701 reload_flags[replacements[i].what] |= 1;
1702 else
1703 {
1704 replacements[j++] = replacements[i];
1705 reload_flags[replacements[i].what] |= 2;
1706 }
1707 }
1708 /* Note that the following store must be done before the recursive calls. */
1709 n_replacements = j;
1710
1711 for (i = n_reloads - 1; i >= 0; i--)
1712 {
1713 if (reload_flags[i] == 1)
1714 {
1715 deallocate_reload_reg (i);
1716 remove_address_replacements (rld[i].in);
1717 rld[i].in = 0;
1718 something_changed = 1;
1719 }
1720 }
1721 return something_changed;
1722 }
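/* An illustrative scenario for the function above: reload R loaded only
   the address of a value that we have since decided to inherit.  All
   replacements pointing into that value are dropped, R's reload register is
   handed back via deallocate_reload_reg, and the same treatment is applied
   recursively to whatever R itself was loading.  */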
1723
1724 /* If there is only one output reload, and it is not for an earlyclobber
1725 operand, try to combine it with a (logically unrelated) input reload
1726 to reduce the number of reload registers needed.
1727
1728 This is safe if the input reload does not appear in
1729 the value being output-reloaded, because this implies
1730 it is not needed any more once the original insn completes.
1731
1732 If that doesn't work, see if we can use any of the registers that
1733 die in this insn as a reload register. We can if it is of the right
1734 class and does not appear in the value being output-reloaded. */
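/* An illustrative example (hypothetical insn, not from a real machine
   description): for
     (set (mem:SI (reg:SI 101)) (plus:SI (reg:SI 100) (const_int 1)))
   the input reload of pseudo 100 and the output reload of the MEM can share
   one reload register R: R is loaded from pseudo 100, the insn computes
   into R, and R is then stored to the MEM.  This is only safe because
   pseudo 100 does not occur in the destination.  */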
1735
1736 static void
1737 combine_reloads (void)
1738 {
1739 int i, regno;
1740 int output_reload = -1;
1741 int secondary_out = -1;
1742 rtx note;
1743
1744 /* Find the output reload; return unless there is exactly one
1745 and that one is mandatory. */
1746
1747 for (i = 0; i < n_reloads; i++)
1748 if (rld[i].out != 0)
1749 {
1750 if (output_reload >= 0)
1751 return;
1752 output_reload = i;
1753 }
1754
1755 if (output_reload < 0 || rld[output_reload].optional)
1756 return;
1757
1758 /* An input-output reload isn't combinable. */
1759
1760 if (rld[output_reload].in != 0)
1761 return;
1762
1763 /* If this reload is for an earlyclobber operand, we can't do anything. */
1764 if (earlyclobber_operand_p (rld[output_reload].out))
1765 return;
1766
1767 /* If there is a reload for part of the address of this operand, we would
1768 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1769 its life to the point where doing this combine would not lower the
1770 number of spill registers needed. */
1771 for (i = 0; i < n_reloads; i++)
1772 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1773 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1774 && rld[i].opnum == rld[output_reload].opnum)
1775 return;
1776
1777 /* Check each input reload; can we combine it? */
1778
1779 for (i = 0; i < n_reloads; i++)
1780 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1781 /* Life span of this reload must not extend past main insn. */
1782 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1783 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1784 && rld[i].when_needed != RELOAD_OTHER
1785 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1786 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1787 [(int) rld[output_reload].outmode])
1788 && known_eq (rld[i].inc, 0)
1789 && rld[i].reg_rtx == 0
1790 /* Don't combine two reloads with different secondary
1791 memory locations. */
1792 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1793 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1794 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1795 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1796 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1797 ? (rld[i].rclass == rld[output_reload].rclass)
1798 : (reg_class_subset_p (rld[i].rclass,
1799 rld[output_reload].rclass)
1800 || reg_class_subset_p (rld[output_reload].rclass,
1801 rld[i].rclass)))
1802 && (MATCHES (rld[i].in, rld[output_reload].out)
1803 /* Args reversed because the first arg seems to be
1804 the one that we imagine being modified
1805 while the second is the one that might be affected. */
1806 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1807 rld[i].in)
1808 /* However, if the input is a register that appears inside
1809 the output, then we also can't share.
1810 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1811 If the same reload reg is used for both reg 69 and the
1812 result to be stored in memory, then that result
1813 will clobber the address of the memory ref. */
1814 && ! (REG_P (rld[i].in)
1815 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1816 rld[output_reload].out))))
1817 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1818 rld[i].when_needed != RELOAD_FOR_INPUT)
1819 && (reg_class_size[(int) rld[i].rclass]
1820 || targetm.small_register_classes_for_mode_p (VOIDmode))
1821 /* We will allow making things slightly worse by combining an
1822 input and an output, but no worse than that. */
1823 && (rld[i].when_needed == RELOAD_FOR_INPUT
1824 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1825 {
1826 int j;
1827
1828 /* We have found a reload to combine with! */
1829 rld[i].out = rld[output_reload].out;
1830 rld[i].out_reg = rld[output_reload].out_reg;
1831 rld[i].outmode = rld[output_reload].outmode;
1832 /* Mark the old output reload as inoperative. */
1833 rld[output_reload].out = 0;
1834 /* The combined reload is needed for the entire insn. */
1835 rld[i].when_needed = RELOAD_OTHER;
1836 /* If the output reload had a secondary reload, copy it. */
1837 if (rld[output_reload].secondary_out_reload != -1)
1838 {
1839 rld[i].secondary_out_reload
1840 = rld[output_reload].secondary_out_reload;
1841 rld[i].secondary_out_icode
1842 = rld[output_reload].secondary_out_icode;
1843 }
1844
1845 /* Copy any secondary MEM. */
1846 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1847 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1848 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1849 /* If required, minimize the register class. */
1850 if (reg_class_subset_p (rld[output_reload].rclass,
1851 rld[i].rclass))
1852 rld[i].rclass = rld[output_reload].rclass;
1853
1854 /* Transfer all replacements from the old reload to the combined. */
1855 for (j = 0; j < n_replacements; j++)
1856 if (replacements[j].what == output_reload)
1857 replacements[j].what = i;
1858
1859 return;
1860 }
1861
1862 /* If this insn has only one operand that is modified or written (assumed
1863 to be the first), it must be the one corresponding to this reload. It
1864 is safe to use anything that dies in this insn for that output provided
1865 that it does not occur in the output (we already know it isn't an
1866 earlyclobber). If this is an asm insn, give up. */
1867
1868 if (INSN_CODE (this_insn) == -1)
1869 return;
1870
1871 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1872 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1873 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1874 return;
1875
1876 /* See if some hard register that dies in this insn and is not used in
1877 the output is of the right class. This only works if the register we pick
1878 up can fully hold our output reload. */
1879 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1880 if (REG_NOTE_KIND (note) == REG_DEAD
1881 && REG_P (XEXP (note, 0))
1882 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1883 rld[output_reload].out)
1884 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1885 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1886 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1887 regno)
1888 && (hard_regno_nregs (regno, rld[output_reload].outmode)
1889 <= REG_NREGS (XEXP (note, 0)))
1890 /* Ensure that a secondary or tertiary reload for this output
1891 won't want this register. */
1892 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1893 || (!(TEST_HARD_REG_BIT
1894 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1895 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1896 || !(TEST_HARD_REG_BIT
1897 (reg_class_contents[(int) rld[secondary_out].rclass],
1898 regno)))))
1899 && !fixed_regs[regno]
1900 /* Check that a former pseudo is valid; see find_dummy_reload. */
1901 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1902 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1903 ORIGINAL_REGNO (XEXP (note, 0)))
1904 && REG_NREGS (XEXP (note, 0)) == 1)))
1905 {
1906 rld[output_reload].reg_rtx
1907 = gen_rtx_REG (rld[output_reload].outmode, regno);
1908 return;
1909 }
1910 }
1911
1912 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1913 See if one of IN and OUT is a register that may be used;
1914 this is desirable since a spill-register won't be needed.
1915 If so, return the register rtx that proves acceptable.
1916
1917 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1918 RCLASS is the register class required for the reload.
1919
1920 If FOR_REAL is >= 0, it is the number of the reload,
1921 and in some cases when it can be discovered that OUT doesn't need
1922 to be computed, clear out rld[FOR_REAL].out.
1923
1924 If FOR_REAL is -1, this should not be done, because this call
1925 is just to see if a register can be found, not to find and install it.
1926
1927 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1928 puts an additional constraint on being able to use IN for OUT since
1929 IN must not appear elsewhere in the insn (it is assumed that IN itself
1930 is safe from the earlyclobber). */
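/* A minimal illustration (hypothetical operands): for an in-out reload
   with IN = (reg:SI 100) and OUT = (reg:SI 2), where hard register 2 is in
   RCLASS, is not fixed, and is not referenced elsewhere in the insn, OUT
   itself can serve as the reload register, so no spill register is
   needed.  */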
1931
1932 static rtx
1933 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1934 machine_mode inmode, machine_mode outmode,
1935 reg_class_t rclass, int for_real, int earlyclobber)
1936 {
1937 rtx in = real_in;
1938 rtx out = real_out;
1939 int in_offset = 0;
1940 int out_offset = 0;
1941 rtx value = 0;
1942
1943 /* If operands exceed a word, we can't use either of them
1944 unless they have the same size. */
1945 if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1946 && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1947 || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1948 return 0;
1949
1950 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1951 respectively refers to a hard register. */
1952
1953 /* Find the inside of any subregs. */
1954 while (GET_CODE (out) == SUBREG)
1955 {
1956 if (REG_P (SUBREG_REG (out))
1957 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1958 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1959 GET_MODE (SUBREG_REG (out)),
1960 SUBREG_BYTE (out),
1961 GET_MODE (out));
1962 out = SUBREG_REG (out);
1963 }
1964 while (GET_CODE (in) == SUBREG)
1965 {
1966 if (REG_P (SUBREG_REG (in))
1967 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1968 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1969 GET_MODE (SUBREG_REG (in)),
1970 SUBREG_BYTE (in),
1971 GET_MODE (in));
1972 in = SUBREG_REG (in);
1973 }
1974
1975 /* Narrow down the reg class, the same way push_reload will;
1976 otherwise we might find a dummy now, but push_reload won't. */
1977 {
1978 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1979 if (preferred_class != NO_REGS)
1980 rclass = (enum reg_class) preferred_class;
1981 }
1982
1983 /* See if OUT will do. */
1984 if (REG_P (out)
1985 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1986 {
1987 unsigned int regno = REGNO (out) + out_offset;
1988 unsigned int nwords = hard_regno_nregs (regno, outmode);
1989 rtx saved_rtx;
1990
1991 /* When we consider whether the insn uses OUT,
1992 ignore references within IN. They don't prevent us
1993 from copying IN into OUT, because those refs would
1994 move into the insn that reloads IN.
1995
1996 However, we only ignore IN in its role as this reload.
1997 If the insn uses IN elsewhere and it contains OUT,
1998 that counts. We can't be sure it's the "same" operand
1999 so it might not go through this reload.
2000
2001 We also need to avoid using OUT if it, or part of it, is a
2002 fixed register. Modifying such registers, even transiently,
2003 may have undefined effects on the machine, such as modifying
2004 the stack pointer. */
2005 saved_rtx = *inloc;
2006 *inloc = const0_rtx;
2007
2008 if (regno < FIRST_PSEUDO_REGISTER
2009 && targetm.hard_regno_mode_ok (regno, outmode)
2010 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2011 PATTERN (this_insn), outloc))
2012 {
2013 unsigned int i;
2014
2015 for (i = 0; i < nwords; i++)
2016 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2017 regno + i)
2018 || fixed_regs[regno + i])
2019 break;
2020
2021 if (i == nwords)
2022 {
2023 if (REG_P (real_out))
2024 value = real_out;
2025 else
2026 value = gen_rtx_REG (outmode, regno);
2027 }
2028 }
2029
2030 *inloc = saved_rtx;
2031 }
2032
2033 /* Consider using IN if OUT was not acceptable
2034 or if OUT dies in this insn (like the quotient in a divmod insn).
2035 We can't use IN unless it dies in this insn,
2036 which means we must know accurately which hard regs are live.
2037 Also, the result can't go in IN if IN is used within OUT,
2038 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2039 if (hard_regs_live_known
2040 && REG_P (in)
2041 && REGNO (in) < FIRST_PSEUDO_REGISTER
2042 && (value == 0
2043 || find_reg_note (this_insn, REG_UNUSED, real_out))
2044 && find_reg_note (this_insn, REG_DEAD, real_in)
2045 && !fixed_regs[REGNO (in)]
2046 && targetm.hard_regno_mode_ok (REGNO (in),
2047 /* The only case where out and real_out
2048 might have different modes is where
2049 real_out is a subreg, and in that
2050 case, out has a real mode. */
2051 (GET_MODE (out) != VOIDmode
2052 ? GET_MODE (out) : outmode))
2053 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2054 /* However only do this if we can be sure that this input
2055 operand doesn't correspond with an uninitialized pseudo.
2056 global can assign some hardreg to it that is the same as
2057 the one assigned to a different, also live pseudo (as it
2058 can ignore the conflict). We must never introduce writes
2059 to such hardregs, as they would clobber the other live
2060 pseudo. See PR 20973. */
2061 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2062 ORIGINAL_REGNO (in))
2063 /* Similarly, only do this if we can be sure that the death
2064 note is still valid. global can assign some hardreg to
2065 the pseudo referenced in the note and simultaneously a
2066 subword of this hardreg to a different, also live pseudo,
2067 because only another subword of the hardreg is actually
2068 used in the insn. This cannot happen if the pseudo has
2069 been assigned exactly one hardreg. See PR 33732. */
2070 && REG_NREGS (in) == 1)))
2071 {
2072 unsigned int regno = REGNO (in) + in_offset;
2073 unsigned int nwords = hard_regno_nregs (regno, inmode);
2074
2075 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2076 && ! hard_reg_set_here_p (regno, regno + nwords,
2077 PATTERN (this_insn))
2078 && (! earlyclobber
2079 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2080 PATTERN (this_insn), inloc)))
2081 {
2082 unsigned int i;
2083
2084 for (i = 0; i < nwords; i++)
2085 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2086 regno + i))
2087 break;
2088
2089 if (i == nwords)
2090 {
2091 /* If we were going to use OUT as the reload reg
2092 and changed our mind, it means OUT is a dummy that
2093 dies here. So don't bother copying value to it. */
2094 if (for_real >= 0 && value == real_out)
2095 rld[for_real].out = 0;
2096 if (REG_P (real_in))
2097 value = real_in;
2098 else
2099 value = gen_rtx_REG (inmode, regno);
2100 }
2101 }
2102 }
2103
2104 return value;
2105 }
2106
2107 /* This page contains subroutines used mainly for determining
2108 whether the IN or an OUT of a reload can serve as the
2109 reload register. */
2110
2111 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2112
2113 int
2114 earlyclobber_operand_p (rtx x)
2115 {
2116 int i;
2117
2118 for (i = 0; i < n_earlyclobbers; i++)
2119 if (reload_earlyclobbers[i] == x)
2120 return 1;
2121
2122 return 0;
2123 }
2124
2125 /* Return 1 if expression X alters a hard reg in the range
2126 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2127 either explicitly or in the guise of a pseudo-reg allocated to one of those regs.
2128 X should be the body of an instruction. */
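/* For example (illustrative, assuming DImode occupies two hard registers):
   with BEG_REGNO = 2 and END_REGNO = 4, a body of (set (reg:DI 3) (...))
   alters the range, since registers 3 and 4 overlap [2, 4), whereas
   (set (reg:SI 5) (...)) does not; a PARALLEL body is simply checked one
   element at a time.  */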
2129
2130 static int
2131 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2132 {
2133 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2134 {
2135 rtx op0 = SET_DEST (x);
2136
2137 while (GET_CODE (op0) == SUBREG)
2138 op0 = SUBREG_REG (op0);
2139 if (REG_P (op0))
2140 {
2141 unsigned int r = REGNO (op0);
2142
2143 /* See if this reg overlaps range under consideration. */
2144 if (r < end_regno
2145 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2146 return 1;
2147 }
2148 }
2149 else if (GET_CODE (x) == PARALLEL)
2150 {
2151 int i = XVECLEN (x, 0) - 1;
2152
2153 for (; i >= 0; i--)
2154 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2155 return 1;
2156 }
2157
2158 return 0;
2159 }
2160
2161 /* Return 1 if ADDR is a valid memory address for mode MODE
2162 in address space AS, and check that each pseudo reg has the
2163 proper kind of hard reg. */
2164
2165 int
2166 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2167 rtx addr, addr_space_t as)
2168 {
2169 #ifdef GO_IF_LEGITIMATE_ADDRESS
2170 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2171 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2172 return 0;
2173
2174 win:
2175 return 1;
2176 #else
2177 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2178 #endif
2179 }
2180
2181 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2182 if they are the same hard reg, and has special hacks for
2183 autoincrement and autodecrement.
2184 This is specifically intended for find_reloads to use
2185 in determining whether two operands match.
2186 X is the operand whose number is the lower of the two.
2187
2188 The value is 2 if Y contains a pre-increment that matches
2189 a non-incrementing address in X. */
2190
2191 /* ??? To be completely correct, we should arrange to pass
2192 for X the output operand and for Y the input operand.
2193 For now, we assume that the output operand has the lower number
2194 because that is natural in (SET output (... input ...)). */
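/* A few illustrative cases (hard register numbers hypothetical):
   (reg:SI 3) matches (subreg:SI (reg:DI 3) 0), since both resolve to hard
   register 3; (post_inc:SI (reg:SI 3)) as X matches (reg:SI 3) as Y; and
   matching (reg:SI 3) as X against (pre_inc:SI (reg:SI 3)) as Y returns 2,
   the value some callers treat specially.  */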
2195
2196 int
2197 operands_match_p (rtx x, rtx y)
2198 {
2199 int i;
2200 RTX_CODE code = GET_CODE (x);
2201 const char *fmt;
2202 int success_2;
2203
2204 if (x == y)
2205 return 1;
2206 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2207 && (REG_P (y) || (GET_CODE (y) == SUBREG
2208 && REG_P (SUBREG_REG (y)))))
2209 {
2210 int j;
2211
2212 if (code == SUBREG)
2213 {
2214 i = REGNO (SUBREG_REG (x));
2215 if (i >= FIRST_PSEUDO_REGISTER)
2216 goto slow;
2217 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2218 GET_MODE (SUBREG_REG (x)),
2219 SUBREG_BYTE (x),
2220 GET_MODE (x));
2221 }
2222 else
2223 i = REGNO (x);
2224
2225 if (GET_CODE (y) == SUBREG)
2226 {
2227 j = REGNO (SUBREG_REG (y));
2228 if (j >= FIRST_PSEUDO_REGISTER)
2229 goto slow;
2230 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2231 GET_MODE (SUBREG_REG (y)),
2232 SUBREG_BYTE (y),
2233 GET_MODE (y));
2234 }
2235 else
2236 j = REGNO (y);
2237
2238 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2239 multiple hard register group of scalar integer registers, so that
2240 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2241 register. */
2242 scalar_int_mode xmode;
2243 if (REG_WORDS_BIG_ENDIAN
2244 && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2245 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2246 && i < FIRST_PSEUDO_REGISTER)
2247 i += hard_regno_nregs (i, xmode) - 1;
2248 scalar_int_mode ymode;
2249 if (REG_WORDS_BIG_ENDIAN
2250 && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2251 && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2252 && j < FIRST_PSEUDO_REGISTER)
2253 j += hard_regno_nregs (j, ymode) - 1;
2254
2255 return i == j;
2256 }
2257 /* If two operands must match, because they are really a single
2258 operand of an assembler insn, then two postincrements are invalid
2259 because the assembler insn would increment only once.
2260 On the other hand, a postincrement matches ordinary indexing
2261 if the postincrement is the output operand. */
2262 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2263 return operands_match_p (XEXP (x, 0), y);
2264 /* Two preincrements are invalid
2265 because the assembler insn would increment only once.
2266 On the other hand, a preincrement matches ordinary indexing
2267 if the preincrement is the input operand.
2268 In this case, return 2, since some callers need to do special
2269 things when this happens. */
2270 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2271 || GET_CODE (y) == PRE_MODIFY)
2272 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2273
2274 slow:
2275
2276 /* Now we have disposed of all the cases in which different rtx codes
2277 can match. */
2278 if (code != GET_CODE (y))
2279 return 0;
2280
2281 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2282 if (GET_MODE (x) != GET_MODE (y))
2283 return 0;
2284
2285 /* MEMs referring to different address space are not equivalent. */
2286 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2287 return 0;
2288
2289 switch (code)
2290 {
2291 CASE_CONST_UNIQUE:
2292 return 0;
2293
2294 case LABEL_REF:
2295 return label_ref_label (x) == label_ref_label (y);
2296 case SYMBOL_REF:
2297 return XSTR (x, 0) == XSTR (y, 0);
2298
2299 default:
2300 break;
2301 }
2302
2303 /* Compare the elements. If any pair of corresponding elements
2304 fails to match, return 0 for the whole thing. */
2305
2306 success_2 = 0;
2307 fmt = GET_RTX_FORMAT (code);
2308 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2309 {
2310 int val, j;
2311 switch (fmt[i])
2312 {
2313 case 'w':
2314 if (XWINT (x, i) != XWINT (y, i))
2315 return 0;
2316 break;
2317
2318 case 'i':
2319 if (XINT (x, i) != XINT (y, i))
2320 return 0;
2321 break;
2322
2323 case 'p':
2324 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2325 return 0;
2326 break;
2327
2328 case 'e':
2329 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2330 if (val == 0)
2331 return 0;
2332 /* If any subexpression returns 2,
2333 we should return 2 if we are successful. */
2334 if (val == 2)
2335 success_2 = 1;
2336 break;
2337
2338 case '0':
2339 break;
2340
2341 case 'E':
2342 if (XVECLEN (x, i) != XVECLEN (y, i))
2343 return 0;
2344 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2345 {
2346 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2347 if (val == 0)
2348 return 0;
2349 if (val == 2)
2350 success_2 = 1;
2351 }
2352 break;
2353
2354 /* It is believed that rtx's at this level will never
2355 contain anything but integers and other rtx's,
2356 except for within LABEL_REFs and SYMBOL_REFs. */
2357 default:
2358 gcc_unreachable ();
2359 }
2360 }
2361 return 1 + success_2;
2362 }
2363
2364 /* Describe the range of registers or memory referenced by X.
2365 If X is a register, set REG_FLAG and put the first register
2366 number into START and the last plus one into END.
2367 If X is a memory reference, put a base address into BASE
2368 and a range of integer offsets into START and END.
2369 If X is pushing on the stack, we can assume it causes no trouble,
2370 so we set the SAFE field. */
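/* Two illustrative decompositions (offsets hypothetical, assuming 4-byte
   SImode): for (mem:SI (plus:SI (reg:SI fp) (const_int 8))) the base is the
   frame pointer with START = 8 and END = 12, while a push through
   (pre_dec:SI (reg:SI sp)) records the stack pointer as the base and sets
   SAFE.  */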
2371
2372 static struct decomposition
2373 decompose (rtx x)
2374 {
2375 struct decomposition val;
2376 int all_const = 0, regno;
2377
2378 memset (&val, 0, sizeof (val));
2379
2380 switch (GET_CODE (x))
2381 {
2382 case MEM:
2383 {
2384 rtx base = NULL_RTX, offset = 0;
2385 rtx addr = XEXP (x, 0);
2386
2387 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2388 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2389 {
2390 val.base = XEXP (addr, 0);
2391 val.start = -GET_MODE_SIZE (GET_MODE (x));
2392 val.end = GET_MODE_SIZE (GET_MODE (x));
2393 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2394 return val;
2395 }
2396
2397 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2398 {
2399 if (GET_CODE (XEXP (addr, 1)) == PLUS
2400 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2401 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2402 {
2403 val.base = XEXP (addr, 0);
2404 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2405 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2406 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2407 return val;
2408 }
2409 }
2410
2411 if (GET_CODE (addr) == CONST)
2412 {
2413 addr = XEXP (addr, 0);
2414 all_const = 1;
2415 }
2416 if (GET_CODE (addr) == PLUS)
2417 {
2418 if (CONSTANT_P (XEXP (addr, 0)))
2419 {
2420 base = XEXP (addr, 1);
2421 offset = XEXP (addr, 0);
2422 }
2423 else if (CONSTANT_P (XEXP (addr, 1)))
2424 {
2425 base = XEXP (addr, 0);
2426 offset = XEXP (addr, 1);
2427 }
2428 }
2429
2430 if (offset == 0)
2431 {
2432 base = addr;
2433 offset = const0_rtx;
2434 }
2435 if (GET_CODE (offset) == CONST)
2436 offset = XEXP (offset, 0);
2437 if (GET_CODE (offset) == PLUS)
2438 {
2439 if (CONST_INT_P (XEXP (offset, 0)))
2440 {
2441 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2442 offset = XEXP (offset, 0);
2443 }
2444 else if (CONST_INT_P (XEXP (offset, 1)))
2445 {
2446 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2447 offset = XEXP (offset, 1);
2448 }
2449 else
2450 {
2451 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2452 offset = const0_rtx;
2453 }
2454 }
2455 else if (!CONST_INT_P (offset))
2456 {
2457 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2458 offset = const0_rtx;
2459 }
2460
2461 if (all_const && GET_CODE (base) == PLUS)
2462 base = gen_rtx_CONST (GET_MODE (base), base);
2463
2464 gcc_assert (CONST_INT_P (offset));
2465
2466 val.start = INTVAL (offset);
2467 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2468 val.base = base;
2469 }
2470 break;
2471
2472 case REG:
2473 val.reg_flag = 1;
2474 regno = true_regnum (x);
2475 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2476 {
2477 /* A pseudo with no hard reg. */
2478 val.start = REGNO (x);
2479 val.end = val.start + 1;
2480 }
2481 else
2482 {
2483 /* A hard reg. */
2484 val.start = regno;
2485 val.end = end_hard_regno (GET_MODE (x), regno);
2486 }
2487 break;
2488
2489 case SUBREG:
2490 if (!REG_P (SUBREG_REG (x)))
2491 /* This could be more precise, but it's good enough. */
2492 return decompose (SUBREG_REG (x));
2493 regno = true_regnum (x);
2494 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2495 return decompose (SUBREG_REG (x));
2496
2497 /* A hard reg. */
2498 val.reg_flag = 1;
2499 val.start = regno;
2500 val.end = regno + subreg_nregs (x);
2501 break;
2502
2503 case SCRATCH:
2504 /* This hasn't been assigned yet, so it can't conflict yet. */
2505 val.safe = 1;
2506 break;
2507
2508 default:
2509 gcc_assert (CONSTANT_P (x));
2510 val.safe = 1;
2511 break;
2512 }
2513 return val;
2514 }
2515
2516 /* Return 1 if altering Y will not modify the value of X.
2517 Y is also described by YDATA, which should be decompose (Y). */
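/* For instance (illustrative): the frame slots
   (mem:SI (plus:SI (reg:SI fp) (const_int -8))) and
   (mem:SI (plus:SI (reg:SI fp) (const_int -4))) decompose to the same base
   with byte ranges [-8, -4) and [-4, 0), so the overlap test at the end of
   immune_p below reports them immune to each other.  */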
2518
2519 static int
2520 immune_p (rtx x, rtx y, struct decomposition ydata)
2521 {
2522 struct decomposition xdata;
2523
2524 if (ydata.reg_flag)
2525 /* In this case the decomposition structure contains register
2526 numbers rather than byte offsets. */
2527 return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2528 ydata.end.to_constant (),
2529 x, (rtx *) 0);
2530 if (ydata.safe)
2531 return 1;
2532
2533 gcc_assert (MEM_P (y));
2534 /* If Y is memory and X is not, Y can't affect X. */
2535 if (!MEM_P (x))
2536 return 1;
2537
2538 xdata = decompose (x);
2539
2540 if (! rtx_equal_p (xdata.base, ydata.base))
2541 {
2542 /* If bases are distinct symbolic constants, there is no overlap. */
2543 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2544 return 1;
2545 /* Constants and stack slots never overlap. */
2546 if (CONSTANT_P (xdata.base)
2547 && (ydata.base == frame_pointer_rtx
2548 || ydata.base == hard_frame_pointer_rtx
2549 || ydata.base == stack_pointer_rtx))
2550 return 1;
2551 if (CONSTANT_P (ydata.base)
2552 && (xdata.base == frame_pointer_rtx
2553 || xdata.base == hard_frame_pointer_rtx
2554 || xdata.base == stack_pointer_rtx))
2555 return 1;
2556 /* If either base is variable, we don't know anything. */
2557 return 0;
2558 }
2559
2560 return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2561 }
2562
2563 /* Similar, but calls decompose. */
2564
2565 int
2566 safe_from_earlyclobber (rtx op, rtx clobber)
2567 {
2568 struct decomposition early_data;
2569
2570 early_data = decompose (clobber);
2571 return immune_p (op, clobber, early_data);
2572 }
2573
2574 /* Main entry point of this file: search the body of INSN
2575 for values that need reloading and record them with push_reload.
2576 REPLACE nonzero means record also where the values occur
2577 so that subst_reloads can be used.
2578
2579 IND_LEVELS says how many levels of indirection are supported by this
2580 machine; a value of zero means that a memory reference is not a valid
2581 memory address.
2582
2583 LIVE_KNOWN says we have valid information about which hard
2584 regs are live at each point in the program; this is true when
2585 we are called from global_alloc but false when stupid register
2586 allocation has been done.
2587
2588 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number
2589 whose entries are nonnegative for regs that have been commandeered for reloading into.
2590 It is copied into STATIC_RELOAD_REG_P and referenced from there
2591 by various subroutines.
2592
2593 Return TRUE if some operands need to be changed, because of swapping
2594 commutative operands, reg_equiv_address substitution, or whatever. */
2595
2596 int
2597 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2598 short *reload_reg_p)
2599 {
2600 int insn_code_number;
2601 int i, j;
2602 int noperands;
2603 /* These start out as the constraints for the insn
2604 and they are chewed up as we consider alternatives. */
2605 const char *constraints[MAX_RECOG_OPERANDS];
2606 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2607 a register. */
2608 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2609 char pref_or_nothing[MAX_RECOG_OPERANDS];
2610 /* Nonzero for a MEM operand whose entire address needs a reload.
2611 May be -1 to indicate the entire address may or may not need a reload. */
2612 int address_reloaded[MAX_RECOG_OPERANDS];
2613 /* Nonzero for an address operand that needs to be completely reloaded.
2614 May be -1 to indicate the entire operand may or may not need a reload. */
2615 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2616 /* Value of enum reload_type to use for operand. */
2617 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2618 /* Value of enum reload_type to use within address of operand. */
2619 enum reload_type address_type[MAX_RECOG_OPERANDS];
2620 /* Save the usage of each operand. */
2621 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2622 int no_input_reloads = 0, no_output_reloads = 0;
2623 int n_alternatives;
2624 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2625 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2626 char this_alternative_win[MAX_RECOG_OPERANDS];
2627 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2628 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2629 int this_alternative_matches[MAX_RECOG_OPERANDS];
2630 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2631 int this_alternative_number;
2632 int goal_alternative_number = 0;
2633 int operand_reloadnum[MAX_RECOG_OPERANDS];
2634 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2635 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2636 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2637 char goal_alternative_win[MAX_RECOG_OPERANDS];
2638 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2639 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2640 int goal_alternative_swapped;
2641 int best;
2642 int commutative;
2643 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2644 rtx substed_operand[MAX_RECOG_OPERANDS];
2645 rtx body = PATTERN (insn);
2646 rtx set = single_set (insn);
2647 int goal_earlyclobber = 0, this_earlyclobber;
2648 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2649 int retval = 0;
2650
2651 this_insn = insn;
2652 n_reloads = 0;
2653 n_replacements = 0;
2654 n_earlyclobbers = 0;
2655 replace_reloads = replace;
2656 hard_regs_live_known = live_known;
2657 static_reload_reg_p = reload_reg_p;
2658
2659 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2660 neither are insns that SET cc0. Insns that use CC0 are not allowed
2661 to have any input reloads. */
2662 if (JUMP_P (insn) || CALL_P (insn))
2663 no_output_reloads = 1;
2664
2665 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2666 no_input_reloads = 1;
2667 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2668 no_output_reloads = 1;
2669
2670 /* The eliminated forms of any secondary memory locations are per-insn, so
2671 clear them out here. */
2672
2673 if (secondary_memlocs_elim_used)
2674 {
2675 memset (secondary_memlocs_elim, 0,
2676 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2677 secondary_memlocs_elim_used = 0;
2678 }
2679
2680 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2681 is cheap to move between them. If it is not, there may not be an insn
2682 to do the copy, so we may need a reload. */
2683 if (GET_CODE (body) == SET
2684 && REG_P (SET_DEST (body))
2685 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2686 && REG_P (SET_SRC (body))
2687 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2688 && register_move_cost (GET_MODE (SET_SRC (body)),
2689 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2690 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2691 return 0;
2692
2693 extract_insn (insn);
2694
2695 noperands = reload_n_operands = recog_data.n_operands;
2696 n_alternatives = recog_data.n_alternatives;
2697
2698 /* Just return "no reloads" if insn has no operands with constraints. */
2699 if (noperands == 0 || n_alternatives == 0)
2700 return 0;
2701
2702 insn_code_number = INSN_CODE (insn);
2703 this_insn_is_asm = insn_code_number < 0;
2704
2705 memcpy (operand_mode, recog_data.operand_mode,
2706 noperands * sizeof (machine_mode));
2707 memcpy (constraints, recog_data.constraints,
2708 noperands * sizeof (const char *));
2709
2710 commutative = -1;
2711
2712 /* If we will need to know, later, whether some pair of operands
2713 are the same, we must compare them now and save the result.
2714 Reloading the base and index registers will clobber them
2715 and afterward they will fail to match. */
2716
2717 for (i = 0; i < noperands; i++)
2718 {
2719 const char *p;
2720 int c;
2721 char *end;
2722
2723 substed_operand[i] = recog_data.operand[i];
2724 p = constraints[i];
2725
2726 modified[i] = RELOAD_READ;
2727
2728 /* Scan this operand's constraint to see if it is an output operand,
2729 an in-out operand, is commutative, or should match another. */
2730
2731 while ((c = *p))
2732 {
2733 p += CONSTRAINT_LEN (c, p);
2734 switch (c)
2735 {
2736 case '=':
2737 modified[i] = RELOAD_WRITE;
2738 break;
2739 case '+':
2740 modified[i] = RELOAD_READ_WRITE;
2741 break;
2742 case '%':
2743 {
2744 /* The last operand should not be marked commutative. */
2745 gcc_assert (i != noperands - 1);
2746
2747 /* We currently only support one commutative pair of
2748 operands. Some existing asm code currently uses more
2749 than one pair. Previously, that would usually work,
2750 but sometimes it would crash the compiler. We
2751 continue supporting that case as well as we can by
2752 silently ignoring all but the first pair. In the
2753 future we may handle it correctly. */
2754 if (commutative < 0)
2755 commutative = i;
2756 else
2757 gcc_assert (this_insn_is_asm);
2758 }
2759 break;
2760 /* Use of ISDIGIT is tempting here, but it may get expensive because
2761 of locale support we don't want. */
2762 case '0': case '1': case '2': case '3': case '4':
2763 case '5': case '6': case '7': case '8': case '9':
2764 {
2765 c = strtoul (p - 1, &end, 10);
2766 p = end;
2767
2768 operands_match[c][i]
2769 = operands_match_p (recog_data.operand[c],
2770 recog_data.operand[i]);
2771
2772 /* An operand may not match itself. */
2773 gcc_assert (c != i);
2774
2775 /* If C can be commuted with C+1, and C might need to match I,
2776 then C+1 might also need to match I. */
2777 if (commutative >= 0)
2778 {
2779 if (c == commutative || c == commutative + 1)
2780 {
2781 int other = c + (c == commutative ? 1 : -1);
2782 operands_match[other][i]
2783 = operands_match_p (recog_data.operand[other],
2784 recog_data.operand[i]);
2785 }
2786 if (i == commutative || i == commutative + 1)
2787 {
2788 int other = i + (i == commutative ? 1 : -1);
2789 operands_match[c][other]
2790 = operands_match_p (recog_data.operand[c],
2791 recog_data.operand[other]);
2792 }
2793 /* Note that C is supposed to be less than I.
2794 No need to consider altering both C and I because in
2795 that case we would alter one into the other. */
2796 }
2797 }
2798 }
2799 }
2800 }
2801
2802 /* Examine each operand that is a memory reference or memory address
2803 and reload parts of the addresses into index registers.
2804 Also here any references to pseudo regs that didn't get hard regs
2805 but are equivalent to constants get replaced in the insn itself
2806 with those constants. Nobody will ever see them again.
2807
2808 Finally, set up the preferred classes of each operand. */
2809
2810 for (i = 0; i < noperands; i++)
2811 {
2812 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2813
2814 address_reloaded[i] = 0;
2815 address_operand_reloaded[i] = 0;
2816 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2817 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2818 : RELOAD_OTHER);
2819 address_type[i]
2820 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2821 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2822 : RELOAD_OTHER);
2823
2824 if (*constraints[i] == 0)
2825 /* Ignore things like match_operator operands. */
2826 ;
2827 else if (insn_extra_address_constraint
2828 (lookup_constraint (constraints[i])))
2829 {
2830 address_operand_reloaded[i]
2831 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2832 recog_data.operand[i],
2833 recog_data.operand_loc[i],
2834 i, operand_type[i], ind_levels, insn);
2835
2836 /* If we now have a simple operand where we used to have a
2837 PLUS or MULT, re-recognize and try again. */
2838 if ((OBJECT_P (*recog_data.operand_loc[i])
2839 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2840 && (GET_CODE (recog_data.operand[i]) == MULT
2841 || GET_CODE (recog_data.operand[i]) == PLUS))
2842 {
2843 INSN_CODE (insn) = -1;
2844 retval = find_reloads (insn, replace, ind_levels, live_known,
2845 reload_reg_p);
2846 return retval;
2847 }
2848
2849 recog_data.operand[i] = *recog_data.operand_loc[i];
2850 substed_operand[i] = recog_data.operand[i];
2851
2852 /* Address operands are reloaded in their existing mode,
2853 no matter what is specified in the machine description. */
2854 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2855
2856 /* If the address is a single CONST_INT, pick the address mode
2857 instead; otherwise we will later not know in which mode
2858 the reload should be performed. */
2859 if (operand_mode[i] == VOIDmode)
2860 operand_mode[i] = Pmode;
2861
2862 }
2863 else if (code == MEM)
2864 {
2865 address_reloaded[i]
2866 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2867 recog_data.operand_loc[i],
2868 XEXP (recog_data.operand[i], 0),
2869 &XEXP (recog_data.operand[i], 0),
2870 i, address_type[i], ind_levels, insn);
2871 recog_data.operand[i] = *recog_data.operand_loc[i];
2872 substed_operand[i] = recog_data.operand[i];
2873 }
2874 else if (code == SUBREG)
2875 {
2876 rtx reg = SUBREG_REG (recog_data.operand[i]);
2877 rtx op
2878 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2879 ind_levels,
2880 set != 0
2881 && &SET_DEST (set) == recog_data.operand_loc[i],
2882 insn,
2883 &address_reloaded[i]);
2884
2885 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2886 that didn't get a hard register, emit a USE with a REG_EQUAL
2887 note in front so that we might inherit a previous, possibly
2888 wider reload. */
2889
2890 if (replace
2891 && MEM_P (op)
2892 && REG_P (reg)
2893 && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2894 GET_MODE_SIZE (GET_MODE (op)))
2895 && reg_equiv_constant (REGNO (reg)) == 0)
2896 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2897 insn),
2898 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2899
2900 substed_operand[i] = recog_data.operand[i] = op;
2901 }
2902 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2903 /* We can get a PLUS as an "operand" as a result of register
2904 elimination. See eliminate_regs and gen_reload. We handle
2905 a unary operator by reloading the operand. */
2906 substed_operand[i] = recog_data.operand[i]
2907 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2908 ind_levels, 0, insn,
2909 &address_reloaded[i]);
2910 else if (code == REG)
2911 {
2912 /* This is equivalent to calling find_reloads_toplev.
2913 The code is duplicated for speed.
2914 When we find a pseudo always equivalent to a constant,
2915 we replace it by the constant. We must be sure, however,
2916 that we don't try to replace it in the insn in which it
2917 is being set. */
2918 int regno = REGNO (recog_data.operand[i]);
2919 if (reg_equiv_constant (regno) != 0
2920 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2921 {
2922 /* Record the existing mode so that the check whether constants are
2923 allowed will work when operand_mode isn't specified. */
2924
2925 if (operand_mode[i] == VOIDmode)
2926 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2927
2928 substed_operand[i] = recog_data.operand[i]
2929 = reg_equiv_constant (regno);
2930 }
2931 if (reg_equiv_memory_loc (regno) != 0
2932 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2933 /* We need not give a valid is_set_dest argument since the case
2934 of a constant equivalence was checked above. */
2935 substed_operand[i] = recog_data.operand[i]
2936 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2937 ind_levels, 0, insn,
2938 &address_reloaded[i]);
2939 }
2940 /* If the operand is still a register (we didn't replace it with an
2941 equivalent), get the preferred class to reload it into. */
2942 code = GET_CODE (recog_data.operand[i]);
2943 preferred_class[i]
2944 = ((code == REG && REGNO (recog_data.operand[i])
2945 >= FIRST_PSEUDO_REGISTER)
2946 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2947 : NO_REGS);
2948 pref_or_nothing[i]
2949 = (code == REG
2950 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2951 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2952 }
2953
2954 /* If this is simply a copy from operand 1 to operand 0, merge the
2955 preferred classes for the operands. */
2956 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2957 && recog_data.operand[1] == SET_SRC (set))
2958 {
2959 preferred_class[0] = preferred_class[1]
2960 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2961 pref_or_nothing[0] |= pref_or_nothing[1];
2962 pref_or_nothing[1] |= pref_or_nothing[0];
2963 }
2964
2965 /* Now see what we need for pseudo-regs that didn't get hard regs
2966 or got the wrong kind of hard reg. For this, we must consider
2967 all the operands together against the register constraints. */
2968
2969 best = MAX_RECOG_OPERANDS * 2 + 600;
2970
2971 goal_alternative_swapped = 0;
2972
2973 /* The constraints are made of several alternatives.
2974 Each operand's constraint looks like foo,bar,... with commas
2975 separating the alternatives. The first alternatives for all
2976 operands go together, the second alternatives go together, etc.
2977
2978 First loop over alternatives. */
2979
2980 alternative_mask enabled = get_enabled_alternatives (insn);
2981 for (this_alternative_number = 0;
2982 this_alternative_number < n_alternatives;
2983 this_alternative_number++)
2984 {
2985 int swapped;
2986
2987 if (!TEST_BIT (enabled, this_alternative_number))
2988 {
2989 int i;
2990
2991 for (i = 0; i < recog_data.n_operands; i++)
2992 constraints[i] = skip_alternative (constraints[i]);
2993
2994 continue;
2995 }
2996
2997 /* If insn is commutative (it's safe to exchange a certain pair
2998 of operands) then we need to try each alternative twice, the
2999 second time matching those two operands as if we had
3000 exchanged them. To do this, really exchange them in
3001 operands. */
3002 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3003 {
3004 /* Loop over operands for one constraint alternative. */
3005 /* LOSERS counts those that don't fit this alternative
3006 and would require loading. */
3007 int losers = 0;
3008 /* BAD is set to 1 if some operand can't fit this alternative
3009 even after reloading. */
3010 int bad = 0;
3011 /* REJECT is a count of how undesirable this alternative says it is
3012 if any reloading is required. If the alternative matches exactly
3013 then REJECT is ignored, but otherwise it gets this much
3014 counted against it in addition to the reloading needed. Each
3015 ? counts three times here since we want the disparagement caused by
3016 a bad register class to count only 1/3 as much. */
3017 int reject = 0;
3018
3019 if (swapped)
3020 {
3021 recog_data.operand[commutative] = substed_operand[commutative + 1];
3022 recog_data.operand[commutative + 1] = substed_operand[commutative];
3023 /* Swap the duplicates too. */
3024 for (i = 0; i < recog_data.n_dups; i++)
3025 if (recog_data.dup_num[i] == commutative
3026 || recog_data.dup_num[i] == commutative + 1)
3027 *recog_data.dup_loc[i]
3028 = recog_data.operand[(int) recog_data.dup_num[i]];
3029
3030 std::swap (preferred_class[commutative],
3031 preferred_class[commutative + 1]);
3032 std::swap (pref_or_nothing[commutative],
3033 pref_or_nothing[commutative + 1]);
3034 std::swap (address_reloaded[commutative],
3035 address_reloaded[commutative + 1]);
3036 }
3037
3038 this_earlyclobber = 0;
3039
3040 for (i = 0; i < noperands; i++)
3041 {
3042 const char *p = constraints[i];
3043 char *end;
3044 int len;
3045 int win = 0;
3046 int did_match = 0;
3047 /* 0 => this operand can be reloaded somehow for this alternative. */
3048 int badop = 1;
3049 /* 0 => this operand can be reloaded if the alternative allows regs. */
3050 int winreg = 0;
3051 int c;
3052 int m;
3053 rtx operand = recog_data.operand[i];
3054 int offset = 0;
3055 /* Nonzero means this is a MEM that must be reloaded into a reg
3056 regardless of what the constraint says. */
3057 int force_reload = 0;
3058 int offmemok = 0;
3059 /* Nonzero if a constant forced into memory would be OK for this
3060 operand. */
3061 int constmemok = 0;
3062 int earlyclobber = 0;
3063 enum constraint_num cn;
3064 enum reg_class cl;
3065
3066 /* If the predicate accepts a unary operator, it means that
3067 we need to reload the operand, but do not do this for
3068 match_operator and friends. */
3069 if (UNARY_P (operand) && *p != 0)
3070 operand = XEXP (operand, 0);
3071
3072 /* If the operand is a SUBREG, extract
3073 the REG or MEM (or maybe even a constant) within.
3074 (Constants can occur as a result of reg_equiv_constant.) */
3075
3076 while (GET_CODE (operand) == SUBREG)
3077 {
3078 /* Offset only matters when operand is a REG and
3079 it is a hard reg. This is because it is passed
3080 to reg_fits_class_p if it is a REG and all pseudos
3081 return 0 from that function. */
3082 if (REG_P (SUBREG_REG (operand))
3083 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3084 {
3085 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3086 GET_MODE (SUBREG_REG (operand)),
3087 SUBREG_BYTE (operand),
3088 GET_MODE (operand)) < 0)
3089 force_reload = 1;
3090 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3091 GET_MODE (SUBREG_REG (operand)),
3092 SUBREG_BYTE (operand),
3093 GET_MODE (operand));
3094 }
3095 operand = SUBREG_REG (operand);
3096 /* Force reload if this is a constant or PLUS or if there may
3097 be a problem accessing OPERAND in the outer mode. */
3098 scalar_int_mode inner_mode;
3099 if (CONSTANT_P (operand)
3100 || GET_CODE (operand) == PLUS
3101 /* We must force a reload of paradoxical SUBREGs
3102 of a MEM because the alignment of the inner value
3103 may not be enough to do the outer reference. On
3104 big-endian machines, it may also reference outside
3105 the object.
3106
3107 On machines that extend byte operations and we have a
3108 SUBREG where both the inner and outer modes are no wider
3109 than a word and the inner mode is narrower, is integral,
3110 and gets extended when loaded from memory, combine.c has
3111 made assumptions about the behavior of the machine in such
3112 register access. If the data is, in fact, in memory we
3113 must always load using the size assumed to be in the
3114 register and let the insn do the different-sized
3115 accesses.
3116
3117 This is doubly true if WORD_REGISTER_OPERATIONS. In
3118 this case eliminate_regs has left non-paradoxical
3119 subregs for push_reload to see. Make sure it does
3120 by forcing the reload.
3121
3122 ??? When is it right at this stage to have a subreg
3123 of a mem that is _not_ to be handled specially? IMO
3124 those should have been reduced to just a mem. */
3125 || ((MEM_P (operand)
3126 || (REG_P (operand)
3127 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3128 && (WORD_REGISTER_OPERATIONS
3129 || (((maybe_lt
3130 (GET_MODE_BITSIZE (GET_MODE (operand)),
3131 BIGGEST_ALIGNMENT))
3132 && (paradoxical_subreg_p
3133 (operand_mode[i], GET_MODE (operand)))))
3134 || BYTES_BIG_ENDIAN
3135 || (known_le (GET_MODE_SIZE (operand_mode[i]),
3136 UNITS_PER_WORD)
3137 && (is_a <scalar_int_mode>
3138 (GET_MODE (operand), &inner_mode))
3139 && (GET_MODE_SIZE (inner_mode)
3140 <= UNITS_PER_WORD)
3141 && paradoxical_subreg_p (operand_mode[i],
3142 inner_mode)
3143 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3144 )
3145 force_reload = 1;
3146 }
3147
3148 this_alternative[i] = NO_REGS;
3149 this_alternative_win[i] = 0;
3150 this_alternative_match_win[i] = 0;
3151 this_alternative_offmemok[i] = 0;
3152 this_alternative_earlyclobber[i] = 0;
3153 this_alternative_matches[i] = -1;
3154
3155 /* An empty constraint or empty alternative
3156 allows anything which matched the pattern. */
3157 if (*p == 0 || *p == ',')
3158 win = 1, badop = 0;
3159
3160 /* Scan this alternative's specs for this operand;
3161 set WIN if the operand fits any letter in this alternative.
3162 Otherwise, clear BADOP if this operand could
3163 fit some letter after reloads,
3164 or set WINREG if this operand could fit after reloads
3165 provided the constraint allows some registers. */
3166
3167 do
3168 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3169 {
3170 case '\0':
3171 len = 0;
3172 break;
3173 case ',':
3174 c = '\0';
3175 break;
3176
3177 case '?':
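/* `?' disparages this alternative.  LOSERS is scaled by six below, so
   each `?' costs the same as one extra reload.  */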
3178 reject += 6;
3179 break;
3180
3181 case '!':
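/* `!' means this alternative should not be used when reloading; give it
   a REJECT value so large that it is chosen only if nothing better can
   be reloaded.  */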
3182 reject = 600;
3183 break;
3184
3185 case '#':
3186 /* Ignore rest of this alternative as far as
3187 reloading is concerned. */
3188 do
3189 p++;
3190 while (*p && *p != ',');
3191 len = 0;
3192 break;
3193
3194 case '0': case '1': case '2': case '3': case '4':
3195 case '5': case '6': case '7': case '8': case '9':
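/* A matching constraint.  The operand number can have more than one
   digit, so parse it with strtoul and advance P past it by hand; LEN is
   cleared because P has already been updated.  */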
3196 m = strtoul (p, &end, 10);
3197 p = end;
3198 len = 0;
3199
3200 this_alternative_matches[i] = m;
3201 /* We are supposed to match a previous operand.
3202 If we do, we win if that one did.
3203 If we do not, count both of the operands as losers.
3204 (This is too conservative, since most of the time
3205 only a single reload insn will be needed to make
3206 the two operands win. As a result, this alternative
3207 may be rejected when it is actually desirable.) */
3208 if ((swapped && (m != commutative || i != commutative + 1))
3209 /* If we are matching as if two operands were swapped,
3210 also pretend that operands_match had been computed
3211 with swapped.
3212 But if I is the second of those and C is the first,
3213 don't exchange them, because operands_match is valid
3214 only on one side of its diagonal. */
3215 ? (operands_match
3216 [(m == commutative || m == commutative + 1)
3217 ? 2 * commutative + 1 - m : m]
3218 [(i == commutative || i == commutative + 1)
3219 ? 2 * commutative + 1 - i : i])
3220 : operands_match[m][i])
3221 {
3222 /* If we are matching a non-offsettable address where an
3223 offsettable address was expected, then we must reject
3224 this combination, because we can't reload it. */
3225 if (this_alternative_offmemok[m]
3226 && MEM_P (recog_data.operand[m])
3227 && this_alternative[m] == NO_REGS
3228 && ! this_alternative_win[m])
3229 bad = 1;
3230
3231 did_match = this_alternative_win[m];
3232 }
3233 else
3234 {
3235 /* Operands don't match. */
3236 rtx value;
3237 int loc1, loc2;
3238 /* Retroactively mark the operand we had to match
3239 as a loser, if it wasn't already. */
3240 if (this_alternative_win[m])
3241 losers++;
3242 this_alternative_win[m] = 0;
3243 if (this_alternative[m] == NO_REGS)
3244 bad = 1;
3245 /* But count the pair only once in the total badness of
3246 this alternative, if the pair can be a dummy reload.
3247 The pointers in operand_loc are not swapped; swap
3248 them by hand if necessary. */
3249 if (swapped && i == commutative)
3250 loc1 = commutative + 1;
3251 else if (swapped && i == commutative + 1)
3252 loc1 = commutative;
3253 else
3254 loc1 = i;
3255 if (swapped && m == commutative)
3256 loc2 = commutative + 1;
3257 else if (swapped && m == commutative + 1)
3258 loc2 = commutative;
3259 else
3260 loc2 = m;
3261 value
3262 = find_dummy_reload (recog_data.operand[i],
3263 recog_data.operand[m],
3264 recog_data.operand_loc[loc1],
3265 recog_data.operand_loc[loc2],
3266 operand_mode[i], operand_mode[m],
3267 this_alternative[m], -1,
3268 this_alternative_earlyclobber[m]);
3269
3270 if (value != 0)
3271 losers--;
3272 }
3273 /* This can be fixed with reloads if the operand
3274 we are supposed to match can be fixed with reloads. */
3275 badop = 0;
3276 this_alternative[i] = this_alternative[m];
3277
3278 /* If we have to reload this operand and some previous
3279 operand also had to match the same thing as this
3280 operand, we don't know how to do that. So reject this
3281 alternative. */
3282 if (! did_match || force_reload)
3283 for (j = 0; j < i; j++)
3284 if (this_alternative_matches[j]
3285 == this_alternative_matches[i])
3286 {
3287 badop = 1;
3288 break;
3289 }
3290 break;
3291
3292 case 'p':
3293 /* All necessary reloads for an address_operand
3294 were handled in find_reloads_address. */
3295 this_alternative[i]
3296 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3297 ADDRESS, SCRATCH);
3298 win = 1;
3299 badop = 0;
3300 break;
3301
3302 case TARGET_MEM_CONSTRAINT:
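/* `m' is satisfied by any memory reference, or by a pseudo that did not
   get a hard register and will therefore live in memory.  A constant
   could be made to fit by forcing it into the constant pool.  */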
3303 if (force_reload)
3304 break;
3305 if (MEM_P (operand)
3306 || (REG_P (operand)
3307 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3308 && reg_renumber[REGNO (operand)] < 0))
3309 win = 1;
3310 if (CONST_POOL_OK_P (operand_mode[i], operand))
3311 badop = 0;
3312 constmemok = 1;
3313 break;
3314
3315 case '<':
3316 if (MEM_P (operand)
3317 && ! address_reloaded[i]
3318 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3319 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3320 win = 1;
3321 break;
3322
3323 case '>':
3324 if (MEM_P (operand)
3325 && ! address_reloaded[i]
3326 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3327 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3328 win = 1;
3329 break;
3330
3331 /* Memory operand whose address is not offsettable. */
3332 case 'V':
3333 if (force_reload)
3334 break;
3335 if (MEM_P (operand)
3336 && ! (ind_levels ? offsettable_memref_p (operand)
3337 : offsettable_nonstrict_memref_p (operand))
3338 /* Certain mem addresses will become offsettable
3339 after they themselves are reloaded. This is important;
3340 we don't want our own handling of unoffsettables
3341 to override the handling of reg_equiv_address. */
3342 && !(REG_P (XEXP (operand, 0))
3343 && (ind_levels == 0
3344 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3345 win = 1;
3346 break;
3347
3348 /* Memory operand whose address is offsettable. */
3349 case 'o':
3350 if (force_reload)
3351 break;
3352 if ((MEM_P (operand)
3353 /* If IND_LEVELS, find_reloads_address won't reload a
3354 pseudo that didn't get a hard reg, so we have to
3355 reject that case. */
3356 && ((ind_levels ? offsettable_memref_p (operand)
3357 : offsettable_nonstrict_memref_p (operand))
3358 /* A reloaded address is offsettable because it is now
3359 just a simple register indirect. */
3360 || address_reloaded[i] == 1))
3361 || (REG_P (operand)
3362 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3363 && reg_renumber[REGNO (operand)] < 0
3364 /* If reg_equiv_address is nonzero, we will be
3365 loading it into a register; hence it will be
3366 offsettable, but we cannot say that reg_equiv_mem
3367 is offsettable without checking. */
3368 && ((reg_equiv_mem (REGNO (operand)) != 0
3369 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3370 || (reg_equiv_address (REGNO (operand)) != 0))))
3371 win = 1;
3372 if (CONST_POOL_OK_P (operand_mode[i], operand)
3373 || MEM_P (operand))
3374 badop = 0;
3375 constmemok = 1;
3376 offmemok = 1;
3377 break;
3378
3379 case '&':
3380 /* Output operand that is stored before the need for the
3381 input operands (and their index registers) is over. */
3382 earlyclobber = 1, this_earlyclobber = 1;
3383 break;
3384
3385 case 'X':
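/* `X' accepts the operand as-is, even one that would otherwise have to
   be reloaded because of a problematic SUBREG, so clear FORCE_RELOAD.  */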
3386 force_reload = 0;
3387 win = 1;
3388 break;
3389
3390 case 'g':
3391 if (! force_reload
3392 /* A PLUS is never a valid operand, but reload can make
3393 it from a register when eliminating registers. */
3394 && GET_CODE (operand) != PLUS
3395 /* A SCRATCH is not a valid operand. */
3396 && GET_CODE (operand) != SCRATCH
3397 && (! CONSTANT_P (operand)
3398 || ! flag_pic
3399 || LEGITIMATE_PIC_OPERAND_P (operand))
3400 && (GENERAL_REGS == ALL_REGS
3401 || !REG_P (operand)
3402 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3403 && reg_renumber[REGNO (operand)] < 0)))
3404 win = 1;
3405 cl = GENERAL_REGS;
3406 goto reg;
3407
3408 default:
3409 cn = lookup_constraint (p);
3410 switch (get_constraint_type (cn))
3411 {
3412 case CT_REGISTER:
3413 cl = reg_class_for_constraint (cn);
3414 if (cl != NO_REGS)
3415 goto reg;
3416 break;
3417
3418 case CT_CONST_INT:
3419 if (CONST_INT_P (operand)
3420 && (insn_const_int_ok_for_constraint
3421 (INTVAL (operand), cn)))
3422 win = true;
3423 break;
3424
3425 case CT_MEMORY:
3426 if (force_reload)
3427 break;
3428 if (constraint_satisfied_p (operand, cn))
3429 win = 1;
3430 /* If the address was already reloaded,
3431 we win as well. */
3432 else if (MEM_P (operand) && address_reloaded[i] == 1)
3433 win = 1;
3434 /* Likewise if the address will be reloaded because
3435 reg_equiv_address is nonzero. For reg_equiv_mem
3436 we have to check. */
3437 else if (REG_P (operand)
3438 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3439 && reg_renumber[REGNO (operand)] < 0
3440 && ((reg_equiv_mem (REGNO (operand)) != 0
3441 && (constraint_satisfied_p
3442 (reg_equiv_mem (REGNO (operand)),
3443 cn)))
3444 || (reg_equiv_address (REGNO (operand))
3445 != 0)))
3446 win = 1;
3447
3448 /* If we didn't already win, we can reload
3449 constants via force_const_mem, and other
3450 MEMs by reloading the address like for 'o'. */
3451 if (CONST_POOL_OK_P (operand_mode[i], operand)
3452 || MEM_P (operand))
3453 badop = 0;
3454 constmemok = 1;
3455 offmemok = 1;
3456 break;
3457
3458 case CT_SPECIAL_MEMORY:
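/* Unlike CT_MEMORY, we do not know how to fix up an arbitrary operand
   to satisfy a special memory constraint, so BADOP is not cleared here;
   the operand only wins if the constraint is already satisfied
   (possibly via reg_equiv_mem).  */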
3459 if (force_reload)
3460 break;
3461 if (constraint_satisfied_p (operand, cn))
3462 win = 1;
3463 /* Likewise if the address will be reloaded because
3464 reg_equiv_address is nonzero. For reg_equiv_mem
3465 we have to check. */
3466 else if (REG_P (operand)
3467 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3468 && reg_renumber[REGNO (operand)] < 0
3469 && reg_equiv_mem (REGNO (operand)) != 0
3470 && (constraint_satisfied_p
3471 (reg_equiv_mem (REGNO (operand)), cn)))
3472 win = 1;
3473 break;
3474
3475 case CT_ADDRESS:
3476 if (constraint_satisfied_p (operand, cn))
3477 win = 1;
3478
3479 /* If we didn't already win, we can reload
3480 the address into a base register. */
3481 this_alternative[i]
3482 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3483 ADDRESS, SCRATCH);
3484 badop = 0;
3485 break;
3486
3487 case CT_FIXED_FORM:
3488 if (constraint_satisfied_p (operand, cn))
3489 win = 1;
3490 break;
3491 }
3492 break;
3493
3494 reg:
3495 this_alternative[i]
3496 = reg_class_subunion[this_alternative[i]][cl];
3497 if (GET_MODE (operand) == BLKmode)
3498 break;
3499 winreg = 1;
3500 if (REG_P (operand)
3501 && reg_fits_class_p (operand, this_alternative[i],
3502 offset, GET_MODE (recog_data.operand[i])))
3503 win = 1;
3504 break;
3505 }
3506 while ((p += len), c);
3507
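/* Advance CONSTRAINTS[I] past this alternative only on the final pass
   over it (the swapped pass for a commutative insn), so the outer loop
   over alternatives starts each operand at its next alternative.  */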
3508 if (swapped == (commutative >= 0 ? 1 : 0))
3509 constraints[i] = p;
3510
3511 /* If this operand could be handled with a reg,
3512 and some reg is allowed, then this operand can be handled. */
3513 if (winreg && this_alternative[i] != NO_REGS
3514 && (win || !class_only_fixed_regs[this_alternative[i]]))
3515 badop = 0;
3516
3517 /* Record which operands fit this alternative. */
3518 this_alternative_earlyclobber[i] = earlyclobber;
3519 if (win && ! force_reload)
3520 this_alternative_win[i] = 1;
3521 else if (did_match && ! force_reload)
3522 this_alternative_match_win[i] = 1;
3523 else
3524 {
3525 int const_to_mem = 0;
3526
3527 this_alternative_offmemok[i] = offmemok;
3528 losers++;
3529 if (badop)
3530 bad = 1;
3531 /* Alternative loses if it has no regs for a reg operand. */
3532 if (REG_P (operand)
3533 && this_alternative[i] == NO_REGS
3534 && this_alternative_matches[i] < 0)
3535 bad = 1;
3536
3537 /* If this is a constant that is reloaded into the desired
3538 class by copying it to memory first, count that as another
3539 reload. This is consistent with other code and is
3540 required to avoid choosing another alternative when
3541 the constant is moved into memory by this function on
3542 an early reload pass. Note that the test here is
3543 precisely the same as in the code below that calls
3544 force_const_mem. */
3545 if (CONST_POOL_OK_P (operand_mode[i], operand)
3546 && ((targetm.preferred_reload_class (operand,
3547 this_alternative[i])
3548 == NO_REGS)
3549 || no_input_reloads))
3550 {
3551 const_to_mem = 1;
3552 if (this_alternative[i] != NO_REGS)
3553 losers++;
3554 }
3555
3556 /* Alternative loses if it requires a type of reload not
3557 permitted for this insn. We can always reload SCRATCH
3558 and objects with a REG_UNUSED note. */
3559 if (GET_CODE (operand) != SCRATCH
3560 && modified[i] != RELOAD_READ && no_output_reloads
3561 && ! find_reg_note (insn, REG_UNUSED, operand))
3562 bad = 1;
3563 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3564 && ! const_to_mem)
3565 bad = 1;
3566
3567 /* If we can't reload this value at all, reject this
3568 alternative. Note that we could also lose due to
3569 LIMIT_RELOAD_CLASS, but we don't check that
3570 here. */
3571
3572 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3573 {
3574 if (targetm.preferred_reload_class (operand,
3575 this_alternative[i])
3576 == NO_REGS)
3577 reject = 600;
3578
3579 if (operand_type[i] == RELOAD_FOR_OUTPUT
3580 && (targetm.preferred_output_reload_class (operand,
3581 this_alternative[i])
3582 == NO_REGS))
3583 reject = 600;
3584 }
3585
3586 /* We prefer to reload pseudos over reloading other things,
3587 since such reloads may be able to be eliminated later.
3588 If we are reloading a SCRATCH, we won't be generating any
3589 insns, just using a register, so it is also preferred.
3590 So bump REJECT in other cases. Don't do this in the
3591 case where we are forcing a constant into memory and
3592 it will then win, since we don't want a different
3593 alternative to match in that case. */
3594 if (! (REG_P (operand)
3595 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3596 && GET_CODE (operand) != SCRATCH
3597 && ! (const_to_mem && constmemok))
3598 reject += 2;
3599
3600 /* Input reloads can be inherited more often than output
3601 reloads can be removed, so penalize output reloads. */
3602 if (operand_type[i] != RELOAD_FOR_INPUT
3603 && GET_CODE (operand) != SCRATCH)
3604 reject++;
3605 }
3606
3607 /* If this operand is a pseudo register that didn't get
3608 a hard reg and this alternative accepts some
3609 register, see if the class that we want is a subset
3610 of the preferred class for this register. If not,
3611 but it intersects that class, use the preferred class
3612 instead. If it does not intersect the preferred
3613 class, show that usage of this alternative should be
3614 discouraged; it will be discouraged more still if the
3615 register is `preferred or nothing'. We do this
3616 because it increases the chance of reusing our spill
3617 register in a later insn and avoiding a pair of
3618 memory stores and loads.
3619
3620 Don't bother with this if this alternative will
3621 accept this operand.
3622
3623 Don't do this for a multiword operand, since it is
3624 only a small win and has the risk of requiring more
3625 spill registers, which could cause a large loss.
3626
3627 Don't do this if the preferred class has only one
3628 register because we might otherwise exhaust the
3629 class. */
3630
3631 if (! win && ! did_match
3632 && this_alternative[i] != NO_REGS
3633 && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3634 && reg_class_size [(int) preferred_class[i]] > 0
3635 && ! small_register_class_p (preferred_class[i]))
3636 {
3637 if (! reg_class_subset_p (this_alternative[i],
3638 preferred_class[i]))
3639 {
3640 /* Since we don't have a way of forming the intersection,
3641 we just do something special if the preferred class
3642 is a subset of the class we have; that's the most
3643 common case anyway. */
3644 if (reg_class_subset_p (preferred_class[i],
3645 this_alternative[i]))
3646 this_alternative[i] = preferred_class[i];
3647 else
3648 reject += (2 + 2 * pref_or_nothing[i]);
3649 }
3650 }
3651 }
3652
3653 /* Now see if any output operands that are marked "earlyclobber"
3654 in this alternative conflict with any input operands
3655 or any memory addresses. */
3656
3657 for (i = 0; i < noperands; i++)
3658 if (this_alternative_earlyclobber[i]
3659 && (this_alternative_win[i] || this_alternative_match_win[i]))
3660 {
3661 struct decomposition early_data;
3662
3663 early_data = decompose (recog_data.operand[i]);
3664
3665 gcc_assert (modified[i] != RELOAD_READ);
3666
3667 if (this_alternative[i] == NO_REGS)
3668 {
3669 this_alternative_earlyclobber[i] = 0;
3670 gcc_assert (this_insn_is_asm);
3671 error_for_asm (this_insn,
3672 "%<&%> constraint used with no register class");
3673 }
3674
3675 for (j = 0; j < noperands; j++)
3676 /* Is this an input operand or a memory ref? */
3677 if ((MEM_P (recog_data.operand[j])
3678 || modified[j] != RELOAD_WRITE)
3679 && j != i
3680 /* Ignore things like match_operator operands. */
3681 && !recog_data.is_operator[j]
3682 /* Don't count an input operand that is constrained to match
3683 the early clobber operand. */
3684 && ! (this_alternative_matches[j] == i
3685 && rtx_equal_p (recog_data.operand[i],
3686 recog_data.operand[j]))
3687 /* Is it altered by storing the earlyclobber operand? */
3688 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3689 early_data))
3690 {
3691 /* If the output is in a non-empty few-regs class,
3692 it's costly to reload it, so reload the input instead. */
3693 if (small_register_class_p (this_alternative[i])
3694 && (REG_P (recog_data.operand[j])
3695 || GET_CODE (recog_data.operand[j]) == SUBREG))
3696 {
3697 losers++;
3698 this_alternative_win[j] = 0;
3699 this_alternative_match_win[j] = 0;
3700 }
3701 else
3702 break;
3703 }
3704 /* If an earlyclobber operand conflicts with something,
3705 it must be reloaded, so request this and count the cost. */
3706 if (j != noperands)
3707 {
3708 losers++;
3709 this_alternative_win[i] = 0;
3710 this_alternative_match_win[j] = 0;
3711 for (j = 0; j < noperands; j++)
3712 if (this_alternative_matches[j] == i
3713 && this_alternative_match_win[j])
3714 {
3715 this_alternative_win[j] = 0;
3716 this_alternative_match_win[j] = 0;
3717 losers++;
3718 }
3719 }
3720 }
3721
3722 /* If one alternative accepts all the operands, no reload required,
3723 choose that alternative; don't consider the remaining ones. */
3724 if (losers == 0)
3725 {
3726 /* Unswap these so that they are never swapped at `finish'. */
3727 if (swapped)
3728 {
3729 recog_data.operand[commutative] = substed_operand[commutative];
3730 recog_data.operand[commutative + 1]
3731 = substed_operand[commutative + 1];
3732 }
3733 for (i = 0; i < noperands; i++)
3734 {
3735 goal_alternative_win[i] = this_alternative_win[i];
3736 goal_alternative_match_win[i] = this_alternative_match_win[i];
3737 goal_alternative[i] = this_alternative[i];
3738 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3739 goal_alternative_matches[i] = this_alternative_matches[i];
3740 goal_alternative_earlyclobber[i]
3741 = this_alternative_earlyclobber[i];
3742 }
3743 goal_alternative_number = this_alternative_number;
3744 goal_alternative_swapped = swapped;
3745 goal_earlyclobber = this_earlyclobber;
3746 goto finish;
3747 }
3748
3749 /* REJECT, set by the ! and ? constraint characters and when a register
3750 would be reloaded into a non-preferred class, discourages the use of
3751 this alternative for a reload goal. REJECT is incremented by six
3752 for each ? and two for each non-preferred class. */
3753 losers = losers * 6 + reject;
3754
3755 /* If this alternative can be made to work by reloading,
3756 and it needs less reloading than the others checked so far,
3757 record it as the chosen goal for reloading. */
3758 if (! bad)
3759 {
3760 if (best > losers)
3761 {
3762 for (i = 0; i < noperands; i++)
3763 {
3764 goal_alternative[i] = this_alternative[i];
3765 goal_alternative_win[i] = this_alternative_win[i];
3766 goal_alternative_match_win[i]
3767 = this_alternative_match_win[i];
3768 goal_alternative_offmemok[i]
3769 = this_alternative_offmemok[i];
3770 goal_alternative_matches[i] = this_alternative_matches[i];
3771 goal_alternative_earlyclobber[i]
3772 = this_alternative_earlyclobber[i];
3773 }
3774 goal_alternative_swapped = swapped;
3775 best = losers;
3776 goal_alternative_number = this_alternative_number;
3777 goal_earlyclobber = this_earlyclobber;
3778 }
3779 }
3780
3781 if (swapped)
3782 {
3783 /* If the commutative operands have been swapped, swap
3784 them back in order to check the next alternative. */
3785 recog_data.operand[commutative] = substed_operand[commutative];
3786 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3787 /* Unswap the duplicates too. */
3788 for (i = 0; i < recog_data.n_dups; i++)
3789 if (recog_data.dup_num[i] == commutative
3790 || recog_data.dup_num[i] == commutative + 1)
3791 *recog_data.dup_loc[i]
3792 = recog_data.operand[(int) recog_data.dup_num[i]];
3793
3794 /* Unswap the operand related information as well. */
3795 std::swap (preferred_class[commutative],
3796 preferred_class[commutative + 1]);
3797 std::swap (pref_or_nothing[commutative],
3798 pref_or_nothing[commutative + 1]);
3799 std::swap (address_reloaded[commutative],
3800 address_reloaded[commutative + 1]);
3801 }
3802 }
3803 }
3804
3805 /* The operands don't meet the constraints.
3806 goal_alternative describes the alternative
3807 that we could reach by reloading the fewest operands.
3808 Reload so as to fit it. */
3809
3810 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3811 {
3812 /* No alternative works with reloads?? */
3813 if (insn_code_number >= 0)
3814 fatal_insn ("unable to generate reloads for:", insn);
3815 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3816 /* Avoid further trouble with this insn. */
3817 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3818 n_reloads = 0;
3819 return 0;
3820 }
3821
3822 /* Jump to `finish' from above if all operands are valid already.
3823 In that case, goal_alternative_win is all 1. */
3824 finish:
3825
3826 /* Right now, for any pair of operands I and J that are required to match,
3827 with I < J,
3828 goal_alternative_matches[J] is I.
3829 Set up goal_alternative_matched as the inverse function:
3830 goal_alternative_matched[I] = J. */
3831
3832 for (i = 0; i < noperands; i++)
3833 goal_alternative_matched[i] = -1;
3834
3835 for (i = 0; i < noperands; i++)
3836 if (! goal_alternative_win[i]
3837 && goal_alternative_matches[i] >= 0)
3838 goal_alternative_matched[goal_alternative_matches[i]] = i;
3839
3840 for (i = 0; i < noperands; i++)
3841 goal_alternative_win[i] |= goal_alternative_match_win[i];
3842
3843 /* If the best alternative is with operands 1 and 2 swapped,
3844 consider them swapped before reporting the reloads. Update the
3845 operand numbers of any reloads already pushed. */
3846
3847 if (goal_alternative_swapped)
3848 {
3849 std::swap (substed_operand[commutative],
3850 substed_operand[commutative + 1]);
3851 std::swap (recog_data.operand[commutative],
3852 recog_data.operand[commutative + 1]);
3853 std::swap (*recog_data.operand_loc[commutative],
3854 *recog_data.operand_loc[commutative + 1]);
3855
3856 for (i = 0; i < recog_data.n_dups; i++)
3857 if (recog_data.dup_num[i] == commutative
3858 || recog_data.dup_num[i] == commutative + 1)
3859 *recog_data.dup_loc[i]
3860 = recog_data.operand[(int) recog_data.dup_num[i]];
3861
3862 for (i = 0; i < n_reloads; i++)
3863 {
3864 if (rld[i].opnum == commutative)
3865 rld[i].opnum = commutative + 1;
3866 else if (rld[i].opnum == commutative + 1)
3867 rld[i].opnum = commutative;
3868 }
3869 }
3870
3871 for (i = 0; i < noperands; i++)
3872 {
3873 operand_reloadnum[i] = -1;
3874
3875 /* If this is an earlyclobber operand, we need to widen the scope.
3876 The reload must remain valid from the start of the insn being
3877 reloaded until after the operand is stored into its destination.
3878 We approximate this with RELOAD_OTHER even though we know that we
3879 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3880
3881 One special case that is worth checking is when we have an
3882 output that is earlyclobber but isn't used past the insn (typically
3883 a SCRATCH). In this case, we need only keep the reload live
3884 through the insn itself, but not for any of our input or output
3885 reloads.
3886 But we must not accidentally narrow the scope of an existing
3887 RELOAD_OTHER reload - leave these alone.
3888
3889 In any case, anything needed to address this operand can remain
3890 however it was previously categorized. */
3891
3892 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3893 operand_type[i]
3894 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3895 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3896 }
3897
3898 /* Any constants that aren't allowed and can't be reloaded
3899 into registers are here changed into memory references. */
3900 for (i = 0; i < noperands; i++)
3901 if (! goal_alternative_win[i])
3902 {
3903 rtx op = recog_data.operand[i];
3904 rtx subreg = NULL_RTX;
3905 rtx plus = NULL_RTX;
3906 machine_mode mode = operand_mode[i];
3907
3908 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3909 push_reload so we have to let them pass here. */
3910 if (GET_CODE (op) == SUBREG)
3911 {
3912 subreg = op;
3913 op = SUBREG_REG (op);
3914 mode = GET_MODE (op);
3915 }
3916
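/* Register elimination can leave a (plus reg const) here; strip the
   PLUS so that only its second operand is considered for the constant
   pool, and rebuild the PLUS around the new MEM below.  */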
3917 if (GET_CODE (op) == PLUS)
3918 {
3919 plus = op;
3920 op = XEXP (op, 1);
3921 }
3922
3923 if (CONST_POOL_OK_P (mode, op)
3924 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3925 == NO_REGS)
3926 || no_input_reloads))
3927 {
3928 int this_address_reloaded;
3929 rtx tem = force_const_mem (mode, op);
3930
3931 /* If we stripped a SUBREG or a PLUS above add it back. */
3932 if (plus != NULL_RTX)
3933 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3934
3935 if (subreg != NULL_RTX)
3936 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3937
3938 this_address_reloaded = 0;
3939 substed_operand[i] = recog_data.operand[i]
3940 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3941 0, insn, &this_address_reloaded);
3942
3943 /* If the alternative accepts constant pool refs directly
3944 there will be no reload needed at all. */
3945 if (plus == NULL_RTX
3946 && subreg == NULL_RTX
3947 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3948 ? substed_operand[i]
3949 : NULL,
3950 recog_data.constraints[i],
3951 goal_alternative_number))
3952 goal_alternative_win[i] = 1;
3953 }
3954 }
3955
3956 /* Record the values of the earlyclobber operands for the caller. */
3957 if (goal_earlyclobber)
3958 for (i = 0; i < noperands; i++)
3959 if (goal_alternative_earlyclobber[i])
3960 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3961
3962 /* Now record reloads for all the operands that need them. */
3963 for (i = 0; i < noperands; i++)
3964 if (! goal_alternative_win[i])
3965 {
3966 /* Operands that match previous ones have already been handled. */
3967 if (goal_alternative_matches[i] >= 0)
3968 ;
3969 /* Handle an operand with a nonoffsettable address
3970 appearing where an offsettable address will do
3971 by reloading the address into a base register.
3972
3973 ??? We can also do this when the operand is a register and
3974 reg_equiv_mem is not offsettable, but this is a bit tricky,
3975 so we don't bother with it. It may not be worth doing. */
3976 else if (goal_alternative_matched[i] == -1
3977 && goal_alternative_offmemok[i]
3978 && MEM_P (recog_data.operand[i]))
3979 {
3980 /* If the address to be reloaded is a VOIDmode constant,
3981 use the default address mode as the mode of the reload register,
3982 as would have been done by find_reloads_address. */
3983 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3984 machine_mode address_mode;
3985
3986 address_mode = get_address_mode (recog_data.operand[i]);
3987 operand_reloadnum[i]
3988 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3989 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3990 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3991 address_mode,
3992 VOIDmode, 0, 0, i, RELOAD_OTHER);
3993 rld[operand_reloadnum[i]].inc
3994 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3995
3996 /* If this operand is an output, we will have made any
3997 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3998 now we are treating part of the operand as an input, so
3999 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4000
4001 if (modified[i] == RELOAD_WRITE)
4002 {
4003 for (j = 0; j < n_reloads; j++)
4004 {
4005 if (rld[j].opnum == i)
4006 {
4007 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4008 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4009 else if (rld[j].when_needed
4010 == RELOAD_FOR_OUTADDR_ADDRESS)
4011 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4012 }
4013 }
4014 }
4015 }
4016 else if (goal_alternative_matched[i] == -1)
4017 {
4018 operand_reloadnum[i]
4019 = push_reload ((modified[i] != RELOAD_WRITE
4020 ? recog_data.operand[i] : 0),
4021 (modified[i] != RELOAD_READ
4022 ? recog_data.operand[i] : 0),
4023 (modified[i] != RELOAD_WRITE
4024 ? recog_data.operand_loc[i] : 0),
4025 (modified[i] != RELOAD_READ
4026 ? recog_data.operand_loc[i] : 0),
4027 (enum reg_class) goal_alternative[i],
4028 (modified[i] == RELOAD_WRITE
4029 ? VOIDmode : operand_mode[i]),
4030 (modified[i] == RELOAD_READ
4031 ? VOIDmode : operand_mode[i]),
4032 (insn_code_number < 0 ? 0
4033 : insn_data[insn_code_number].operand[i].strict_low),
4034 0, i, operand_type[i]);
4035 }
4036 /* In a matching pair of operands, one must be input only
4037 and the other must be output only.
4038 Pass the input operand as IN and the other as OUT. */
4039 else if (modified[i] == RELOAD_READ
4040 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4041 {
4042 operand_reloadnum[i]
4043 = push_reload (recog_data.operand[i],
4044 recog_data.operand[goal_alternative_matched[i]],
4045 recog_data.operand_loc[i],
4046 recog_data.operand_loc[goal_alternative_matched[i]],
4047 (enum reg_class) goal_alternative[i],
4048 operand_mode[i],
4049 operand_mode[goal_alternative_matched[i]],
4050 0, 0, i, RELOAD_OTHER);
4051 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4052 }
4053 else if (modified[i] == RELOAD_WRITE
4054 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4055 {
4056 operand_reloadnum[goal_alternative_matched[i]]
4057 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4058 recog_data.operand[i],
4059 recog_data.operand_loc[goal_alternative_matched[i]],
4060 recog_data.operand_loc[i],
4061 (enum reg_class) goal_alternative[i],
4062 operand_mode[goal_alternative_matched[i]],
4063 operand_mode[i],
4064 0, 0, i, RELOAD_OTHER);
4065 operand_reloadnum[i] = output_reloadnum;
4066 }
4067 else
4068 {
4069 gcc_assert (insn_code_number < 0);
4070 error_for_asm (insn, "inconsistent operand constraints "
4071 "in an %<asm%>");
4072 /* Avoid further trouble with this insn. */
4073 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4074 n_reloads = 0;
4075 return 0;
4076 }
4077 }
4078 else if (goal_alternative_matched[i] < 0
4079 && goal_alternative_matches[i] < 0
4080 && address_operand_reloaded[i] != 1
4081 && optimize)
4082 {
4083 /* For each non-matching operand that's a MEM or a pseudo-register
4084 that didn't get a hard register, make an optional reload.
4085 This may get done even if the insn needs no reloads otherwise. */
4086
4087 rtx operand = recog_data.operand[i];
4088
4089 while (GET_CODE (operand) == SUBREG)
4090 operand = SUBREG_REG (operand);
4091 if ((MEM_P (operand)
4092 || (REG_P (operand)
4093 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4094 /* If this is only for an output, the optional reload would not
4095 actually cause us to use a register now, just note that
4096 something is stored here. */
4097 && (goal_alternative[i] != NO_REGS
4098 || modified[i] == RELOAD_WRITE)
4099 && ! no_input_reloads
4100 /* An optional output reload might allow us to delete INSN later.
4101 We mustn't make in-out reloads on insns for which output
4102 reloads are not permitted.
4103 If this is an asm, we can't delete it; we must not even call
4104 push_reload for an optional output reload in this case,
4105 because we can't be sure that the constraint allows a register,
4106 and push_reload verifies the constraints for asms. */
4107 && (modified[i] == RELOAD_READ
4108 || (! no_output_reloads && ! this_insn_is_asm)))
4109 operand_reloadnum[i]
4110 = push_reload ((modified[i] != RELOAD_WRITE
4111 ? recog_data.operand[i] : 0),
4112 (modified[i] != RELOAD_READ
4113 ? recog_data.operand[i] : 0),
4114 (modified[i] != RELOAD_WRITE
4115 ? recog_data.operand_loc[i] : 0),
4116 (modified[i] != RELOAD_READ
4117 ? recog_data.operand_loc[i] : 0),
4118 (enum reg_class) goal_alternative[i],
4119 (modified[i] == RELOAD_WRITE
4120 ? VOIDmode : operand_mode[i]),
4121 (modified[i] == RELOAD_READ
4122 ? VOIDmode : operand_mode[i]),
4123 (insn_code_number < 0 ? 0
4124 : insn_data[insn_code_number].operand[i].strict_low),
4125 1, i, operand_type[i]);
4126 /* If a memory reference remains (either as a MEM or a pseudo that
4127 did not get a hard register), yet we can't make an optional
4128 reload, check if this is actually a pseudo register reference;
4129 we then need to emit a USE and/or a CLOBBER so that reload
4130 inheritance will do the right thing. */
4131 else if (replace
4132 && (MEM_P (operand)
4133 || (REG_P (operand)
4134 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4135 && reg_renumber [REGNO (operand)] < 0)))
4136 {
4137 operand = *recog_data.operand_loc[i];
4138
4139 while (GET_CODE (operand) == SUBREG)
4140 operand = SUBREG_REG (operand);
4141 if (REG_P (operand))
4142 {
4143 if (modified[i] != RELOAD_WRITE)
4144 /* We mark the USE with QImode so that we recognize
4145 it as one that can be safely deleted at the end
4146 of reload. */
4147 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4148 insn), QImode);
4149 if (modified[i] != RELOAD_READ)
4150 emit_insn_after (gen_clobber (operand), insn);
4151 }
4152 }
4153 }
4154 else if (goal_alternative_matches[i] >= 0
4155 && goal_alternative_win[goal_alternative_matches[i]]
4156 && modified[i] == RELOAD_READ
4157 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4158 && ! no_input_reloads && ! no_output_reloads
4159 && optimize)
4160 {
4161 /* Similarly, make an optional reload for a pair of matching
4162 objects that are in MEM or a pseudo that didn't get a hard reg. */
4163
4164 rtx operand = recog_data.operand[i];
4165
4166 while (GET_CODE (operand) == SUBREG)
4167 operand = SUBREG_REG (operand);
4168 if ((MEM_P (operand)
4169 || (REG_P (operand)
4170 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4171 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4172 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4173 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4174 recog_data.operand[i],
4175 recog_data.operand_loc[goal_alternative_matches[i]],
4176 recog_data.operand_loc[i],
4177 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4178 operand_mode[goal_alternative_matches[i]],
4179 operand_mode[i],
4180 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4181 }
4182
4183 /* Perform whatever substitutions on the operands we are supposed
4184 to make due to commutativity or replacement of registers
4185 with equivalent constants or memory slots. */
4186
4187 for (i = 0; i < noperands; i++)
4188 {
4189 /* We only do this on the last pass through reload, because it is
4190 possible for some data (like reg_equiv_address) to be changed during
4191 later passes. Moreover, we lose the opportunity to get a useful
4192 reload_{in,out}_reg when we do these replacements. */
4193
4194 if (replace)
4195 {
4196 rtx substitution = substed_operand[i];
4197
4198 *recog_data.operand_loc[i] = substitution;
4199
4200 /* If we're replacing an operand with a LABEL_REF, we need to
4201 make sure that there's a REG_LABEL_OPERAND note attached to
4202 this instruction. */
4203 if (GET_CODE (substitution) == LABEL_REF
4204 && !find_reg_note (insn, REG_LABEL_OPERAND,
4205 label_ref_label (substitution))
4206 /* For a JUMP_P, if it was a branch target it must have
4207 already been recorded as such. */
4208 && (!JUMP_P (insn)
4209 || !label_is_jump_target_p (label_ref_label (substitution),
4210 insn)))
4211 {
4212 add_reg_note (insn, REG_LABEL_OPERAND,
4213 label_ref_label (substitution));
4214 if (LABEL_P (label_ref_label (substitution)))
4215 ++LABEL_NUSES (label_ref_label (substitution));
4216 }
4217
4218 }
4219 else
4220 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4221 }
4222
4223 /* If this insn pattern contains any MATCH_DUP's, make sure that
4224 they will be substituted if the operands they match are substituted.
4225 Also do now any substitutions we already did on the operands.
4226
4227 Don't do this if we aren't making replacements because we might be
4228 propagating things allocated by frame pointer elimination into places
4229 it doesn't expect. */
4230
4231 if (insn_code_number >= 0 && replace)
4232 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4233 {
4234 int opno = recog_data.dup_num[i];
4235 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4236 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4237 }
4238
4239 #if 0
4240 /* This loses because reloading of prior insns can invalidate the equivalence
4241 (or at least find_equiv_reg isn't smart enough to find it any more),
4242 causing this insn to need more reload regs than it needed before.
4243 It may be too late to make the reload regs available.
4244 Now this optimization is done safely in choose_reload_regs. */
4245
4246 /* For each reload of a reg into some other class of reg,
4247 search for an existing equivalent reg (same value now) in the right class.
4248 We can use it as long as we don't need to change its contents. */
4249 for (i = 0; i < n_reloads; i++)
4250 if (rld[i].reg_rtx == 0
4251 && rld[i].in != 0
4252 && REG_P (rld[i].in)
4253 && rld[i].out == 0)
4254 {
4255 rld[i].reg_rtx
4256 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4257 static_reload_reg_p, 0, rld[i].inmode);
4258 /* Prevent generation of insn to load the value
4259 because the one we found already has the value. */
4260 if (rld[i].reg_rtx)
4261 rld[i].in = rld[i].reg_rtx;
4262 }
4263 #endif
4264
4265 /* If we detected an error and replaced the asm instruction with a USE,
4266 forget about the reloads. */
4267 if (GET_CODE (PATTERN (insn)) == USE
4268 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4269 n_reloads = 0;
4270
4271 /* Perhaps an output reload can be combined with another
4272 to reduce needs by one. */
4273 if (!goal_earlyclobber)
4274 combine_reloads ();
4275
4276 /* If we have a pair of reloads for parts of an address, and they are reloading
4277 the same object, the operands themselves were not reloaded, and they
4278 are for two operands that are supposed to match, then merge the reloads and
4279 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4280
4281 for (i = 0; i < n_reloads; i++)
4282 {
4283 int k;
4284
4285 for (j = i + 1; j < n_reloads; j++)
4286 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4287 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4288 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4289 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4290 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4291 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4292 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4293 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4294 && rtx_equal_p (rld[i].in, rld[j].in)
4295 && (operand_reloadnum[rld[i].opnum] < 0
4296 || rld[operand_reloadnum[rld[i].opnum]].optional)
4297 && (operand_reloadnum[rld[j].opnum] < 0
4298 || rld[operand_reloadnum[rld[j].opnum]].optional)
4299 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4300 || (goal_alternative_matches[rld[j].opnum]
4301 == rld[i].opnum)))
4302 {
4303 for (k = 0; k < n_replacements; k++)
4304 if (replacements[k].what == j)
4305 replacements[k].what = i;
4306
4307 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4308 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4309 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4310 else
4311 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4312 rld[j].in = 0;
4313 }
4314 }
4315
4316 /* Scan all the reloads and update their type.
4317 If a reload is for the address of an operand and we didn't reload
4318 that operand, change the type. Similarly, change the operand number
4319 of a reload when two operands match. If a reload is optional, treat it
4320 as though the operand isn't reloaded.
4321
4322 ??? This latter case is somewhat odd because if we do the optional
4323 reload, it means the object is hanging around. Thus we need only
4324 do the address reload if the optional reload was NOT done.
4325
4326 Change secondary reloads to be the address type of their operand, not
4327 the normal type.
4328
4329 If an operand's reload is now RELOAD_OTHER, change any
4330 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4331 RELOAD_FOR_OTHER_ADDRESS. */
4332
4333 for (i = 0; i < n_reloads; i++)
4334 {
4335 if (rld[i].secondary_p
4336 && rld[i].when_needed == operand_type[rld[i].opnum])
4337 rld[i].when_needed = address_type[rld[i].opnum];
4338
4339 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4340 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4341 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4342 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4343 && (operand_reloadnum[rld[i].opnum] < 0
4344 || rld[operand_reloadnum[rld[i].opnum]].optional))
4345 {
4346 /* If we have a secondary reload to go along with this reload,
4347 change its type to RELOAD_FOR_OPADDR_ADDR. */
4348
4349 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4350 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4351 && rld[i].secondary_in_reload != -1)
4352 {
4353 int secondary_in_reload = rld[i].secondary_in_reload;
4354
4355 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4356
4357 /* If there's a tertiary reload we have to change it also. */
4358 if (secondary_in_reload > 0
4359 && rld[secondary_in_reload].secondary_in_reload != -1)
4360 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4361 = RELOAD_FOR_OPADDR_ADDR;
4362 }
4363
4364 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4365 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4366 && rld[i].secondary_out_reload != -1)
4367 {
4368 int secondary_out_reload = rld[i].secondary_out_reload;
4369
4370 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4371
4372 /* If there's a tertiary reload we have to change it also. */
4373 if (secondary_out_reload
4374 && rld[secondary_out_reload].secondary_out_reload != -1)
4375 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4376 = RELOAD_FOR_OPADDR_ADDR;
4377 }
4378
4379 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4380 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4381 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4382 else
4383 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4384 }
4385
4386 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4387 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4388 && operand_reloadnum[rld[i].opnum] >= 0
4389 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4390 == RELOAD_OTHER))
4391 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4392
4393 if (goal_alternative_matches[rld[i].opnum] >= 0)
4394 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4395 }
4396
4397 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4398 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4399 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4400
4401 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4402 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4403 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4404 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4405 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4406 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4407 This is complicated by the fact that a single operand can have more
4408 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4409 choose_reload_regs without affecting code quality, and cases that
4410 actually fail are extremely rare, so it turns out to be better to fix
4411 the problem here by not generating cases that choose_reload_regs will
4412 fail for. */
4413 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4414 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4415 a single operand.
4416 We can reduce the register pressure by exploiting the fact that a
4417 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4418 does not conflict with any of them, if it is only used for the first of
4419 the RELOAD_FOR_X_ADDRESS reloads. */
4420 {
4421 int first_op_addr_num = -2;
4422 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4423 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4424 int need_change = 0;
4425 /* We use first_op_addr_num and the contents of the above arrays
4426 first as flags: -2 means no instance encountered, -1 means exactly
4427 one instance encountered.
4428 If more than one instance has been encountered, we store the reload
4429 number of the first reload of the kind in question; reload numbers
4430 are known to be non-negative. */
4431 for (i = 0; i < noperands; i++)
4432 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4433 for (i = n_reloads - 1; i >= 0; i--)
4434 {
4435 switch (rld[i].when_needed)
4436 {
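/* The increments below implement the flag scheme described above:
   -2 becomes -1 on the first instance; from the second instance on the
   value is non-negative, so we record (and, walking downwards, keep
   lowering) the reload number and note that a change is needed.  */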
4437 case RELOAD_FOR_OPERAND_ADDRESS:
4438 if (++first_op_addr_num >= 0)
4439 {
4440 first_op_addr_num = i;
4441 need_change = 1;
4442 }
4443 break;
4444 case RELOAD_FOR_INPUT_ADDRESS:
4445 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4446 {
4447 first_inpaddr_num[rld[i].opnum] = i;
4448 need_change = 1;
4449 }
4450 break;
4451 case RELOAD_FOR_OUTPUT_ADDRESS:
4452 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4453 {
4454 first_outpaddr_num[rld[i].opnum] = i;
4455 need_change = 1;
4456 }
4457 break;
4458 default:
4459 break;
4460 }
4461 }
4462
4463 if (need_change)
4464 {
4465 for (i = 0; i < n_reloads; i++)
4466 {
4467 int first_num;
4468 enum reload_type type;
4469
4470 switch (rld[i].when_needed)
4471 {
4472 case RELOAD_FOR_OPADDR_ADDR:
4473 first_num = first_op_addr_num;
4474 type = RELOAD_FOR_OPERAND_ADDRESS;
4475 break;
4476 case RELOAD_FOR_INPADDR_ADDRESS:
4477 first_num = first_inpaddr_num[rld[i].opnum];
4478 type = RELOAD_FOR_INPUT_ADDRESS;
4479 break;
4480 case RELOAD_FOR_OUTADDR_ADDRESS:
4481 first_num = first_outpaddr_num[rld[i].opnum];
4482 type = RELOAD_FOR_OUTPUT_ADDRESS;
4483 break;
4484 default:
4485 continue;
4486 }
4487 if (first_num < 0)
4488 continue;
4489 else if (i > first_num)
4490 rld[i].when_needed = type;
4491 else
4492 {
4493 /* Check if the only TYPE reload that uses reload I is
4494 reload FIRST_NUM. */
4495 for (j = n_reloads - 1; j > first_num; j--)
4496 {
4497 if (rld[j].when_needed == type
4498 && (rld[i].secondary_p
4499 ? rld[j].secondary_in_reload == i
4500 : reg_mentioned_p (rld[i].in, rld[j].in)))
4501 {
4502 rld[i].when_needed = type;
4503 break;
4504 }
4505 }
4506 }
4507 }
4508 }
4509 }
4510
4511 /* See if we have any reloads that are now allowed to be merged
4512 because we've changed when the reload is needed to
4513 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4514 check for the most common cases. */
4515
4516 for (i = 0; i < n_reloads; i++)
4517 if (rld[i].in != 0 && rld[i].out == 0
4518 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4519 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4520 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4521 for (j = 0; j < n_reloads; j++)
4522 if (i != j && rld[j].in != 0 && rld[j].out == 0
4523 && rld[j].when_needed == rld[i].when_needed
4524 && MATCHES (rld[i].in, rld[j].in)
4525 && rld[i].rclass == rld[j].rclass
4526 && !rld[i].nocombine && !rld[j].nocombine
4527 && rld[i].reg_rtx == rld[j].reg_rtx)
4528 {
4529 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4530 transfer_replacements (i, j);
4531 rld[j].in = 0;
4532 }
4533
4534 /* If we made any reloads for addresses, see if they violate a
4535 "no input reloads" requirement for this insn. But loads that we
4536 do after the insn (such as for output addresses) are fine. */
4537 if (HAVE_cc0 && no_input_reloads)
4538 for (i = 0; i < n_reloads; i++)
4539 gcc_assert (rld[i].in == 0
4540 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4541 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4542
4543 /* Compute reload_mode and reload_nregs. */
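/* Each reload's mode becomes the wider of its input and output modes,
   so the reload register can hold the value in either of them.  */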
4544 for (i = 0; i < n_reloads; i++)
4545 {
4546 rld[i].mode = rld[i].inmode;
4547 if (rld[i].mode == VOIDmode
4548 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4549 rld[i].mode = rld[i].outmode;
4550
4551 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4552 }
4553
4554 /* Special case a simple move with an input reload and a
4555 destination that is a hard reg: if the hard reg is ok, use it. */
4556 for (i = 0; i < n_reloads; i++)
4557 if (rld[i].when_needed == RELOAD_FOR_INPUT
4558 && GET_CODE (PATTERN (insn)) == SET
4559 && REG_P (SET_DEST (PATTERN (insn)))
4560 && (SET_SRC (PATTERN (insn)) == rld[i].in
4561 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4562 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4563 {
4564 rtx dest = SET_DEST (PATTERN (insn));
4565 unsigned int regno = REGNO (dest);
4566
4567 if (regno < FIRST_PSEUDO_REGISTER
4568 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4569 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4570 {
4571 int nr = hard_regno_nregs (regno, rld[i].mode);
4572 int ok = 1, nri;
4573
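/* The value may need several consecutive hard registers; use the
   destination only if every one of them is in the reload's class.  */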
4574 for (nri = 1; nri < nr; nri ++)
4575 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4576 {
4577 ok = 0;
4578 break;
4579 }
4580
4581 if (ok)
4582 rld[i].reg_rtx = dest;
4583 }
4584 }
4585
4586 return retval;
4587 }
4588
4589 /* Return true if alternative number ALTNUM in constraint-string
4590 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4591 MEM gives the reference if its address hasn't been fully reloaded,
4592 otherwise it is NULL. */
4593
4594 static bool
4595 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4596 const char *constraint, int altnum)
4597 {
4598 int c;
4599
4600 /* Skip alternatives before the one requested. */
4601 while (altnum > 0)
4602 {
4603 while (*constraint++ != ',')
4604 ;
4605 altnum--;
4606 }
4607 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4608 If one of them is present, this alternative accepts the result of
4609 passing a constant-pool reference through find_reloads_toplev.
4610
4611 The same is true of extra memory constraints if the address
4612 was reloaded into a register. However, the target may elect
4613 to disallow the original constant address, forcing it to be
4614 reloaded into a register instead. */
4615 for (; (c = *constraint) && c != ',' && c != '#';
4616 constraint += CONSTRAINT_LEN (c, constraint))
4617 {
4618 enum constraint_num cn = lookup_constraint (constraint);
4619 if (insn_extra_memory_constraint (cn)
4620 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4621 return true;
4622 }
4623 return false;
4624 }
4625
4626 /* Scan X for memory references and scan the addresses for reloading.
4627 Also checks for references to "constant" regs that we want to eliminate
4628 and replaces them with the values they stand for.
4629 We may alter X destructively if it contains a reference to such.
4630 If X is just a constant reg, we return the equivalent value
4631 instead of X.
4632
4633 IND_LEVELS says how many levels of indirect addressing this machine
4634 supports.
4635
4636 OPNUM and TYPE identify the purpose of the reload.
4637
4638 IS_SET_DEST is true if X is the destination of a SET, which is not
4639 appropriate to be replaced by a constant.
4640
4641 INSN, if nonzero, is the insn in which we do the reload. It is used
4642 to determine if we may generate output reloads, and where to put USEs
4643 for pseudos that we have to replace with stack slots.
4644
4645 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4646 result of find_reloads_address. */
4647
4648 static rtx
4649 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4650 int ind_levels, int is_set_dest, rtx_insn *insn,
4651 int *address_reloaded)
4652 {
4653 RTX_CODE code = GET_CODE (x);
4654
4655 const char *fmt = GET_RTX_FORMAT (code);
4656 int i;
4657 int copied;
4658
4659 if (code == REG)
4660 {
4661 /* This code is duplicated for speed in find_reloads. */
4662 int regno = REGNO (x);
4663 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4664 x = reg_equiv_constant (regno);
4665 #if 0
4666 /* This creates (subreg (mem...)) which would cause an unnecessary
4667 reload of the mem. */
4668 else if (reg_equiv_mem (regno) != 0)
4669 x = reg_equiv_mem (regno);
4670 #endif
4671 else if (reg_equiv_memory_loc (regno)
4672 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4673 {
4674 rtx mem = make_memloc (x, regno);
4675 if (reg_equiv_address (regno)
4676 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4677 {
4678 /* If this is not a toplevel operand, find_reloads doesn't see
4679 this substitution. We have to emit a USE of the pseudo so
4680 that delete_output_reload can see it. */
4681 if (replace_reloads && recog_data.operand[opnum] != x)
4682 /* We mark the USE with QImode so that we recognize it
4683 as one that can be safely deleted at the end of
4684 reload. */
4685 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4686 QImode);
4687 x = mem;
4688 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4689 opnum, type, ind_levels, insn);
4690 if (!rtx_equal_p (x, mem))
4691 push_reg_equiv_alt_mem (regno, x);
4692 if (address_reloaded)
4693 *address_reloaded = i;
4694 }
4695 }
4696 return x;
4697 }
4698 if (code == MEM)
4699 {
4700 rtx tem = x;
4701
4702 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4703 opnum, type, ind_levels, insn);
4704 if (address_reloaded)
4705 *address_reloaded = i;
4706
4707 return tem;
4708 }
4709
4710 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4711 {
4712 /* Check for SUBREG containing a REG that's equivalent to a
4713 constant. If the constant has a known value, truncate it
4714 right now. Similarly if we are extracting a single-word of a
4715 multi-word constant. If the constant is symbolic, allow it
4716 to be substituted normally. push_reload will strip the
4717 subreg later. The constant must not be VOIDmode, because we
4718 will lose the mode of the register (this should never happen
4719 because one of the cases above should handle it). */
4720
4721 int regno = REGNO (SUBREG_REG (x));
4722 rtx tem;
4723
4724 if (regno >= FIRST_PSEUDO_REGISTER
4725 && reg_renumber[regno] < 0
4726 && reg_equiv_constant (regno) != 0)
4727 {
4728 tem =
4729 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4730 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4731 gcc_assert (tem);
4732 if (CONSTANT_P (tem)
4733 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4734 {
4735 tem = force_const_mem (GET_MODE (x), tem);
4736 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4737 &XEXP (tem, 0), opnum, type,
4738 ind_levels, insn);
4739 if (address_reloaded)
4740 *address_reloaded = i;
4741 }
4742 return tem;
4743 }
4744
4745 /* If the subreg contains a reg that will be converted to a mem,
4746 attempt to convert the whole subreg to a (narrower or wider)
4747 memory reference instead. If this succeeds, we're done --
4748 otherwise fall through to check whether the inner reg still
4749 needs address reloads anyway. */
4750
4751 if (regno >= FIRST_PSEUDO_REGISTER
4752 && reg_equiv_memory_loc (regno) != 0)
4753 {
4754 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4755 insn, address_reloaded);
4756 if (tem)
4757 return tem;
4758 }
4759 }
4760
4761 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4762 {
4763 if (fmt[i] == 'e')
4764 {
4765 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4766 ind_levels, is_set_dest, insn,
4767 address_reloaded);
4768 /* If we have replaced a reg with its equivalent memory loc -
4769 that can still be handled here e.g. if it's in a paradoxical
4770 subreg - we must make the change in a copy, rather than using
4771 a destructive change. This way, find_reloads can still elect
4772 not to do the change. */
4773 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4774 {
4775 x = shallow_copy_rtx (x);
4776 copied = 1;
4777 }
4778 XEXP (x, i) = new_part;
4779 }
4780 }
4781 return x;
4782 }
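
/* Illustrative example (register numbers are hypothetical): if pseudo
   (reg 100) got no hard register and reg_equiv_constant (100) is
   (const_int 5), find_reloads_toplev rewrites (plus (reg 100) (reg 3))
   into (plus (const_int 5) (reg 3)) in place.  Had the equivalence been a
   memory location instead, the enclosing rtx would first have been
   shallow-copied, so that find_reloads could still decline the change.  */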
4783
4784 /* Return a mem ref for the memory equivalent of reg REGNO.
4785 This mem ref is not shared with anything. */
4786
4787 static rtx
4788 make_memloc (rtx ad, int regno)
4789 {
4790 /* We must rerun eliminate_regs, in case the elimination
4791 offsets have changed. */
4792 rtx tem
4793 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4794 0);
4795
4796 /* If TEM might contain a pseudo, we must copy it to avoid
4797 modifying it when we do the substitution for the reload. */
4798 if (rtx_varies_p (tem, 0))
4799 tem = copy_rtx (tem);
4800
4801 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4802 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4803
4804 /* Copy the result if it's still the same as the equivalence, to avoid
4805 modifying it when we do the substitution for the reload. */
4806 if (tem == reg_equiv_memory_loc (regno))
4807 tem = copy_rtx (tem);
4808 return tem;
4809 }
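
/* Sketch of the effect (a hypothetical stack slot, assuming the frame
   pointer is being eliminated to the stack pointer): if
   reg_equiv_memory_loc (REGNO) is (mem:SI (plus (reg fp) (const_int -8))),
   the call to eliminate_regs may turn the address into
   (plus (reg sp) (const_int 24)), and the result is an unshared
   (mem (plus (reg sp) (const_int 24))) adjusted to the mode of AD.  */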
4810
4811 /* Returns true if AD could be turned into a valid memory reference
4812 to mode MODE in address space AS by reloading the part pointed to
4813 by PART into a register. */
4814
4815 static int
4816 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4817 addr_space_t as, rtx *part)
4818 {
4819 int retv;
4820 rtx tem = *part;
4821 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4822
4823 *part = reg;
4824 retv = memory_address_addr_space_p (mode, ad, as);
4825 *part = tem;
4826
4827 return retv;
4828 }
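
/* For example (purely illustrative): to ask whether
   (plus (reg sp) (const_int 100000)) could be fixed by reloading only the
   displacement, a caller passes &XEXP (ad, 1) as PART; the constant is
   temporarily replaced by a fresh pseudo (numbered max_reg_num (), so it
   cannot clash with an existing register), the whole address is tested
   with memory_address_addr_space_p, and *PART is then restored.  */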
4829
4830 /* Record all reloads needed for handling memory address AD
4831 which appears in *LOC in a memory reference to mode MODE
4832 which itself is found in location *MEMREFLOC.
4833 Note that we take shortcuts assuming that no multi-reg machine mode
4834 occurs as part of an address.
4835
4836 OPNUM and TYPE specify the purpose of this reload.
4837
4838 IND_LEVELS says how many levels of indirect addressing this machine
4839 supports.
4840
4841 INSN, if nonzero, is the insn in which we do the reload. It is used
4842 to determine if we may generate output reloads, and where to put USEs
4843 for pseudos that we have to replace with stack slots.
4844
4845 Value is one if this address is reloaded or replaced as a whole; it is
4846 zero if the top level of this address was not reloaded or replaced, and
4847 it is -1 if it may or may not have been reloaded or replaced.
4848
4849 Note that there is no verification that the address will be valid after
4850 this routine does its work. Instead, we rely on the fact that the address
4851 was valid when reload started. So we need only undo things that reload
4852 could have broken. These are wrong register types, pseudos not allocated
4853 to a hard register, and frame pointer elimination. */
4854
4855 static int
4856 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4857 rtx *loc, int opnum, enum reload_type type,
4858 int ind_levels, rtx_insn *insn)
4859 {
4860 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4861 : ADDR_SPACE_GENERIC;
4862 int regno;
4863 int removed_and = 0;
4864 int op_index;
4865 rtx tem;
4866
4867 /* If the address is a register, see if it is a legitimate address and
4868 reload if not. We first handle the cases where we need not reload
4869 or where we must reload in a non-standard way. */
4870
4871 if (REG_P (ad))
4872 {
4873 regno = REGNO (ad);
4874
4875 if (reg_equiv_constant (regno) != 0)
4876 {
4877 find_reloads_address_part (reg_equiv_constant (regno), loc,
4878 base_reg_class (mode, as, MEM, SCRATCH),
4879 GET_MODE (ad), opnum, type, ind_levels);
4880 return 1;
4881 }
4882
4883 tem = reg_equiv_memory_loc (regno);
4884 if (tem != 0)
4885 {
4886 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4887 {
4888 tem = make_memloc (ad, regno);
4889 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4890 XEXP (tem, 0),
4891 MEM_ADDR_SPACE (tem)))
4892 {
4893 rtx orig = tem;
4894
4895 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4896 &XEXP (tem, 0), opnum,
4897 ADDR_TYPE (type), ind_levels, insn);
4898 if (!rtx_equal_p (tem, orig))
4899 push_reg_equiv_alt_mem (regno, tem);
4900 }
4901 /* We can avoid a reload if the register's equivalent memory
4902 expression is valid as an indirect memory address.
4903 But not all addresses are valid in a mem used as an indirect
4904 address: only reg or reg+constant. */
4905
4906 if (ind_levels > 0
4907 && strict_memory_address_addr_space_p (mode, tem, as)
4908 && (REG_P (XEXP (tem, 0))
4909 || (GET_CODE (XEXP (tem, 0)) == PLUS
4910 && REG_P (XEXP (XEXP (tem, 0), 0))
4911 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4912 {
4913 /* TEM is not the same as what we'll be replacing the
4914 pseudo with after reload, put a USE in front of INSN
4915 in the final reload pass. */
4916 if (replace_reloads
4917 && num_not_at_initial_offset
4918 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4919 {
4920 *loc = tem;
4921 /* We mark the USE with QImode so that we
4922 recognize it as one that can be safely
4923 deleted at the end of reload. */
4924 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4925 insn), QImode);
4926
4927 /* This doesn't really count as replacing the address
4928 as a whole, since it is still a memory access. */
4929 }
4930 return 0;
4931 }
4932 ad = tem;
4933 }
4934 }
4935
4936 /* The only remaining case where we can avoid a reload is if this is a
4937 hard register that is valid as a base register and which is not the
4938 subject of a CLOBBER in this insn. */
4939
4940 else if (regno < FIRST_PSEUDO_REGISTER
4941 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4942 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4943 return 0;
4944
4945 /* If we do not have one of the cases above, we must do the reload. */
4946 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4947 base_reg_class (mode, as, MEM, SCRATCH),
4948 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4949 return 1;
4950 }
4951
4952 if (strict_memory_address_addr_space_p (mode, ad, as))
4953 {
4954 /* The address appears valid, so reloads are not needed.
4955 But the address may contain an eliminable register.
4956 This can happen because a machine with indirect addressing
4957 may consider a pseudo register by itself a valid address even when
4958 it has failed to get a hard reg.
4959 So do a tree-walk to find and eliminate all such regs. */
4960
4961 /* But first quickly dispose of a common case. */
4962 if (GET_CODE (ad) == PLUS
4963 && CONST_INT_P (XEXP (ad, 1))
4964 && REG_P (XEXP (ad, 0))
4965 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4966 return 0;
4967
4968 subst_reg_equivs_changed = 0;
4969 *loc = subst_reg_equivs (ad, insn);
4970
4971 if (! subst_reg_equivs_changed)
4972 return 0;
4973
4974 /* Check result for validity after substitution. */
4975 if (strict_memory_address_addr_space_p (mode, ad, as))
4976 return 0;
4977 }
4978
4979 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4980 do
4981 {
4982 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4983 {
4984 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4985 ind_levels, win);
4986 }
4987 break;
4988 win:
4989 *memrefloc = copy_rtx (*memrefloc);
4990 XEXP (*memrefloc, 0) = ad;
4991 move_replacements (&ad, &XEXP (*memrefloc, 0));
4992 return -1;
4993 }
4994 while (0);
4995 #endif
4996
4997 /* The address is not valid. We have to figure out why. First see if
4998 we have an outer AND and remove it if so. Then analyze what's inside. */
4999
5000 if (GET_CODE (ad) == AND)
5001 {
5002 removed_and = 1;
5003 loc = &XEXP (ad, 0);
5004 ad = *loc;
5005 }
5006
5007 /* One possibility for why the address is invalid is that it is itself
5008 a MEM. This can happen when the frame pointer is being eliminated, a
5009 pseudo is not allocated to a hard register, and the offset between the
5010 frame and stack pointers is not its initial value. In that case the
5011 pseudo will have been replaced by a MEM referring to the
5012 stack pointer. */
5013 if (MEM_P (ad))
5014 {
5015 /* First ensure that the address in this MEM is valid. Then, unless
5016 indirect addresses are valid, reload the MEM into a register. */
5017 tem = ad;
5018 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5019 opnum, ADDR_TYPE (type),
5020 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5021
5022 /* If tem was changed, then we must create a new memory reference to
5023 hold it and store it back into memrefloc. */
5024 if (tem != ad && memrefloc)
5025 {
5026 *memrefloc = copy_rtx (*memrefloc);
5027 copy_replacements (tem, XEXP (*memrefloc, 0));
5028 loc = &XEXP (*memrefloc, 0);
5029 if (removed_and)
5030 loc = &XEXP (*loc, 0);
5031 }
5032
5033 /* Check similar cases as for indirect addresses as above except
5034 that we can allow pseudos and a MEM since they should have been
5035 taken care of above. */
5036
5037 if (ind_levels == 0
5038 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5039 || MEM_P (XEXP (tem, 0))
5040 || ! (REG_P (XEXP (tem, 0))
5041 || (GET_CODE (XEXP (tem, 0)) == PLUS
5042 && REG_P (XEXP (XEXP (tem, 0), 0))
5043 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5044 {
5045 /* Must use TEM here, not AD, since it is the one that will
5046 have any subexpressions reloaded, if needed. */
5047 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5048 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5049 VOIDmode, 0,
5050 0, opnum, type);
5051 return ! removed_and;
5052 }
5053 else
5054 return 0;
5055 }
5056
5057 /* If we have address of a stack slot but it's not valid because the
5058 displacement is too large, compute the sum in a register.
5059 Handle all base registers here, not just fp/ap/sp, because on some
5060 targets (namely SH) we can also get too large displacements from
5061 big-endian corrections. */
5062 else if (GET_CODE (ad) == PLUS
5063 && REG_P (XEXP (ad, 0))
5064 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5065 && CONST_INT_P (XEXP (ad, 1))
5066 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5067 CONST_INT)
5068 /* Similarly, if we were to reload the base register and the
5069 mem+offset address is still invalid, then we want to reload
5070 the whole address, not just the base register. */
5071 || ! maybe_memory_address_addr_space_p
5072 (mode, ad, as, &(XEXP (ad, 0)))))
5073
5074 {
5075 /* Unshare the MEM rtx so we can safely alter it. */
5076 if (memrefloc)
5077 {
5078 *memrefloc = copy_rtx (*memrefloc);
5079 loc = &XEXP (*memrefloc, 0);
5080 if (removed_and)
5081 loc = &XEXP (*loc, 0);
5082 }
5083
5084 if (double_reg_address_ok[mode]
5085 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5086 PLUS, CONST_INT))
5087 {
5088 /* Unshare the sum as well. */
5089 *loc = ad = copy_rtx (ad);
5090
5091 /* Reload the displacement into an index reg.
5092 We assume the frame pointer or arg pointer is a base reg. */
5093 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5094 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5095 type, ind_levels);
5096 return 0;
5097 }
5098 else
5099 {
5100 /* If the sum of two regs is not necessarily valid,
5101 reload the sum into a base reg.
5102 That will at least work. */
5103 find_reloads_address_part (ad, loc,
5104 base_reg_class (mode, as, MEM, SCRATCH),
5105 GET_MODE (ad), opnum, type, ind_levels);
5106 }
5107 return ! removed_and;
5108 }
5109
5110 /* If we have an indexed stack slot, there are three possible reasons why
5111 it might be invalid: The index might need to be reloaded, the address
5112 might have been made by frame pointer elimination and hence have a
5113 constant out of range, or both reasons might apply.
5114
5115 We can easily check for an index needing reload, but even if that is the
5116 case, we might also have an invalid constant. To avoid making the
5117 conservative assumption and requiring two reloads, we see if this address
5118 is valid when not interpreted strictly. If it is, the only problem is
5119 that the index needs a reload and find_reloads_address_1 will take care
5120 of it.
5121
5122 Handle all base registers here, not just fp/ap/sp, because on some
5123 targets (namely SPARC) we can also get invalid addresses from preventive
5124 subreg big-endian corrections made by find_reloads_toplev. We
5125 can also get expressions involving LO_SUM (rather than PLUS) from
5126 find_reloads_subreg_address.
5127
5128 If we decide to do something, it must be that `double_reg_address_ok'
5129 is true. We generate a reload of the base register + constant and
5130 rework the sum so that the reload register will be added to the index.
5131 This is safe because we know the address isn't shared.
5132
5133 We check for the base register as both the first and second operand of
5134 the innermost PLUS and/or LO_SUM. */
5135
5136 for (op_index = 0; op_index < 2; ++op_index)
5137 {
5138 rtx operand, addend;
5139 enum rtx_code inner_code;
5140
5141 if (GET_CODE (ad) != PLUS)
5142 continue;
5143
5144 inner_code = GET_CODE (XEXP (ad, 0));
5145 if (!(GET_CODE (ad) == PLUS
5146 && CONST_INT_P (XEXP (ad, 1))
5147 && (inner_code == PLUS || inner_code == LO_SUM)))
5148 continue;
5149
5150 operand = XEXP (XEXP (ad, 0), op_index);
5151 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5152 continue;
5153
5154 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5155
5156 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5157 GET_CODE (addend))
5158 || operand == frame_pointer_rtx
5159 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5160 && operand == hard_frame_pointer_rtx)
5161 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5162 && operand == arg_pointer_rtx)
5163 || operand == stack_pointer_rtx)
5164 && ! maybe_memory_address_addr_space_p
5165 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5166 {
5167 rtx offset_reg;
5168 enum reg_class cls;
5169
5170 offset_reg = plus_constant (GET_MODE (ad), operand,
5171 INTVAL (XEXP (ad, 1)));
5172
5173 /* Form the adjusted address. */
5174 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5175 ad = gen_rtx_PLUS (GET_MODE (ad),
5176 op_index == 0 ? offset_reg : addend,
5177 op_index == 0 ? addend : offset_reg);
5178 else
5179 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5180 op_index == 0 ? offset_reg : addend,
5181 op_index == 0 ? addend : offset_reg);
5182 *loc = ad;
5183
5184 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5185 find_reloads_address_part (XEXP (ad, op_index),
5186 &XEXP (ad, op_index), cls,
5187 GET_MODE (ad), opnum, type, ind_levels);
5188 find_reloads_address_1 (mode, as,
5189 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5190 GET_CODE (XEXP (ad, op_index)),
5191 &XEXP (ad, 1 - op_index), opnum,
5192 type, 0, insn);
5193
5194 return 0;
5195 }
5196 }
5197
5198 /* See if address becomes valid when an eliminable register
5199 in a sum is replaced. */
5200
5201 tem = ad;
5202 if (GET_CODE (ad) == PLUS)
5203 tem = subst_indexed_address (ad);
5204 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5205 {
5206 /* Ok, we win that way. Replace any additional eliminable
5207 registers. */
5208
5209 subst_reg_equivs_changed = 0;
5210 tem = subst_reg_equivs (tem, insn);
5211
5212 /* Make sure that didn't make the address invalid again. */
5213
5214 if (! subst_reg_equivs_changed
5215 || strict_memory_address_addr_space_p (mode, tem, as))
5216 {
5217 *loc = tem;
5218 return 0;
5219 }
5220 }
5221
5222 /* If constants aren't valid addresses, reload the constant address
5223 into a register. */
5224 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5225 {
5226 machine_mode address_mode = GET_MODE (ad);
5227 if (address_mode == VOIDmode)
5228 address_mode = targetm.addr_space.address_mode (as);
5229
5230 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5231 Unshare it so we can safely alter it. */
5232 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5233 && CONSTANT_POOL_ADDRESS_P (ad))
5234 {
5235 *memrefloc = copy_rtx (*memrefloc);
5236 loc = &XEXP (*memrefloc, 0);
5237 if (removed_and)
5238 loc = &XEXP (*loc, 0);
5239 }
5240
5241 find_reloads_address_part (ad, loc,
5242 base_reg_class (mode, as, MEM, SCRATCH),
5243 address_mode, opnum, type, ind_levels);
5244 return ! removed_and;
5245 }
5246
5247 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5248 opnum, type, ind_levels, insn);
5249 }
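
/* Worked example (the displacement is hypothetical): after frame pointer
   elimination, a stack-slot address such as
   (plus (reg sp) (const_int 40000)) may exceed the target's displacement
   range.  If double_reg_address_ok allows reg+reg addressing, only the
   constant 40000 is reloaded, into an index register; otherwise the whole
   sum is reloaded into a base register.  The return value then tells the
   caller whether the top level of the address was replaced.  */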
5250
5251 /* Find all pseudo regs appearing in AD
5252 that are eliminable in favor of equivalent values
5253 and do not have hard regs; replace them by their equivalents.
5254 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5255 front of it for pseudos that we have to replace with stack slots. */
5256
5257 static rtx
5258 subst_reg_equivs (rtx ad, rtx_insn *insn)
5259 {
5260 RTX_CODE code = GET_CODE (ad);
5261 int i;
5262 const char *fmt;
5263
5264 switch (code)
5265 {
5266 case HIGH:
5267 case CONST:
5268 CASE_CONST_ANY:
5269 case SYMBOL_REF:
5270 case LABEL_REF:
5271 case PC:
5272 case CC0:
5273 return ad;
5274
5275 case REG:
5276 {
5277 int regno = REGNO (ad);
5278
5279 if (reg_equiv_constant (regno) != 0)
5280 {
5281 subst_reg_equivs_changed = 1;
5282 return reg_equiv_constant (regno);
5283 }
5284 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5285 {
5286 rtx mem = make_memloc (ad, regno);
5287 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5288 {
5289 subst_reg_equivs_changed = 1;
5290 /* We mark the USE with QImode so that we recognize it
5291 as one that can be safely deleted at the end of
5292 reload. */
5293 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5294 QImode);
5295 return mem;
5296 }
5297 }
5298 }
5299 return ad;
5300
5301 case PLUS:
5302 /* Quickly dispose of a common case. */
5303 if (XEXP (ad, 0) == frame_pointer_rtx
5304 && CONST_INT_P (XEXP (ad, 1)))
5305 return ad;
5306 break;
5307
5308 default:
5309 break;
5310 }
5311
5312 fmt = GET_RTX_FORMAT (code);
5313 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5314 if (fmt[i] == 'e')
5315 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5316 return ad;
5317 }
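
/* For instance (register number hypothetical): if reg_equiv_constant (117)
   is (symbol_ref "x"), then (plus (reg 117) (const_int 8)) becomes
   (plus (symbol_ref "x") (const_int 8)), and subst_reg_equivs_changed is
   set so the caller knows to re-validate the resulting address.  */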
5318
5319 /* Compute the sum of X and Y, making canonicalizations assumed in an
5320 address, namely: sum constant integers, surround the sum of two
5321 constants with a CONST, put the constant as the second operand, and
5322 group the constant on the outermost sum.
5323
5324 This routine assumes both inputs are already in canonical form. */
5325
5326 rtx
5327 form_sum (machine_mode mode, rtx x, rtx y)
5328 {
5329 rtx tem;
5330
5331 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5332 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5333
5334 if (CONST_INT_P (x))
5335 return plus_constant (mode, y, INTVAL (x));
5336 else if (CONST_INT_P (y))
5337 return plus_constant (mode, x, INTVAL (y));
5338 else if (CONSTANT_P (x))
5339 tem = x, x = y, y = tem;
5340
5341 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5342 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5343
5344 /* Note that if the operands of Y are specified in the opposite
5345 order in the recursive calls below, infinite recursion will occur. */
5346 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5347 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5348
5349 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5350 constant will have been placed second. */
5351 if (CONSTANT_P (x) && CONSTANT_P (y))
5352 {
5353 if (GET_CODE (x) == CONST)
5354 x = XEXP (x, 0);
5355 if (GET_CODE (y) == CONST)
5356 y = XEXP (y, 0);
5357
5358 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5359 }
5360
5361 return gen_rtx_PLUS (mode, x, y);
5362 }
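
/* Minimal usage sketch (illustrative only; this helper is hypothetical and
   never called): starting from (plus BASE_REG (const_int 4)), adding
   (const_int 8) with form_sum folds the constants and keeps the result in
   canonical form, yielding (plus BASE_REG (const_int 12)).  BASE_REG is
   assumed to be a Pmode register supplied by the caller.  */

static rtx ATTRIBUTE_UNUSED
form_sum_example (rtx base_reg)
{
  /* Build the partial sum, then let form_sum fold in the new constant.  */
  rtx partial = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));
  return form_sum (Pmode, partial, GEN_INT (8));
}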
5363
5364 /* If ADDR is a sum containing a pseudo register that should be
5365 replaced with a constant (from reg_equiv_constant),
5366 return the result of doing so, and also apply the associative
5367 law so that the result is more likely to be a valid address.
5368 (But it is not guaranteed to be one.)
5369
5370 Note that at most one register is replaced, even if more are
5371 replaceable. Also, we try to put the result into a canonical form
5372 so it is more likely to be a valid address.
5373
5374 In all other cases, return ADDR. */
5375
5376 static rtx
5377 subst_indexed_address (rtx addr)
5378 {
5379 rtx op0 = 0, op1 = 0, op2 = 0;
5380 rtx tem;
5381 int regno;
5382
5383 if (GET_CODE (addr) == PLUS)
5384 {
5385 /* Try to find a register to replace. */
5386 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5387 if (REG_P (op0)
5388 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5389 && reg_renumber[regno] < 0
5390 && reg_equiv_constant (regno) != 0)
5391 op0 = reg_equiv_constant (regno);
5392 else if (REG_P (op1)
5393 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5394 && reg_renumber[regno] < 0
5395 && reg_equiv_constant (regno) != 0)
5396 op1 = reg_equiv_constant (regno);
5397 else if (GET_CODE (op0) == PLUS
5398 && (tem = subst_indexed_address (op0)) != op0)
5399 op0 = tem;
5400 else if (GET_CODE (op1) == PLUS
5401 && (tem = subst_indexed_address (op1)) != op1)
5402 op1 = tem;
5403 else
5404 return addr;
5405
5406 /* Pick out up to three things to add. */
5407 if (GET_CODE (op1) == PLUS)
5408 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5409 else if (GET_CODE (op0) == PLUS)
5410 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5411
5412 /* Compute the sum. */
5413 if (op2 != 0)
5414 op1 = form_sum (GET_MODE (addr), op1, op2);
5415 if (op1 != 0)
5416 op0 = form_sum (GET_MODE (addr), op0, op1);
5417
5418 return op0;
5419 }
5420 return addr;
5421 }
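
/* Worked example (register numbers hypothetical): for
   (plus (plus (reg 200) (reg 3)) (const_int 4)), where pseudo 200 got no
   hard register and reg_equiv_constant (200) is (const_int 16), the inner
   register is replaced and the three terms are re-summed via form_sum,
   giving the canonical (plus (reg 3) (const_int 20)).  */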
5422
5423 /* Update the REG_INC notes for an insn. It updates all REG_INC
5424 notes for the instruction that refer to REGNO so that they refer
5425 to the reload number.
5426
5427 INSN is the insn for which any REG_INC notes need updating.
5428
5429 REGNO is the register number which has been reloaded.
5430
5431 RELOADNUM is the reload number. */
5432
5433 static void
5434 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5435 int reloadnum ATTRIBUTE_UNUSED)
5436 {
5437 if (!AUTO_INC_DEC)
5438 return;
5439
5440 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5441 if (REG_NOTE_KIND (link) == REG_INC
5442 && (int) REGNO (XEXP (link, 0)) == regno)
5443 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5444 }
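
/* For example (register and reload numbers hypothetical): if INSN carries
   a REG_INC note referring to (reg 42) and register 42 was just pushed as
   reload number 3, a replacement is recorded for the note's operand, so
   that subst_reloads later rewrites the note to name the reload register.  */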
5445
5446 /* Record the pseudo registers we must reload into hard registers in a
5447 subexpression of a would-be memory address, X referring to a value
5448 in mode MODE. (This function is not called if the address we find
5449 is strictly valid.)
5450
5451 CONTEXT = 1 means we are considering regs as index regs,
5452 = 0 means we are considering them as base regs.
5453 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5454 or an autoinc code.
5455 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5456 is the code of the index part of the address. Otherwise, pass SCRATCH
5457 for this argument.
5458 OPNUM and TYPE specify the purpose of any reloads made.
5459
5460 IND_LEVELS says how many levels of indirect addressing are
5461 supported at this point in the address.
5462
5463 INSN, if nonzero, is the insn in which we do the reload. It is used
5464 to determine if we may generate output reloads.
5465
5466 We return nonzero if X, as a whole, is reloaded or replaced. */
5467
5468 /* Note that we take shortcuts assuming that no multi-reg machine mode
5469 occurs as part of an address.
5470 Also, this is not fully machine-customizable; it works for machines
5471 such as VAXen and 68000's and 32000's, but other possible machines
5472 could have addressing modes that this does not handle right.
5473 If you add push_reload calls here, you need to make sure gen_reload
5474 handles those cases gracefully. */
5475
5476 static int
5477 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5478 rtx x, int context,
5479 enum rtx_code outer_code, enum rtx_code index_code,
5480 rtx *loc, int opnum, enum reload_type type,
5481 int ind_levels, rtx_insn *insn)
5482 {
5483 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5484 ((CONTEXT) == 0 \
5485 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5486 : REGNO_OK_FOR_INDEX_P (REGNO))
5487
5488 enum reg_class context_reg_class;
5489 RTX_CODE code = GET_CODE (x);
5490 bool reloaded_inner_of_autoinc = false;
5491
5492 if (context == 1)
5493 context_reg_class = INDEX_REG_CLASS;
5494 else
5495 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5496
5497 switch (code)
5498 {
5499 case PLUS:
5500 {
5501 rtx orig_op0 = XEXP (x, 0);
5502 rtx orig_op1 = XEXP (x, 1);
5503 RTX_CODE code0 = GET_CODE (orig_op0);
5504 RTX_CODE code1 = GET_CODE (orig_op1);
5505 rtx op0 = orig_op0;
5506 rtx op1 = orig_op1;
5507
5508 if (GET_CODE (op0) == SUBREG)
5509 {
5510 op0 = SUBREG_REG (op0);
5511 code0 = GET_CODE (op0);
5512 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5513 op0 = gen_rtx_REG (word_mode,
5514 (REGNO (op0) +
5515 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5516 GET_MODE (SUBREG_REG (orig_op0)),
5517 SUBREG_BYTE (orig_op0),
5518 GET_MODE (orig_op0))));
5519 }
5520
5521 if (GET_CODE (op1) == SUBREG)
5522 {
5523 op1 = SUBREG_REG (op1);
5524 code1 = GET_CODE (op1);
5525 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5526 /* ??? Why is this given op1's mode and above for
5527 ??? op0 SUBREGs we use word_mode? */
5528 op1 = gen_rtx_REG (GET_MODE (op1),
5529 (REGNO (op1) +
5530 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5531 GET_MODE (SUBREG_REG (orig_op1)),
5532 SUBREG_BYTE (orig_op1),
5533 GET_MODE (orig_op1))));
5534 }
5535 /* A PLUS in the index register can arise only from register
5536 rematerialization of an expression like &localvar*4.  Reload it.
5537 It may be possible to combine the displacement on the outer level,
5538 but it is probably not worthwhile to do so. */
5539 if (context == 1)
5540 {
5541 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5542 opnum, ADDR_TYPE (type), ind_levels, insn);
5543 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5544 context_reg_class,
5545 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5546 return 1;
5547 }
5548
5549 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5550 || code0 == ZERO_EXTEND || code1 == MEM)
5551 {
5552 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5553 &XEXP (x, 0), opnum, type, ind_levels,
5554 insn);
5555 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5556 &XEXP (x, 1), opnum, type, ind_levels,
5557 insn);
5558 }
5559
5560 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5561 || code1 == ZERO_EXTEND || code0 == MEM)
5562 {
5563 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5564 &XEXP (x, 0), opnum, type, ind_levels,
5565 insn);
5566 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5567 &XEXP (x, 1), opnum, type, ind_levels,
5568 insn);
5569 }
5570
5571 else if (code0 == CONST_INT || code0 == CONST
5572 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5573 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5576
5577 else if (code1 == CONST_INT || code1 == CONST
5578 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5579 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5580 &XEXP (x, 0), opnum, type, ind_levels,
5581 insn);
5582
5583 else if (code0 == REG && code1 == REG)
5584 {
5585 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5586 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5587 return 0;
5588 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5589 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5590 return 0;
5591 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5592 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5593 &XEXP (x, 1), opnum, type, ind_levels,
5594 insn);
5595 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5596 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5597 &XEXP (x, 0), opnum, type, ind_levels,
5598 insn);
5599 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5600 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5601 &XEXP (x, 0), opnum, type, ind_levels,
5602 insn);
5603 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5604 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5605 &XEXP (x, 1), opnum, type, ind_levels,
5606 insn);
5607 else
5608 {
5609 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5610 &XEXP (x, 0), opnum, type, ind_levels,
5611 insn);
5612 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5613 &XEXP (x, 1), opnum, type, ind_levels,
5614 insn);
5615 }
5616 }
5617
5618 else if (code0 == REG)
5619 {
5620 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5621 &XEXP (x, 0), opnum, type, ind_levels,
5622 insn);
5623 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5624 &XEXP (x, 1), opnum, type, ind_levels,
5625 insn);
5626 }
5627
5628 else if (code1 == REG)
5629 {
5630 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5633 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5634 &XEXP (x, 0), opnum, type, ind_levels,
5635 insn);
5636 }
5637 }
5638
5639 return 0;
5640
5641 case POST_MODIFY:
5642 case PRE_MODIFY:
5643 {
5644 rtx op0 = XEXP (x, 0);
5645 rtx op1 = XEXP (x, 1);
5646 enum rtx_code index_code;
5647 int regno;
5648 int reloadnum;
5649
5650 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5651 return 0;
5652
5653 /* Currently, we only support {PRE,POST}_MODIFY constructs
5654 where a base register is {inc,dec}remented by the contents
5655 of another register or by a constant value. Thus, these
5656 operands must match. */
5657 gcc_assert (op0 == XEXP (op1, 0));
5658
5659 /* Require index register (or constant). Let's just handle the
5660 register case in the meantime... If the target allows
5661 auto-modify by a constant then we could try replacing a pseudo
5662 register with its equivalent constant where applicable.
5663
5664 We also handle the case where the register was eliminated
5665 resulting in a PLUS subexpression.
5666
5667 If we later decide to reload the whole PRE_MODIFY or
5668 POST_MODIFY, inc_for_reload might clobber the reload register
5669 before reading the index. The index register might therefore
5670 need to live longer than a TYPE reload normally would, so be
5671 conservative and class it as RELOAD_OTHER. */
5672 if ((REG_P (XEXP (op1, 1))
5673 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5674 || GET_CODE (XEXP (op1, 1)) == PLUS)
5675 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5676 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5677 ind_levels, insn);
5678
5679 gcc_assert (REG_P (XEXP (op1, 0)));
5680
5681 regno = REGNO (XEXP (op1, 0));
5682 index_code = GET_CODE (XEXP (op1, 1));
5683
5684 /* A register that is incremented cannot be constant! */
5685 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5686 || reg_equiv_constant (regno) == 0);
5687
5688 /* Handle a register that is equivalent to a memory location
5689 which cannot be addressed directly. */
5690 if (reg_equiv_memory_loc (regno) != 0
5691 && (reg_equiv_address (regno) != 0
5692 || num_not_at_initial_offset))
5693 {
5694 rtx tem = make_memloc (XEXP (x, 0), regno);
5695
5696 if (reg_equiv_address (regno)
5697 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5698 {
5699 rtx orig = tem;
5700
5701 /* First reload the memory location's address.
5702 We can't use ADDR_TYPE (type) here, because we need to
5703 write back the value after reading it, hence we actually
5704 need two registers. */
5705 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5706 &XEXP (tem, 0), opnum,
5707 RELOAD_OTHER,
5708 ind_levels, insn);
5709
5710 if (!rtx_equal_p (tem, orig))
5711 push_reg_equiv_alt_mem (regno, tem);
5712
5713 /* Then reload the memory location into a base
5714 register. */
5715 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5716 &XEXP (op1, 0),
5717 base_reg_class (mode, as,
5718 code, index_code),
5719 GET_MODE (x), GET_MODE (x), 0,
5720 0, opnum, RELOAD_OTHER);
5721
5722 update_auto_inc_notes (this_insn, regno, reloadnum);
5723 return 0;
5724 }
5725 }
5726
5727 if (reg_renumber[regno] >= 0)
5728 regno = reg_renumber[regno];
5729
5730 /* We require a base register here... */
5731 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5732 {
5733 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5734 &XEXP (op1, 0), &XEXP (x, 0),
5735 base_reg_class (mode, as,
5736 code, index_code),
5737 GET_MODE (x), GET_MODE (x), 0, 0,
5738 opnum, RELOAD_OTHER);
5739
5740 update_auto_inc_notes (this_insn, regno, reloadnum);
5741 return 0;
5742 }
5743 }
5744 return 0;
5745
5746 case POST_INC:
5747 case POST_DEC:
5748 case PRE_INC:
5749 case PRE_DEC:
5750 if (REG_P (XEXP (x, 0)))
5751 {
5752 int regno = REGNO (XEXP (x, 0));
5753 int value = 0;
5754 rtx x_orig = x;
5755
5756 /* A register that is incremented cannot be constant! */
5757 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5758 || reg_equiv_constant (regno) == 0);
5759
5760 /* Handle a register that is equivalent to a memory location
5761 which cannot be addressed directly. */
5762 if (reg_equiv_memory_loc (regno) != 0
5763 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5764 {
5765 rtx tem = make_memloc (XEXP (x, 0), regno);
5766 if (reg_equiv_address (regno)
5767 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5768 {
5769 rtx orig = tem;
5770
5771 /* First reload the memory location's address.
5772 We can't use ADDR_TYPE (type) here, because we need to
5773 write back the value after reading it, hence we actually
5774 need two registers. */
5775 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5776 &XEXP (tem, 0), opnum, type,
5777 ind_levels, insn);
5778 reloaded_inner_of_autoinc = true;
5779 if (!rtx_equal_p (tem, orig))
5780 push_reg_equiv_alt_mem (regno, tem);
5781 /* Put this inside a new increment-expression. */
5782 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5783 /* Proceed to reload that, as if it contained a register. */
5784 }
5785 }
5786
5787 /* If we have a hard register that is ok in this incdec context,
5788 don't make a reload. If the register isn't nice enough for
5789 autoincdec, we can reload it.  But if an autoincrement of a
5790 register that we just verified as acceptable is nevertheless
5791 not "valid" to the target, then no autoincrement can be "valid".
5792 If something made an autoincrement anyway, this must be a
5793 special context where one is allowed (for example, a "push"
5794 instruction), so we cannot improve this address and we leave
5795 it alone.  */
5796
5797 /* Otherwise, reload the autoincrement into a suitable hard reg
5798 and record how much to increment by. */
5799
5800 if (reg_renumber[regno] >= 0)
5801 regno = reg_renumber[regno];
5802 if (regno >= FIRST_PSEUDO_REGISTER
5803 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5804 index_code))
5805 {
5806 int reloadnum;
5807
5808 /* If we can output the register afterwards, do so, this
5809 saves the extra update.
5810 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5811 CALL_INSN - and it does not set CC0.
5812 But don't do this if we cannot directly address the
5813 memory location, since this will make it harder to
5814 reuse address reloads, and increases register pressure.
5815 Also don't do this if we can probably update x directly. */
5816 rtx equiv = (MEM_P (XEXP (x, 0))
5817 ? XEXP (x, 0)
5818 : reg_equiv_mem (regno));
5819 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5820 if (insn && NONJUMP_INSN_P (insn)
5821 #if HAVE_cc0
5822 && ! sets_cc0_p (PATTERN (insn))
5823 #endif
5824 && (regno < FIRST_PSEUDO_REGISTER
5825 || (equiv
5826 && memory_operand (equiv, GET_MODE (equiv))
5827 && ! (icode != CODE_FOR_nothing
5828 && insn_operand_matches (icode, 0, equiv)
5829 && insn_operand_matches (icode, 1, equiv))))
5830 /* Using RELOAD_OTHER means we emit this and the reload we
5831 made earlier in the wrong order. */
5832 && !reloaded_inner_of_autoinc)
5833 {
5834 /* We use the original pseudo for loc, so that
5835 emit_reload_insns() knows which pseudo this
5836 reload refers to and updates the pseudo rtx, not
5837 its equivalent memory location, as well as the
5838 corresponding entry in reg_last_reload_reg. */
5839 loc = &XEXP (x_orig, 0);
5840 x = XEXP (x, 0);
5841 reloadnum
5842 = push_reload (x, x, loc, loc,
5843 context_reg_class,
5844 GET_MODE (x), GET_MODE (x), 0, 0,
5845 opnum, RELOAD_OTHER);
5846 }
5847 else
5848 {
5849 reloadnum
5850 = push_reload (x, x, loc, (rtx*) 0,
5851 context_reg_class,
5852 GET_MODE (x), GET_MODE (x), 0, 0,
5853 opnum, type);
5854 rld[reloadnum].inc
5855 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5856
5857 value = 1;
5858 }
5859
5860 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5861 reloadnum);
5862 }
5863 return value;
5864 }
5865 return 0;
5866
5867 case TRUNCATE:
5868 case SIGN_EXTEND:
5869 case ZERO_EXTEND:
5870 /* Look for parts to reload in the inner expression and reload them
5871 too, in addition to this operation. Reloading all inner parts in
5872 addition to this one shouldn't be necessary, but at this point,
5873 we don't know if we can possibly omit any part that *can* be
5874 reloaded. Targets that are better off reloading just either part
5875 (or perhaps even a different part of an outer expression), should
5876 define LEGITIMIZE_RELOAD_ADDRESS. */
5877 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5878 context, code, SCRATCH, &XEXP (x, 0), opnum,
5879 type, ind_levels, insn);
5880 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5881 context_reg_class,
5882 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5883 return 1;
5884
5885 case MEM:
5886 /* This is probably the result of a substitution, by eliminate_regs, of
5887 an equivalent address for a pseudo that was not allocated to a hard
5888 register. Verify that the specified address is valid and reload it
5889 into a register.
5890
5891 Since we know we are going to reload this item, don't decrement for
5892 the indirection level.
5893
5894 Note that this is actually conservative: it would be slightly more
5895 efficient to use the value of SPILL_INDIRECT_LEVELS from
5896 reload1.c here. */
5897
5898 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5899 opnum, ADDR_TYPE (type), ind_levels, insn);
5900 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5901 context_reg_class,
5902 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5903 return 1;
5904
5905 case REG:
5906 {
5907 int regno = REGNO (x);
5908
5909 if (reg_equiv_constant (regno) != 0)
5910 {
5911 find_reloads_address_part (reg_equiv_constant (regno), loc,
5912 context_reg_class,
5913 GET_MODE (x), opnum, type, ind_levels);
5914 return 1;
5915 }
5916
5917 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5918 that feeds this insn. */
5919 if (reg_equiv_mem (regno) != 0)
5920 {
5921 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5922 context_reg_class,
5923 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5924 return 1;
5925 }
5926 #endif
5927
5928 if (reg_equiv_memory_loc (regno)
5929 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5930 {
5931 rtx tem = make_memloc (x, regno);
5932 if (reg_equiv_address (regno) != 0
5933 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5934 {
5935 x = tem;
5936 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5937 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5938 ind_levels, insn);
5939 if (!rtx_equal_p (x, tem))
5940 push_reg_equiv_alt_mem (regno, x);
5941 }
5942 }
5943
5944 if (reg_renumber[regno] >= 0)
5945 regno = reg_renumber[regno];
5946
5947 if (regno >= FIRST_PSEUDO_REGISTER
5948 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5949 index_code))
5950 {
5951 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5952 context_reg_class,
5953 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5954 return 1;
5955 }
5956
5957 /* If a register appearing in an address is the subject of a CLOBBER
5958 in this insn, reload it into some other register to be safe.
5959 The CLOBBER is supposed to make the register unavailable
5960 from before this insn to after it. */
5961 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5962 {
5963 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5964 context_reg_class,
5965 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5966 return 1;
5967 }
5968 }
5969 return 0;
5970
5971 case SUBREG:
5972 if (REG_P (SUBREG_REG (x)))
5973 {
5974 /* If this is a SUBREG of a hard register and the resulting register
5975 is of the wrong class, reload the whole SUBREG. This avoids
5976 needless copies if SUBREG_REG is multi-word. */
5977 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5978 {
5979 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5980
5981 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5982 index_code))
5983 {
5984 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5985 context_reg_class,
5986 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5987 return 1;
5988 }
5989 }
5990 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5991 is larger than the class size, then reload the whole SUBREG. */
5992 else
5993 {
5994 enum reg_class rclass = context_reg_class;
5995 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
5996 > reg_class_size[(int) rclass])
5997 {
5998 /* If the inner register will be replaced by a memory
5999 reference, we can do this only if we can replace the
6000 whole subreg by a (narrower) memory reference. If
6001 this is not possible, fall through and reload just
6002 the inner register (including address reloads). */
6003 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6004 {
6005 rtx tem = find_reloads_subreg_address (x, opnum,
6006 ADDR_TYPE (type),
6007 ind_levels, insn,
6008 NULL);
6009 if (tem)
6010 {
6011 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6012 GET_MODE (tem), VOIDmode, 0, 0,
6013 opnum, type);
6014 return 1;
6015 }
6016 }
6017 else
6018 {
6019 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6020 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6021 return 1;
6022 }
6023 }
6024 }
6025 }
6026 break;
6027
6028 default:
6029 break;
6030 }
6031
6032 {
6033 const char *fmt = GET_RTX_FORMAT (code);
6034 int i;
6035
6036 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6037 {
6038 if (fmt[i] == 'e')
6039 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6040 we get here. */
6041 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6042 code, SCRATCH, &XEXP (x, i),
6043 opnum, type, ind_levels, insn);
6044 }
6045 }
6046
6047 #undef REG_OK_FOR_CONTEXT
6048 return 0;
6049 }
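
/* Worked example (register numbers hypothetical): for an address
   (plus (reg 65) (reg 66)) reaching the PLUS case above, the code first
   checks whether one register can serve as a base and the other as an
   index; if so, nothing is reloaded.  Otherwise only the register that
   fails its role is reloaded, into INDEX_REG_CLASS or the appropriate
   base register class, so a single reload usually suffices.  */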
6050
6051 /* X, which is found at *LOC, is a part of an address that needs to be
6052 reloaded into a register of class RCLASS. If X is a constant, or if
6053 X is a PLUS that contains a constant, check that the constant is a
6054 legitimate operand and that we are supposed to be able to load
6055 it into the register.
6056
6057 If not, force the constant into memory and reload the MEM instead.
6058
6059 MODE is the mode to use, in case X is an integer constant.
6060
6061 OPNUM and TYPE describe the purpose of any reloads made.
6062
6063 IND_LEVELS says how many levels of indirect addressing this machine
6064 supports. */
6065
6066 static void
6067 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6068 machine_mode mode, int opnum,
6069 enum reload_type type, int ind_levels)
6070 {
6071 if (CONSTANT_P (x)
6072 && (!targetm.legitimate_constant_p (mode, x)
6073 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6074 {
6075 x = force_const_mem (mode, x);
6076 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6077 opnum, type, ind_levels, 0);
6078 }
6079
6080 else if (GET_CODE (x) == PLUS
6081 && CONSTANT_P (XEXP (x, 1))
6082 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6083 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6084 == NO_REGS))
6085 {
6086 rtx tem;
6087
6088 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6089 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6090 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6091 opnum, type, ind_levels, 0);
6092 }
6093
6094 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6095 mode, VOIDmode, 0, 0, opnum, type);
6096 }
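
/* For illustration (a hypothetical target restriction): if X is a
   floating-point constant that targetm.legitimate_constant_p rejects, the
   constant is spilled to the constant pool with force_const_mem, the new
   MEM's own address gets whatever reloads it needs, and it is that MEM,
   not the original constant, that is then reloaded into RCLASS.  */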
6097
6098 /* X, a subreg of a pseudo, is a part of an address that needs to be
6099 reloaded, and the pseudo is equivalent to a memory location.
6100
6101 Attempt to replace the whole subreg by a (possibly narrower or wider)
6102 memory reference. If this is possible, return this new memory
6103 reference, and push all required address reloads. Otherwise,
6104 return NULL.
6105
6106 OPNUM and TYPE identify the purpose of the reload.
6107
6108 IND_LEVELS says how many levels of indirect addressing are
6109 supported at this point in the address.
6110
6111 INSN, if nonzero, is the insn in which we do the reload. It is used
6112 to determine where to put USEs for pseudos that we have to replace with
6113 stack slots. */
6114
6115 static rtx
6116 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6117 int ind_levels, rtx_insn *insn,
6118 int *address_reloaded)
6119 {
6120 machine_mode outer_mode = GET_MODE (x);
6121 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6122 int regno = REGNO (SUBREG_REG (x));
6123 int reloaded = 0;
6124 rtx tem, orig;
6125 poly_int64 offset;
6126
6127 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6128
6129 /* We cannot replace the subreg with a modified memory reference if:
6130
6131 - we have a paradoxical subreg that implicitly acts as a zero or
6132 sign extension operation due to LOAD_EXTEND_OP;
6133
6134 - we have a subreg that is implicitly supposed to act on the full
6135 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6136
6137 - the address of the equivalent memory location is mode-dependent; or
6138
6139 - we have a paradoxical subreg and the resulting memory is not
6140 sufficiently aligned to allow access in the wider mode.
6141
6142 In addition, we choose not to perform the replacement for *any*
6143 paradoxical subreg, even if it were possible in principle. This
6144 is to avoid generating wider memory references than necessary.
6145
6146 This corresponds to how previous versions of reload used to handle
6147 paradoxical subregs where no address reload was required. */
6148
6149 if (paradoxical_subreg_p (x))
6150 return NULL;
6151
6152 if (WORD_REGISTER_OPERATIONS
6153 && partial_subreg_p (outer_mode, inner_mode)
6154 && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6155 GET_MODE_SIZE (inner_mode) - 1,
6156 UNITS_PER_WORD))
6157 return NULL;
6158
6159 /* Since we don't attempt to handle paradoxical subregs, we can just
6160 call into simplify_subreg, which will handle all remaining checks
6161 for us. */
6162 orig = make_memloc (SUBREG_REG (x), regno);
6163 offset = SUBREG_BYTE (x);
6164 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6165 if (!tem || !MEM_P (tem))
6166 return NULL;
6167
6168 /* Now push all required address reloads, if any. */
6169 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6170 XEXP (tem, 0), &XEXP (tem, 0),
6171 opnum, type, ind_levels, insn);
6172 /* ??? Do we need to handle nonzero offsets somehow? */
6173 if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6174 push_reg_equiv_alt_mem (regno, tem);
6175
6176 /* For some processors an address may be valid in the original mode but
6177 not in a smaller mode. For example, ARM accepts a scaled index register
6178 in SImode but not in HImode. Note that this is only a problem if the
6179 address in reg_equiv_mem is already invalid in the new mode; other
6180 cases would be fixed by find_reloads_address as usual.
6181
6182 ??? We attempt to handle such cases here by doing an additional reload
6183 of the full address after the usual processing by find_reloads_address.
6184 Note that this may not work in the general case, but it seems to cover
6185 the cases where this situation currently occurs. A more general fix
6186 might be to reload the *value* instead of the address, but this would
6187 not be expected by the callers of this routine as-is.
6188
6189 If find_reloads_address already completely replaced the address, there
6190 is nothing further to do. */
6191 if (reloaded == 0
6192 && reg_equiv_mem (regno) != 0
6193 && !strict_memory_address_addr_space_p
6194 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6195 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6196 {
6197 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6198 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6199 MEM, SCRATCH),
6200 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6201 reloaded = 1;
6202 }
6203
6204 /* If this is not a toplevel operand, find_reloads doesn't see this
6205 substitution. We have to emit a USE of the pseudo so that
6206 delete_output_reload can see it. */
6207 if (replace_reloads && recog_data.operand[opnum] != x)
6208 /* We mark the USE with QImode so that we recognize it as one that
6209 can be safely deleted at the end of reload. */
6210 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6211 QImode);
6212
6213 if (address_reloaded)
6214 *address_reloaded = reloaded;
6215
6216 return tem;
6217 }
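
/* Worked example (register number hypothetical; assumes a target without
   WORD_REGISTER_OPERATIONS and a non-mode-dependent address): for
   (subreg:HI (reg:SI 120) 2), where pseudo 120 is equivalent to
   (mem:SI (plus (reg fp) (const_int -12))), simplify_subreg typically
   produces (mem:HI (plus (reg fp) (const_int -10))); only that narrower
   MEM's address is then passed to find_reloads_address.  A paradoxical
   subreg would instead make the routine return NULL.  */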
6218
6219 /* Substitute into the current INSN the registers into which we have reloaded
6220 the things that need reloading. The array `replacements'
6221 contains the locations of all pointers that must be changed
6222 and says what to replace them with.
6223
6224 The substitutions are made destructively, in place, in INSN.  */
6225
6226 void
6227 subst_reloads (rtx_insn *insn)
6228 {
6229 int i;
6230
6231 for (i = 0; i < n_replacements; i++)
6232 {
6233 struct replacement *r = &replacements[i];
6234 rtx reloadreg = rld[r->what].reg_rtx;
6235 if (reloadreg)
6236 {
6237 #ifdef DEBUG_RELOAD
6238 /* This checking takes a very long time on some platforms
6239 causing the gcc.c-torture/compile/limits-fnargs.c test
6240 to time out during testing. See PR 31850.
6241
6242 Internal consistency test. Check that we don't modify
6243 anything in the equivalence arrays. Whenever something from
6244 those arrays needs to be reloaded, it must be unshared before
6245 being substituted into; the equivalence must not be modified.
6246 Otherwise, if the equivalence is used after that, it will
6247 have been modified, and the thing substituted (probably a
6248 register) is likely overwritten and not a usable equivalence. */
6249 int check_regno;
6250
6251 for (check_regno = 0; check_regno < max_regno; check_regno++)
6252 {
6253 #define CHECK_MODF(ARRAY) \
6254 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6255 || !loc_mentioned_in_p (r->where, \
6256 (*reg_equivs)[check_regno].ARRAY))
6257
6258 CHECK_MODF (constant);
6259 CHECK_MODF (memory_loc);
6260 CHECK_MODF (address);
6261 CHECK_MODF (mem);
6262 #undef CHECK_MODF
6263 }
6264 #endif /* DEBUG_RELOAD */
6265
6266 /* If we're replacing a LABEL_REF with a register, there must
6267 already be an indication (to e.g. flow) which label this
6268 register refers to. */
6269 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6270 || !JUMP_P (insn)
6271 || find_reg_note (insn,
6272 REG_LABEL_OPERAND,
6273 XEXP (*r->where, 0))
6274 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6275
6276 /* Encapsulate RELOADREG so its machine mode matches what
6277 used to be there. Note that gen_lowpart_common will
6278 do the wrong thing if RELOADREG is multi-word. RELOADREG
6279 will always be a REG here. */
6280 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6281 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6282
6283 *r->where = reloadreg;
6284 }
6285 /* If reload got no reg and isn't optional, something's wrong. */
6286 else
6287 gcc_assert (rld[r->what].optional);
6288 }
6289 }
6290
6291 /* Make a copy of any replacements being done into X and move those
6292 copies to locations in Y, a copy of X. */
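
/* Illustrative use (an assumption about a typical caller, not code from
   this file): when an rtx that may contain recorded replacement locations
   is duplicated, the copy must be registered as well, e.g.

       rtx copy = copy_rtx (old);
       copy_replacements (old, copy);

   so that subst_reloads later substitutes into the copy at exactly the
   places where it would have substituted into the original.  */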
6293
6294 void
copy_replacements (rtx x, rtx y)
6296 {
6297 copy_replacements_1 (&x, &y, n_replacements);
6298 }
6299
6300 static void
copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6302 {
6303 int i, j;
6304 rtx x, y;
6305 struct replacement *r;
6306 enum rtx_code code;
6307 const char *fmt;
6308
6309 for (j = 0; j < orig_replacements; j++)
6310 if (replacements[j].where == px)
6311 {
6312 r = &replacements[n_replacements++];
6313 r->where = py;
6314 r->what = replacements[j].what;
6315 r->mode = replacements[j].mode;
6316 }
6317
6318 x = *px;
6319 y = *py;
6320 code = GET_CODE (x);
6321 fmt = GET_RTX_FORMAT (code);
6322
6323 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6324 {
6325 if (fmt[i] == 'e')
6326 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6327 else if (fmt[i] == 'E')
6328 for (j = XVECLEN (x, i); --j >= 0; )
6329 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6330 orig_replacements);
6331 }
6332 }
6333
6334 /* Change any replacements being done to *X to be done to *Y. */
6335
6336 void
move_replacements (rtx *x, rtx *y)
6338 {
6339 int i;
6340
6341 for (i = 0; i < n_replacements; i++)
6342 if (replacements[i].where == x)
6343 replacements[i].where = y;
6344 }
6345
6346 /* If LOC was scheduled to be replaced by something, return the replacement.
6347 Otherwise, return *LOC. */
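
/* Illustrative use (an assumption, not code from this file): code that
   emits reload insns and needs the register that will eventually stand in
   for a location, rather than the rtx currently there, can write

       rtx addr = find_replacement (&XEXP (mem, 0));

   which yields the reload register scheduled for that location, or the
   present contents of the location if no replacement applies to it.  */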
6348
6349 rtx
find_replacement (rtx *loc)
6351 {
6352 struct replacement *r;
6353
6354 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6355 {
6356 rtx reloadreg = rld[r->what].reg_rtx;
6357
6358 if (reloadreg && r->where == loc)
6359 {
6360 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6361 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6362
6363 return reloadreg;
6364 }
6365 else if (reloadreg && GET_CODE (*loc) == SUBREG
6366 && r->where == &SUBREG_REG (*loc))
6367 {
6368 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6369 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6370
6371 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6372 GET_MODE (SUBREG_REG (*loc)),
6373 SUBREG_BYTE (*loc));
6374 }
6375 }
6376
6377 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6378 what's inside and make a new rtl if so. */
6379 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6380 || GET_CODE (*loc) == MULT)
6381 {
6382 rtx x = find_replacement (&XEXP (*loc, 0));
6383 rtx y = find_replacement (&XEXP (*loc, 1));
6384
6385 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6386 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6387 }
6388
6389 return *loc;
6390 }
6391
6392 /* Return nonzero if register in range [REGNO, ENDREGNO)
6393 appears either explicitly or implicitly in X
6394 other than being stored into (except for earlyclobber operands).
6395
6396 References contained within the substructure at LOC do not count.
6397 LOC may be zero, meaning don't ignore anything.
6398
6399 This is similar to refers_to_regno_p in rtlanal.c except that we
6400 look at equivalences for pseudos that didn't get hard registers. */
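
/* For example (illustrative only): the call

       refers_to_regno_for_reload_p (3, 4, PATTERN (insn), (rtx *) 0)

   is nonzero if hard register 3 is used anywhere in the pattern other than
   as a plain store destination, and also if a spilled pseudo whose memory
   equivalence mentions register 3 appears in the pattern.  */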
6401
6402 static int
refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6404 rtx x, rtx *loc)
6405 {
6406 int i;
6407 unsigned int r;
6408 RTX_CODE code;
6409 const char *fmt;
6410
6411 if (x == 0)
6412 return 0;
6413
6414 repeat:
6415 code = GET_CODE (x);
6416
6417 switch (code)
6418 {
6419 case REG:
6420 r = REGNO (x);
6421
6422 /* If this is a pseudo, a hard register must not have been allocated.
6423 X must therefore either be a constant or be in memory. */
6424 if (r >= FIRST_PSEUDO_REGISTER)
6425 {
6426 if (reg_equiv_memory_loc (r))
6427 return refers_to_regno_for_reload_p (regno, endregno,
6428 reg_equiv_memory_loc (r),
6429 (rtx*) 0);
6430
6431 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6432 return 0;
6433 }
6434
6435 return endregno > r && regno < END_REGNO (x);
6436
6437 case SUBREG:
6438 /* If this is a SUBREG of a hard reg, we can see exactly which
6439 registers are being modified. Otherwise, handle normally. */
6440 if (REG_P (SUBREG_REG (x))
6441 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6442 {
6443 unsigned int inner_regno = subreg_regno (x);
6444 unsigned int inner_endregno
6445 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6446 ? subreg_nregs (x) : 1);
6447
6448 return endregno > inner_regno && regno < inner_endregno;
6449 }
6450 break;
6451
6452 case CLOBBER:
6453 case SET:
6454 if (&SET_DEST (x) != loc
6455 /* Note setting a SUBREG counts as referring to the REG it is in for
6456 a pseudo but not for hard registers since we can
6457 treat each word individually. */
6458 && ((GET_CODE (SET_DEST (x)) == SUBREG
6459 && loc != &SUBREG_REG (SET_DEST (x))
6460 && REG_P (SUBREG_REG (SET_DEST (x)))
6461 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6462 && refers_to_regno_for_reload_p (regno, endregno,
6463 SUBREG_REG (SET_DEST (x)),
6464 loc))
6465 /* If the output is an earlyclobber operand, this is
6466 a conflict. */
6467 || ((!REG_P (SET_DEST (x))
6468 || earlyclobber_operand_p (SET_DEST (x)))
6469 && refers_to_regno_for_reload_p (regno, endregno,
6470 SET_DEST (x), loc))))
6471 return 1;
6472
6473 if (code == CLOBBER || loc == &SET_SRC (x))
6474 return 0;
6475 x = SET_SRC (x);
6476 goto repeat;
6477
6478 default:
6479 break;
6480 }
6481
6482 /* X does not match, so try its subexpressions. */
6483
6484 fmt = GET_RTX_FORMAT (code);
6485 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6486 {
6487 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6488 {
6489 if (i == 0)
6490 {
6491 x = XEXP (x, 0);
6492 goto repeat;
6493 }
6494 else
6495 if (refers_to_regno_for_reload_p (regno, endregno,
6496 XEXP (x, i), loc))
6497 return 1;
6498 }
6499 else if (fmt[i] == 'E')
6500 {
6501 int j;
6502 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6503 if (loc != &XVECEXP (x, i, j)
6504 && refers_to_regno_for_reload_p (regno, endregno,
6505 XVECEXP (x, i, j), loc))
6506 return 1;
6507 }
6508 }
6509 return 0;
6510 }
6511
6512 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6513 we check if any register number in X conflicts with the relevant register
6514 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6515 contains a MEM (we don't bother checking for memory addresses that can't
conflict because we expect this to be a rare case).
6517
6518 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6519 that we look at equivalences for pseudos that didn't get hard registers. */
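
/* Illustrative example (an assumption, not taken from this file): if
   pseudo 100 received no hard register and its equivalence is a stack
   slot, then

       reg_overlap_mentioned_for_reload_p (regno_reg_rtx[100], in)

   reduces to asking whether IN mentions any MEM at all, whereas plain
   reg_overlap_mentioned_p would only look for (reg 100) inside IN.  */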
6520
6521 int
reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6523 {
6524 int regno, endregno;
6525
6526 /* Overly conservative. */
6527 if (GET_CODE (x) == STRICT_LOW_PART
6528 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6529 x = XEXP (x, 0);
6530
/* If either argument is a constant, then modifying X cannot affect IN. */
6532 if (CONSTANT_P (x) || CONSTANT_P (in))
6533 return 0;
6534 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6535 return refers_to_mem_for_reload_p (in);
6536 else if (GET_CODE (x) == SUBREG)
6537 {
6538 regno = REGNO (SUBREG_REG (x));
6539 if (regno < FIRST_PSEUDO_REGISTER)
6540 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6541 GET_MODE (SUBREG_REG (x)),
6542 SUBREG_BYTE (x),
6543 GET_MODE (x));
6544 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6545 ? subreg_nregs (x) : 1);
6546
6547 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6548 }
6549 else if (REG_P (x))
6550 {
6551 regno = REGNO (x);
6552
6553 /* If this is a pseudo, it must not have been assigned a hard register.
6554 Therefore, it must either be in memory or be a constant. */
6555
6556 if (regno >= FIRST_PSEUDO_REGISTER)
6557 {
6558 if (reg_equiv_memory_loc (regno))
6559 return refers_to_mem_for_reload_p (in);
6560 gcc_assert (reg_equiv_constant (regno));
6561 return 0;
6562 }
6563
6564 endregno = END_REGNO (x);
6565
6566 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6567 }
6568 else if (MEM_P (x))
6569 return refers_to_mem_for_reload_p (in);
6570 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6571 || GET_CODE (x) == CC0)
6572 return reg_mentioned_p (x, in);
6573 else
6574 {
6575 gcc_assert (GET_CODE (x) == PLUS);
6576
6577 /* We actually want to know if X is mentioned somewhere inside IN.
6578 We must not say that (plus (sp) (const_int 124)) is in
6579 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6580 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6581 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6582 while (MEM_P (in))
6583 in = XEXP (in, 0);
6584 if (REG_P (in))
6585 return 0;
6586 else if (GET_CODE (in) == PLUS)
6587 return (rtx_equal_p (x, in)
6588 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6589 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6590 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6591 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6592 }
6593
6594 gcc_unreachable ();
6595 }
6596
6597 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6598 registers. */
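
/* For instance (illustrative only): (plus (reg 100) (const_int 4)) counts
   as containing a MEM whenever pseudo 100 has a memory equivalence, even
   though no MEM appears in the expression itself.  */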
6599
6600 static int
refers_to_mem_for_reload_p (rtx x)
6602 {
6603 const char *fmt;
6604 int i;
6605
6606 if (MEM_P (x))
6607 return 1;
6608
6609 if (REG_P (x))
6610 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6611 && reg_equiv_memory_loc (REGNO (x)));
6612
6613 fmt = GET_RTX_FORMAT (GET_CODE (x));
6614 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6615 if (fmt[i] == 'e'
6616 && (MEM_P (XEXP (x, i))
6617 || refers_to_mem_for_reload_p (XEXP (x, i))))
6618 return 1;
6619
6620 return 0;
6621 }
6622
6623 /* Check the insns before INSN to see if there is a suitable register
6624 containing the same value as GOAL.
6625 If OTHER is -1, look for a register in class RCLASS.
6626 Otherwise, just see if register number OTHER shares GOAL's value.
6627
6628 Return an rtx for the register found, or zero if none is found.
6629
6630 If RELOAD_REG_P is (short *)1,
6631 we reject any hard reg that appears in reload_reg_rtx
6632 because such a hard reg is also needed coming into this insn.
6633
6634 If RELOAD_REG_P is any other nonzero value,
6635 it is a vector indexed by hard reg number
6636 and we reject any hard reg whose element in the vector is nonnegative
6637 as well as any that appears in reload_reg_rtx.
6638
6639 If GOAL is zero, then GOALREG is a register number; we look
6640 for an equivalent for that register.
6641
6642 MODE is the machine mode of the value we want an equivalence for.
6643 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6644
6645 This function is used by jump.c as well as in the reload pass.
6646
6647 If GOAL is the sum of the stack pointer and a constant, we treat it
6648 as if it were a constant except that sp is required to be unchanging. */
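
/* An illustrative call (an assumption about a typical caller, not code
   from this file): to ask whether some register of class RCLASS already
   holds the value of IN just before INSN, reload can use

       rtx equiv = find_equiv_reg (in, insn, rclass, -1,
                                   static_reload_reg_p, 0, inmode);

   A nonzero result is a hard-register rtx whose contents provably equal
   IN at INSN, so no separate load needs to be generated for that value.  */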
6649
6650 rtx
find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6652 short *reload_reg_p, int goalreg, machine_mode mode)
6653 {
6654 rtx_insn *p = insn;
6655 rtx goaltry, valtry, value;
6656 rtx_insn *where;
6657 rtx pat;
6658 int regno = -1;
6659 int valueno;
6660 int goal_mem = 0;
6661 int goal_const = 0;
6662 int goal_mem_addr_varies = 0;
6663 int need_stable_sp = 0;
6664 int nregs;
6665 int valuenregs;
6666 int num = 0;
6667
6668 if (goal == 0)
6669 regno = goalreg;
6670 else if (REG_P (goal))
6671 regno = REGNO (goal);
6672 else if (MEM_P (goal))
6673 {
6674 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6675 if (MEM_VOLATILE_P (goal))
6676 return 0;
6677 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6678 return 0;
6679 /* An address with side effects must be reexecuted. */
6680 switch (code)
6681 {
6682 case POST_INC:
6683 case PRE_INC:
6684 case POST_DEC:
6685 case PRE_DEC:
6686 case POST_MODIFY:
6687 case PRE_MODIFY:
6688 return 0;
6689 default:
6690 break;
6691 }
6692 goal_mem = 1;
6693 }
6694 else if (CONSTANT_P (goal))
6695 goal_const = 1;
6696 else if (GET_CODE (goal) == PLUS
6697 && XEXP (goal, 0) == stack_pointer_rtx
6698 && CONSTANT_P (XEXP (goal, 1)))
6699 goal_const = need_stable_sp = 1;
6700 else if (GET_CODE (goal) == PLUS
6701 && XEXP (goal, 0) == frame_pointer_rtx
6702 && CONSTANT_P (XEXP (goal, 1)))
6703 goal_const = 1;
6704 else
6705 return 0;
6706
6707 num = 0;
6708 /* Scan insns back from INSN, looking for one that copies
6709 a value into or out of GOAL.
6710 Stop and give up if we reach a label. */
6711
6712 while (1)
6713 {
6714 p = PREV_INSN (p);
6715 if (p && DEBUG_INSN_P (p))
6716 continue;
6717 num++;
6718 if (p == 0 || LABEL_P (p)
6719 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6720 return 0;
6721
6722 /* Don't reuse register contents from before a setjmp-type
6723 function call; on the second return (from the longjmp) it
6724 might have been clobbered by a later reuse. It doesn't
6725 seem worthwhile to actually go and see if it is actually
6726 reused even if that information would be readily available;
6727 just don't reuse it across the setjmp call. */
6728 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6729 return 0;
6730
6731 if (NONJUMP_INSN_P (p)
6732 /* If we don't want spill regs ... */
6733 && (! (reload_reg_p != 0
6734 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6735 /* ... then ignore insns introduced by reload; they aren't
6736 useful and can cause results in reload_as_needed to be
6737 different from what they were when calculating the need for
6738 spills. If we notice an input-reload insn here, we will
6739 reject it below, but it might hide a usable equivalent.
6740 That makes bad code. It may even fail: perhaps no reg was
6741 spilled for this insn because it was assumed we would find
6742 that equivalent. */
6743 || INSN_UID (p) < reload_first_uid))
6744 {
6745 rtx tem;
6746 pat = single_set (p);
6747
6748 /* First check for something that sets some reg equal to GOAL. */
6749 if (pat != 0
6750 && ((regno >= 0
6751 && true_regnum (SET_SRC (pat)) == regno
6752 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6753 ||
6754 (regno >= 0
6755 && true_regnum (SET_DEST (pat)) == regno
6756 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6757 ||
6758 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6759 /* When looking for stack pointer + const,
6760 make sure we don't use a stack adjust. */
6761 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6762 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6763 || (goal_mem
6764 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6765 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6766 || (goal_mem
6767 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6768 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6769 /* If we are looking for a constant,
6770 and something equivalent to that constant was copied
6771 into a reg, we can use that reg. */
6772 || (goal_const && REG_NOTES (p) != 0
6773 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6774 && ((rtx_equal_p (XEXP (tem, 0), goal)
6775 && (valueno
6776 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6777 || (REG_P (SET_DEST (pat))
6778 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6779 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6780 && CONST_INT_P (goal)
6781 && (goaltry = operand_subword (XEXP (tem, 0), 0,
6782 0, VOIDmode)) != 0
6783 && rtx_equal_p (goal, goaltry)
6784 && (valtry
6785 = operand_subword (SET_DEST (pat), 0, 0,
6786 VOIDmode))
6787 && (valueno = true_regnum (valtry)) >= 0)))
6788 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6789 NULL_RTX))
6790 && REG_P (SET_DEST (pat))
6791 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6792 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6793 && CONST_INT_P (goal)
6794 && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6795 VOIDmode)) != 0
6796 && rtx_equal_p (goal, goaltry)
6797 && (valtry
6798 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6799 && (valueno = true_regnum (valtry)) >= 0)))
6800 {
6801 if (other >= 0)
6802 {
6803 if (valueno != other)
6804 continue;
6805 }
6806 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6807 continue;
6808 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6809 mode, valueno))
6810 continue;
6811 value = valtry;
6812 where = p;
6813 break;
6814 }
6815 }
6816 }
6817
6818 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6819 (or copying VALUE into GOAL, if GOAL is also a register).
6820 Now verify that VALUE is really valid. */
6821
6822 /* VALUENO is the register number of VALUE; a hard register. */
6823
6824 /* Don't try to re-use something that is killed in this insn. We want
6825 to be able to trust REG_UNUSED notes. */
6826 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6827 return 0;
6828
6829 /* If we propose to get the value from the stack pointer or if GOAL is
6830 a MEM based on the stack pointer, we need a stable SP. */
6831 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6832 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6833 goal)))
6834 need_stable_sp = 1;
6835
6836 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6837 if (GET_MODE (value) != mode)
6838 return 0;
6839
6840 /* Reject VALUE if it was loaded from GOAL
6841 and is also a register that appears in the address of GOAL. */
6842
6843 if (goal_mem && value == SET_DEST (single_set (where))
6844 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6845 goal, (rtx*) 0))
6846 return 0;
6847
6848 /* Reject registers that overlap GOAL. */
6849
6850 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6851 nregs = hard_regno_nregs (regno, mode);
6852 else
6853 nregs = 1;
6854 valuenregs = hard_regno_nregs (valueno, mode);
6855
6856 if (!goal_mem && !goal_const
6857 && regno + nregs > valueno && regno < valueno + valuenregs)
6858 return 0;
6859
6860 /* Reject VALUE if it is one of the regs reserved for reloads.
6861 Reload1 knows how to reuse them anyway, and it would get
6862 confused if we allocated one without its knowledge.
6863 (Now that insns introduced by reload are ignored above,
6864 this case shouldn't happen, but I'm not positive.) */
6865
6866 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6867 {
6868 int i;
6869 for (i = 0; i < valuenregs; ++i)
6870 if (reload_reg_p[valueno + i] >= 0)
6871 return 0;
6872 }
6873
6874 /* Reject VALUE if it is a register being used for an input reload
6875 even if it is not one of those reserved. */
6876
6877 if (reload_reg_p != 0)
6878 {
6879 int i;
6880 for (i = 0; i < n_reloads; i++)
6881 if (rld[i].reg_rtx != 0
6882 && rld[i].in
6883 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6884 && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6885 return 0;
6886 }
6887
6888 if (goal_mem)
6889 /* We must treat frame pointer as varying here,
6890 since it can vary--in a nonlocal goto as generated by expand_goto. */
6891 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6892
6893 /* Now verify that the values of GOAL and VALUE remain unaltered
6894 until INSN is reached. */
6895
6896 p = insn;
6897 while (1)
6898 {
6899 p = PREV_INSN (p);
6900 if (p == where)
6901 return value;
6902
6903 /* Don't trust the conversion past a function call
6904 if either of the two is in a call-clobbered register, or memory. */
6905 if (CALL_P (p))
6906 {
6907 int i;
6908
6909 if (goal_mem || need_stable_sp)
6910 return 0;
6911
6912 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6913 for (i = 0; i < nregs; ++i)
6914 if (call_used_regs[regno + i]
6915 || targetm.hard_regno_call_part_clobbered (regno + i, mode))
6916 return 0;
6917
6918 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6919 for (i = 0; i < valuenregs; ++i)
6920 if (call_used_regs[valueno + i]
6921 || targetm.hard_regno_call_part_clobbered (valueno + i,
6922 mode))
6923 return 0;
6924 }
6925
6926 if (INSN_P (p))
6927 {
6928 pat = PATTERN (p);
6929
6930 /* Watch out for unspec_volatile, and volatile asms. */
6931 if (volatile_insn_p (pat))
6932 return 0;
6933
6934 /* If this insn P stores in either GOAL or VALUE, return 0.
6935 If GOAL is a memory ref and this insn writes memory, return 0.
6936 If GOAL is a memory ref and its address is not constant,
6937 and this insn P changes a register used in GOAL, return 0. */
6938
6939 if (GET_CODE (pat) == COND_EXEC)
6940 pat = COND_EXEC_CODE (pat);
6941 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6942 {
6943 rtx dest = SET_DEST (pat);
6944 while (GET_CODE (dest) == SUBREG
6945 || GET_CODE (dest) == ZERO_EXTRACT
6946 || GET_CODE (dest) == STRICT_LOW_PART)
6947 dest = XEXP (dest, 0);
6948 if (REG_P (dest))
6949 {
6950 int xregno = REGNO (dest);
6951 int end_xregno = END_REGNO (dest);
6952 if (xregno < regno + nregs && end_xregno > regno)
6953 return 0;
6954 if (xregno < valueno + valuenregs
6955 && end_xregno > valueno)
6956 return 0;
6957 if (goal_mem_addr_varies
6958 && reg_overlap_mentioned_for_reload_p (dest, goal))
6959 return 0;
6960 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6961 return 0;
6962 }
6963 else if (goal_mem && MEM_P (dest)
6964 && ! push_operand (dest, GET_MODE (dest)))
6965 return 0;
6966 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6967 && reg_equiv_memory_loc (regno) != 0)
6968 return 0;
6969 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6970 return 0;
6971 }
6972 else if (GET_CODE (pat) == PARALLEL)
6973 {
6974 int i;
6975 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6976 {
6977 rtx v1 = XVECEXP (pat, 0, i);
6978 if (GET_CODE (v1) == COND_EXEC)
6979 v1 = COND_EXEC_CODE (v1);
6980 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6981 {
6982 rtx dest = SET_DEST (v1);
6983 while (GET_CODE (dest) == SUBREG
6984 || GET_CODE (dest) == ZERO_EXTRACT
6985 || GET_CODE (dest) == STRICT_LOW_PART)
6986 dest = XEXP (dest, 0);
6987 if (REG_P (dest))
6988 {
6989 int xregno = REGNO (dest);
6990 int end_xregno = END_REGNO (dest);
6991 if (xregno < regno + nregs
6992 && end_xregno > regno)
6993 return 0;
6994 if (xregno < valueno + valuenregs
6995 && end_xregno > valueno)
6996 return 0;
6997 if (goal_mem_addr_varies
6998 && reg_overlap_mentioned_for_reload_p (dest,
6999 goal))
7000 return 0;
7001 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7002 return 0;
7003 }
7004 else if (goal_mem && MEM_P (dest)
7005 && ! push_operand (dest, GET_MODE (dest)))
7006 return 0;
7007 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7008 && reg_equiv_memory_loc (regno) != 0)
7009 return 0;
7010 else if (need_stable_sp
7011 && push_operand (dest, GET_MODE (dest)))
7012 return 0;
7013 }
7014 }
7015 }
7016
7017 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7018 {
7019 rtx link;
7020
7021 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7022 link = XEXP (link, 1))
7023 {
7024 pat = XEXP (link, 0);
7025 if (GET_CODE (pat) == CLOBBER)
7026 {
7027 rtx dest = SET_DEST (pat);
7028
7029 if (REG_P (dest))
7030 {
7031 int xregno = REGNO (dest);
7032 int end_xregno = END_REGNO (dest);
7033
7034 if (xregno < regno + nregs
7035 && end_xregno > regno)
7036 return 0;
7037 else if (xregno < valueno + valuenregs
7038 && end_xregno > valueno)
7039 return 0;
7040 else if (goal_mem_addr_varies
7041 && reg_overlap_mentioned_for_reload_p (dest,
7042 goal))
7043 return 0;
7044 }
7045
7046 else if (goal_mem && MEM_P (dest)
7047 && ! push_operand (dest, GET_MODE (dest)))
7048 return 0;
7049 else if (need_stable_sp
7050 && push_operand (dest, GET_MODE (dest)))
7051 return 0;
7052 }
7053 }
7054 }
7055
7056 #if AUTO_INC_DEC
7057 /* If this insn auto-increments or auto-decrements
7058 either regno or valueno, return 0 now.
7059 If GOAL is a memory ref and its address is not constant,
7060 and this insn P increments a register used in GOAL, return 0. */
7061 {
7062 rtx link;
7063
7064 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7065 if (REG_NOTE_KIND (link) == REG_INC
7066 && REG_P (XEXP (link, 0)))
7067 {
7068 int incno = REGNO (XEXP (link, 0));
7069 if (incno < regno + nregs && incno >= regno)
7070 return 0;
7071 if (incno < valueno + valuenregs && incno >= valueno)
7072 return 0;
7073 if (goal_mem_addr_varies
7074 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7075 goal))
7076 return 0;
7077 }
7078 }
7079 #endif
7080 }
7081 }
7082 }
7083
7084 /* Find a place where INCED appears in an increment or decrement operator
7085 within X, and return the amount INCED is incremented or decremented by.
7086 The value is always positive. */
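
/* For instance (illustrative only): with INCED = (reg:SI 2), scanning
   X = (set (mem:SI (post_inc:SI (reg:SI 2))) (reg:SI 3)) yields 4, the
   size of the SImode access; for a PRE_MODIFY or POST_MODIFY address the
   result is the absolute value of the constant addend.  */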
7087
7088 static poly_int64
find_inc_amount (rtx x, rtx inced)
7090 {
7091 enum rtx_code code = GET_CODE (x);
7092 const char *fmt;
7093 int i;
7094
7095 if (code == MEM)
7096 {
7097 rtx addr = XEXP (x, 0);
7098 if ((GET_CODE (addr) == PRE_DEC
7099 || GET_CODE (addr) == POST_DEC
7100 || GET_CODE (addr) == PRE_INC
7101 || GET_CODE (addr) == POST_INC)
7102 && XEXP (addr, 0) == inced)
7103 return GET_MODE_SIZE (GET_MODE (x));
7104 else if ((GET_CODE (addr) == PRE_MODIFY
7105 || GET_CODE (addr) == POST_MODIFY)
7106 && GET_CODE (XEXP (addr, 1)) == PLUS
7107 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7108 && XEXP (addr, 0) == inced
7109 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7110 {
7111 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7112 return i < 0 ? -i : i;
7113 }
7114 }
7115
7116 fmt = GET_RTX_FORMAT (code);
7117 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7118 {
7119 if (fmt[i] == 'e')
7120 {
7121 poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7122 if (maybe_ne (tem, 0))
7123 return tem;
7124 }
7125 if (fmt[i] == 'E')
7126 {
7127 int j;
7128 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7129 {
7130 poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7131 if (maybe_ne (tem, 0))
7132 return tem;
7133 }
7134 }
7135 }
7136
7137 return 0;
7138 }
7139
7140 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7141 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7142
7143 static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7145 rtx insn)
7146 {
7147 rtx link;
7148
7149 if (!AUTO_INC_DEC)
7150 return 0;
7151
7152 gcc_assert (insn);
7153
7154 if (! INSN_P (insn))
7155 return 0;
7156
7157 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7158 if (REG_NOTE_KIND (link) == REG_INC)
7159 {
7160 unsigned int test = (int) REGNO (XEXP (link, 0));
7161 if (test >= regno && test < endregno)
7162 return 1;
7163 }
7164 return 0;
7165 }
7166
7167 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7168 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7169 REG_INC. REGNO must refer to a hard register. */
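
/* For example (illustrative only): regno_clobbered_p (3, insn, SImode, 0)
   returns 1 when INSN's pattern, or any element of its PARALLEL, is a
   CLOBBER whose destination overlaps hard register 3 in SImode; passing
   SETS == 1 additionally counts SET destinations, while SETS == 2 counts
   registers mentioned in REG_INC notes instead of SETs.  */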
7170
7171 int
regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7173 int sets)
7174 {
7175 /* regno must be a hard register. */
7176 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7177
7178 unsigned int endregno = end_hard_regno (mode, regno);
7179
7180 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7181 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7182 && REG_P (XEXP (PATTERN (insn), 0)))
7183 {
7184 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7185
7186 return test >= regno && test < endregno;
7187 }
7188
7189 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7190 return 1;
7191
7192 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7193 {
7194 int i = XVECLEN (PATTERN (insn), 0) - 1;
7195
7196 for (; i >= 0; i--)
7197 {
7198 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7199 if ((GET_CODE (elt) == CLOBBER
7200 || (sets == 1 && GET_CODE (elt) == SET))
7201 && REG_P (XEXP (elt, 0)))
7202 {
7203 unsigned int test = REGNO (XEXP (elt, 0));
7204
7205 if (test >= regno && test < endregno)
7206 return 1;
7207 }
7208 if (sets == 2
7209 && reg_inc_found_and_valid_p (regno, endregno, elt))
7210 return 1;
7211 }
7212 }
7213
7214 return 0;
7215 }
7216
/* Find the low part, with mode MODE, of the hard register RELOADREG. */
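
/* For example (illustrative only): asking for the SImode low part of the
   DImode register pair (reg:DI 4) returns (reg:SI 4) when register words
   are little-endian, but (reg:SI 5) when REG_WORDS_BIG_ENDIAN, because the
   least significant word then lives in the higher-numbered register.  */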
7218 rtx
reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7220 {
7221 int regno;
7222
7223 if (GET_MODE (reloadreg) == mode)
7224 return reloadreg;
7225
7226 regno = REGNO (reloadreg);
7227
7228 if (REG_WORDS_BIG_ENDIAN)
7229 regno += ((int) REG_NREGS (reloadreg)
7230 - (int) hard_regno_nregs (regno, mode));
7231
7232 return gen_rtx_REG (mode, regno);
7233 }
7234
7235 static const char *const reload_when_needed_name[] =
7236 {
7237 "RELOAD_FOR_INPUT",
7238 "RELOAD_FOR_OUTPUT",
7239 "RELOAD_FOR_INSN",
7240 "RELOAD_FOR_INPUT_ADDRESS",
7241 "RELOAD_FOR_INPADDR_ADDRESS",
7242 "RELOAD_FOR_OUTPUT_ADDRESS",
7243 "RELOAD_FOR_OUTADDR_ADDRESS",
7244 "RELOAD_FOR_OPERAND_ADDRESS",
7245 "RELOAD_FOR_OPADDR_ADDR",
7246 "RELOAD_OTHER",
7247 "RELOAD_FOR_OTHER_ADDRESS"
7248 };
7249
/* These functions are used to print the variables set by 'find_reloads'.  */
7251
7252 DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
7254 {
7255 int r;
7256 const char *prefix;
7257
7258 if (! f)
7259 f = stderr;
7260 for (r = 0; r < n_reloads; r++)
7261 {
7262 fprintf (f, "Reload %d: ", r);
7263
7264 if (rld[r].in != 0)
7265 {
7266 fprintf (f, "reload_in (%s) = ",
7267 GET_MODE_NAME (rld[r].inmode));
7268 print_inline_rtx (f, rld[r].in, 24);
7269 fprintf (f, "\n\t");
7270 }
7271
7272 if (rld[r].out != 0)
7273 {
7274 fprintf (f, "reload_out (%s) = ",
7275 GET_MODE_NAME (rld[r].outmode));
7276 print_inline_rtx (f, rld[r].out, 24);
7277 fprintf (f, "\n\t");
7278 }
7279
7280 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7281
7282 fprintf (f, "%s (opnum = %d)",
7283 reload_when_needed_name[(int) rld[r].when_needed],
7284 rld[r].opnum);
7285
7286 if (rld[r].optional)
7287 fprintf (f, ", optional");
7288
7289 if (rld[r].nongroup)
7290 fprintf (f, ", nongroup");
7291
7292 if (maybe_ne (rld[r].inc, 0))
7293 {
7294 fprintf (f, ", inc by ");
7295 print_dec (rld[r].inc, f, SIGNED);
7296 }
7297
7298 if (rld[r].nocombine)
7299 fprintf (f, ", can't combine");
7300
7301 if (rld[r].secondary_p)
7302 fprintf (f, ", secondary_reload_p");
7303
7304 if (rld[r].in_reg != 0)
7305 {
7306 fprintf (f, "\n\treload_in_reg: ");
7307 print_inline_rtx (f, rld[r].in_reg, 24);
7308 }
7309
7310 if (rld[r].out_reg != 0)
7311 {
7312 fprintf (f, "\n\treload_out_reg: ");
7313 print_inline_rtx (f, rld[r].out_reg, 24);
7314 }
7315
7316 if (rld[r].reg_rtx != 0)
7317 {
7318 fprintf (f, "\n\treload_reg_rtx: ");
7319 print_inline_rtx (f, rld[r].reg_rtx, 24);
7320 }
7321
7322 prefix = "\n\t";
7323 if (rld[r].secondary_in_reload != -1)
7324 {
7325 fprintf (f, "%ssecondary_in_reload = %d",
7326 prefix, rld[r].secondary_in_reload);
7327 prefix = ", ";
7328 }
7329
7330 if (rld[r].secondary_out_reload != -1)
7331 fprintf (f, "%ssecondary_out_reload = %d\n",
7332 prefix, rld[r].secondary_out_reload);
7333
7334 prefix = "\n\t";
7335 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7336 {
7337 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7338 insn_data[rld[r].secondary_in_icode].name);
7339 prefix = ", ";
7340 }
7341
7342 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7343 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7344 insn_data[rld[r].secondary_out_icode].name);
7345
7346 fprintf (f, "\n");
7347 }
7348 }
7349
7350 DEBUG_FUNCTION void
debug_reload (void)
7352 {
7353 debug_reload_to_stream (stderr);
7354 }
7355