1*c87b03e5Sespie /* Allocate registers within a basic block, for GNU compiler.
2*c87b03e5Sespie Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3*c87b03e5Sespie 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4*c87b03e5Sespie
5*c87b03e5Sespie This file is part of GCC.
6*c87b03e5Sespie
7*c87b03e5Sespie GCC is free software; you can redistribute it and/or modify it under
8*c87b03e5Sespie the terms of the GNU General Public License as published by the Free
9*c87b03e5Sespie Software Foundation; either version 2, or (at your option) any later
10*c87b03e5Sespie version.
11*c87b03e5Sespie
12*c87b03e5Sespie GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13*c87b03e5Sespie WARRANTY; without even the implied warranty of MERCHANTABILITY or
14*c87b03e5Sespie FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15*c87b03e5Sespie for more details.
16*c87b03e5Sespie
17*c87b03e5Sespie You should have received a copy of the GNU General Public License
18*c87b03e5Sespie along with GCC; see the file COPYING. If not, write to the Free
19*c87b03e5Sespie Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20*c87b03e5Sespie 02111-1307, USA. */
21*c87b03e5Sespie
22*c87b03e5Sespie /* Allocation of hard register numbers to pseudo registers is done in
23*c87b03e5Sespie two passes. In this pass we consider only regs that are born and
24*c87b03e5Sespie die once within one basic block. We do this one basic block at a
25*c87b03e5Sespie time. Then the next pass allocates the registers that remain.
26*c87b03e5Sespie Two passes are used because this pass uses methods that work only
27*c87b03e5Sespie on linear code, but that do a better job than the general methods
28*c87b03e5Sespie used in global_alloc, and more quickly too.
29*c87b03e5Sespie
30*c87b03e5Sespie The assignments made are recorded in the vector reg_renumber
31*c87b03e5Sespie whose space is allocated here. The rtl code itself is not altered.
32*c87b03e5Sespie
33*c87b03e5Sespie We assign each instruction in the basic block a number
34*c87b03e5Sespie which is its order from the beginning of the block.
35*c87b03e5Sespie Then we can represent the lifetime of a pseudo register with
36*c87b03e5Sespie a pair of numbers, and check for conflicts easily.
37*c87b03e5Sespie We can record the availability of hard registers with a
38*c87b03e5Sespie HARD_REG_SET for each instruction. The HARD_REG_SET
39*c87b03e5Sespie contains 0 or 1 for each hard reg.
40*c87b03e5Sespie
41*c87b03e5Sespie To avoid register shuffling, we tie registers together when one
42*c87b03e5Sespie dies by being copied into another, or dies in an instruction that
43*c87b03e5Sespie does arithmetic to produce another. The tied registers are
44*c87b03e5Sespie allocated as one. Registers with different reg class preferences
45*c87b03e5Sespie can never be tied unless the class preferred by one is a subclass
46*c87b03e5Sespie of the one preferred by the other.
47*c87b03e5Sespie
48*c87b03e5Sespie Tying is represented with "quantity numbers".
49*c87b03e5Sespie A non-tied register is given a new quantity number.
50*c87b03e5Sespie Tied registers have the same quantity number.
51*c87b03e5Sespie
52*c87b03e5Sespie We have provision to exempt registers, even when they are contained
53*c87b03e5Sespie within the block, that can be tied to others that are not contained in it.
54*c87b03e5Sespie This is so that global_alloc could process them both and tie them then.
55*c87b03e5Sespie But this is currently disabled since tying in global_alloc is not
56*c87b03e5Sespie yet implemented. */
57*c87b03e5Sespie
58*c87b03e5Sespie /* Pseudos allocated here can be reallocated by global.c if the hard register
59*c87b03e5Sespie is used as a spill register. Currently we don't allocate such pseudos
60*c87b03e5Sespie here if their preferred class is likely to be used by spills. */
61*c87b03e5Sespie
62*c87b03e5Sespie #include "config.h"
63*c87b03e5Sespie #include "system.h"
64*c87b03e5Sespie #include "hard-reg-set.h"
65*c87b03e5Sespie #include "rtl.h"
66*c87b03e5Sespie #include "tm_p.h"
67*c87b03e5Sespie #include "flags.h"
68*c87b03e5Sespie #include "basic-block.h"
69*c87b03e5Sespie #include "regs.h"
70*c87b03e5Sespie #include "function.h"
71*c87b03e5Sespie #include "insn-config.h"
72*c87b03e5Sespie #include "insn-attr.h"
73*c87b03e5Sespie #include "recog.h"
74*c87b03e5Sespie #include "output.h"
75*c87b03e5Sespie #include "toplev.h"
76*c87b03e5Sespie #include "except.h"
77*c87b03e5Sespie #include "integrate.h"
78*c87b03e5Sespie
/* Next quantity number available for allocation.  */

static int next_qty;

/* Information we maintain about each quantity (a set of pseudo registers
   that have been tied together and will be allocated as one).  */
struct qty
{
  /* The number of refs to quantity Q.  */

  int n_refs;

  /* The frequency of uses of quantity Q.  */

  int freq;

  /* Insn number (counting from head of basic block)
     where quantity Q was born.  -1 if birth has not been recorded.  */

  int birth;

  /* Insn number (counting from head of basic block)
     where given quantity died.  Due to the way tying is done,
     and the fact that we consider in this pass only regs that die but once,
     a quantity can die only once.  Each quantity's life span
     is a set of consecutive insns.  -1 if death has not been recorded.  */

  int death;

  /* Number of words needed to hold the data in given quantity.
     This depends on its machine mode.  It is used for these purposes:
     1. It is used in computing the relative importances of qtys,
	which determines the order in which we look for regs for them.
     2. It is used in rules that prevent tying several registers of
	different sizes in a way that is geometrically impossible
	(see combine_regs).  */

  int size;

  /* Number of times a reg tied to given qty lives across a CALL_INSN.  */

  int n_calls_crossed;

  /* The register number of one pseudo register whose reg_qty value is Q.
     This register should be the head of the chain
     maintained in reg_next_in_qty.  */

  int first_reg;

  /* Reg class contained in (smaller than) the preferred classes of all
     the pseudo regs that are tied in given quantity.
     This is the preferred class for allocating that quantity.  */

  enum reg_class min_class;

  /* Register class within which we allocate given qty if we can't get
     its preferred class.  */

  enum reg_class alternate_class;

  /* This holds the mode of the registers that are tied to given qty,
     or VOIDmode if registers with differing modes are tied together.  */

  enum machine_mode mode;

  /* The hard reg number chosen for given quantity,
     or -1 if none was found.  */

  short phys_reg;
};

/* One element per quantity; allocated (and freed) in local_alloc.  */
static struct qty *qty;
150*c87b03e5Sespie
/* These fields are kept separately to speedup their clearing.  */

/* We maintain two hard register sets that indicate suggested hard registers
   for each quantity.  The first, phys_copy_sugg, contains hard registers
   that are tied to the quantity by a simple copy.  The second contains all
   hard registers that are tied to the quantity via an arithmetic operation.

   The former register set is given priority for allocation.  This tends to
   eliminate copy insns.  */

/* Element Q is a set of hard registers that are suggested for quantity Q by
   copy insns.  */

static HARD_REG_SET *qty_phys_copy_sugg;

/* Element Q is a set of hard registers that are suggested for quantity Q by
   arithmetic insns.  */

static HARD_REG_SET *qty_phys_sugg;

/* Element Q is the number of suggested registers in qty_phys_copy_sugg.  */

static short *qty_phys_num_copy_sugg;

/* Element Q is the number of suggested registers in qty_phys_sugg.  */

static short *qty_phys_num_sugg;

/* If (REG N) has been assigned a quantity number, this is the register
   number of another register assigned the same quantity number, or -1 for
   the end of the chain.  qty->first_reg points to the head of this chain.  */

static int *reg_next_in_qty;

/* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
   if it is >= 0,
   or -1 if this register cannot be allocated by local-alloc,
   or -2 if not known yet.

   Note that if we see a use or death of pseudo register N with
   reg_qty[N] == -2, register N must be local to the current block.  If
   it were used in more than one block, we would have reg_qty[N] == -1.
   This relies on the fact that if reg_basic_block[N] is >= 0, register N
   will not appear in any other block.  We save a considerable number of
   tests by exploiting this.

   If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
   be referenced.  */

static int *reg_qty;

/* The offset (in words) of register N within its quantity.
   This can be nonzero if register N is SImode, and has been tied
   to a subreg of a DImode register.  */

static char *reg_offset;

/* Vector of substitutions of register numbers,
   used to map pseudo regs into hardware regs.
   This is set up as a result of register allocation.
   Element N is the hard reg assigned to pseudo reg N,
   or is -1 if no hard reg was assigned.
   If N is a hard reg number, element N is N.  */

short *reg_renumber;

/* Set of hard registers live at the current point in the scan
   of the instructions in a basic block.  */

static HARD_REG_SET regs_live;

/* Each set of hard registers indicates registers live at a particular
   point in the basic block.  For N even, regs_live_at[N] says which
   hard registers are needed *after* insn N/2 (i.e., they may not
   conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).

   If an object is to conflict with the inputs of insn J but not the
   outputs of insn J + 1, we say it is born at index J*2 - 1.  Similarly,
   if it is to conflict with the outputs of insn J but not the inputs of
   insn J + 1, it is said to die at index J*2 + 1.  */

static HARD_REG_SET *regs_live_at;

/* Communicate local vars `insn_number' and `insn'
   from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'.  */
static int this_insn_number;
static rtx this_insn;
238*c87b03e5Sespie
/* Per-pseudo bookkeeping used by update_equiv_regs when deciding whether
   a register can be replaced by its equivalent initializing expression.  */
struct equivalence
{
  /* Set when an attempt should be made to replace a register
     with the associated src_p entry.  */

  char replace;

  /* Set when a REG_EQUIV note is found or created.  Used to
     keep track of what memory accesses might be created later,
     e.g. by reload.  */

  rtx replacement;

  /* Pointer to the source expression slot used for the replacement
     (presumably &SET_SRC of the initializing insn -- the writer,
     update_equiv_regs, is not visible in this chunk; confirm there).  */

  rtx *src_p;

  /* Loop depth is used to recognize equivalences which appear
     to be present within the same loop (or in an inner loop).  */

  int loop_depth;

  /* The list of each instruction which initializes this register.  */

  rtx init_insns;
};

/* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
   structure for that register.  */

static struct equivalence *reg_equiv;

/* Nonzero if we recorded an equivalence for a LABEL_REF; returned by
   local_alloc so the caller knows to rerun the jump optimizer.  */
static int recorded_label_ref;
271*c87b03e5Sespie
272*c87b03e5Sespie static void alloc_qty PARAMS ((int, enum machine_mode, int, int));
273*c87b03e5Sespie static void validate_equiv_mem_from_store PARAMS ((rtx, rtx, void *));
274*c87b03e5Sespie static int validate_equiv_mem PARAMS ((rtx, rtx, rtx));
275*c87b03e5Sespie static int equiv_init_varies_p PARAMS ((rtx));
276*c87b03e5Sespie static int equiv_init_movable_p PARAMS ((rtx, int));
277*c87b03e5Sespie static int contains_replace_regs PARAMS ((rtx));
278*c87b03e5Sespie static int memref_referenced_p PARAMS ((rtx, rtx));
279*c87b03e5Sespie static int memref_used_between_p PARAMS ((rtx, rtx, rtx));
280*c87b03e5Sespie static void update_equiv_regs PARAMS ((void));
281*c87b03e5Sespie static void no_equiv PARAMS ((rtx, rtx, void *));
282*c87b03e5Sespie static void block_alloc PARAMS ((int));
283*c87b03e5Sespie static int qty_sugg_compare PARAMS ((int, int));
284*c87b03e5Sespie static int qty_sugg_compare_1 PARAMS ((const PTR, const PTR));
285*c87b03e5Sespie static int qty_compare PARAMS ((int, int));
286*c87b03e5Sespie static int qty_compare_1 PARAMS ((const PTR, const PTR));
287*c87b03e5Sespie static int combine_regs PARAMS ((rtx, rtx, int, int, rtx, int));
288*c87b03e5Sespie static int reg_meets_class_p PARAMS ((int, enum reg_class));
289*c87b03e5Sespie static void update_qty_class PARAMS ((int, int));
290*c87b03e5Sespie static void reg_is_set PARAMS ((rtx, rtx, void *));
291*c87b03e5Sespie static void reg_is_born PARAMS ((rtx, int));
292*c87b03e5Sespie static void wipe_dead_reg PARAMS ((rtx, int));
293*c87b03e5Sespie static int find_free_reg PARAMS ((enum reg_class, enum machine_mode,
294*c87b03e5Sespie int, int, int, int, int));
295*c87b03e5Sespie static void mark_life PARAMS ((int, enum machine_mode, int));
296*c87b03e5Sespie static void post_mark_life PARAMS ((int, enum machine_mode, int, int, int));
297*c87b03e5Sespie static int no_conflict_p PARAMS ((rtx, rtx, rtx));
298*c87b03e5Sespie static int requires_inout PARAMS ((const char *));
299*c87b03e5Sespie
300*c87b03e5Sespie /* Allocate a new quantity (new within current basic block)
301*c87b03e5Sespie for register number REGNO which is born at index BIRTH
302*c87b03e5Sespie within the block. MODE and SIZE are info on reg REGNO. */
303*c87b03e5Sespie
304*c87b03e5Sespie static void
alloc_qty(regno,mode,size,birth)305*c87b03e5Sespie alloc_qty (regno, mode, size, birth)
306*c87b03e5Sespie int regno;
307*c87b03e5Sespie enum machine_mode mode;
308*c87b03e5Sespie int size, birth;
309*c87b03e5Sespie {
310*c87b03e5Sespie int qtyno = next_qty++;
311*c87b03e5Sespie
312*c87b03e5Sespie reg_qty[regno] = qtyno;
313*c87b03e5Sespie reg_offset[regno] = 0;
314*c87b03e5Sespie reg_next_in_qty[regno] = -1;
315*c87b03e5Sespie
316*c87b03e5Sespie qty[qtyno].first_reg = regno;
317*c87b03e5Sespie qty[qtyno].size = size;
318*c87b03e5Sespie qty[qtyno].mode = mode;
319*c87b03e5Sespie qty[qtyno].birth = birth;
320*c87b03e5Sespie qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
321*c87b03e5Sespie qty[qtyno].min_class = reg_preferred_class (regno);
322*c87b03e5Sespie qty[qtyno].alternate_class = reg_alternate_class (regno);
323*c87b03e5Sespie qty[qtyno].n_refs = REG_N_REFS (regno);
324*c87b03e5Sespie qty[qtyno].freq = REG_FREQ (regno);
325*c87b03e5Sespie }
326*c87b03e5Sespie
327*c87b03e5Sespie /* Main entry point of this file. */
328*c87b03e5Sespie
329*c87b03e5Sespie int
local_alloc()330*c87b03e5Sespie local_alloc ()
331*c87b03e5Sespie {
332*c87b03e5Sespie int i;
333*c87b03e5Sespie int max_qty;
334*c87b03e5Sespie basic_block b;
335*c87b03e5Sespie
336*c87b03e5Sespie /* We need to keep track of whether or not we recorded a LABEL_REF so
337*c87b03e5Sespie that we know if the jump optimizer needs to be rerun. */
338*c87b03e5Sespie recorded_label_ref = 0;
339*c87b03e5Sespie
340*c87b03e5Sespie /* Leaf functions and non-leaf functions have different needs.
341*c87b03e5Sespie If defined, let the machine say what kind of ordering we
342*c87b03e5Sespie should use. */
343*c87b03e5Sespie #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
344*c87b03e5Sespie ORDER_REGS_FOR_LOCAL_ALLOC;
345*c87b03e5Sespie #endif
346*c87b03e5Sespie
347*c87b03e5Sespie /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
348*c87b03e5Sespie registers. */
349*c87b03e5Sespie if (optimize)
350*c87b03e5Sespie update_equiv_regs ();
351*c87b03e5Sespie
352*c87b03e5Sespie /* This sets the maximum number of quantities we can have. Quantity
353*c87b03e5Sespie numbers start at zero and we can have one for each pseudo. */
354*c87b03e5Sespie max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
355*c87b03e5Sespie
356*c87b03e5Sespie /* Allocate vectors of temporary data.
357*c87b03e5Sespie See the declarations of these variables, above,
358*c87b03e5Sespie for what they mean. */
359*c87b03e5Sespie
360*c87b03e5Sespie qty = (struct qty *) xmalloc (max_qty * sizeof (struct qty));
361*c87b03e5Sespie qty_phys_copy_sugg
362*c87b03e5Sespie = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
363*c87b03e5Sespie qty_phys_num_copy_sugg = (short *) xmalloc (max_qty * sizeof (short));
364*c87b03e5Sespie qty_phys_sugg = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
365*c87b03e5Sespie qty_phys_num_sugg = (short *) xmalloc (max_qty * sizeof (short));
366*c87b03e5Sespie
367*c87b03e5Sespie reg_qty = (int *) xmalloc (max_regno * sizeof (int));
368*c87b03e5Sespie reg_offset = (char *) xmalloc (max_regno * sizeof (char));
369*c87b03e5Sespie reg_next_in_qty = (int *) xmalloc (max_regno * sizeof (int));
370*c87b03e5Sespie
371*c87b03e5Sespie /* Determine which pseudo-registers can be allocated by local-alloc.
372*c87b03e5Sespie In general, these are the registers used only in a single block and
373*c87b03e5Sespie which only die once.
374*c87b03e5Sespie
375*c87b03e5Sespie We need not be concerned with which block actually uses the register
376*c87b03e5Sespie since we will never see it outside that block. */
377*c87b03e5Sespie
378*c87b03e5Sespie for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
379*c87b03e5Sespie {
380*c87b03e5Sespie if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
381*c87b03e5Sespie reg_qty[i] = -2;
382*c87b03e5Sespie else
383*c87b03e5Sespie reg_qty[i] = -1;
384*c87b03e5Sespie }
385*c87b03e5Sespie
386*c87b03e5Sespie /* Force loop below to initialize entire quantity array. */
387*c87b03e5Sespie next_qty = max_qty;
388*c87b03e5Sespie
389*c87b03e5Sespie /* Allocate each block's local registers, block by block. */
390*c87b03e5Sespie
391*c87b03e5Sespie FOR_EACH_BB (b)
392*c87b03e5Sespie {
393*c87b03e5Sespie /* NEXT_QTY indicates which elements of the `qty_...'
394*c87b03e5Sespie vectors might need to be initialized because they were used
395*c87b03e5Sespie for the previous block; it is set to the entire array before
396*c87b03e5Sespie block 0. Initialize those, with explicit loop if there are few,
397*c87b03e5Sespie else with bzero and bcopy. Do not initialize vectors that are
398*c87b03e5Sespie explicit set by `alloc_qty'. */
399*c87b03e5Sespie
400*c87b03e5Sespie if (next_qty < 6)
401*c87b03e5Sespie {
402*c87b03e5Sespie for (i = 0; i < next_qty; i++)
403*c87b03e5Sespie {
404*c87b03e5Sespie CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
405*c87b03e5Sespie qty_phys_num_copy_sugg[i] = 0;
406*c87b03e5Sespie CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
407*c87b03e5Sespie qty_phys_num_sugg[i] = 0;
408*c87b03e5Sespie }
409*c87b03e5Sespie }
410*c87b03e5Sespie else
411*c87b03e5Sespie {
412*c87b03e5Sespie #define CLEAR(vector) \
413*c87b03e5Sespie memset ((char *) (vector), 0, (sizeof (*(vector))) * next_qty);
414*c87b03e5Sespie
415*c87b03e5Sespie CLEAR (qty_phys_copy_sugg);
416*c87b03e5Sespie CLEAR (qty_phys_num_copy_sugg);
417*c87b03e5Sespie CLEAR (qty_phys_sugg);
418*c87b03e5Sespie CLEAR (qty_phys_num_sugg);
419*c87b03e5Sespie }
420*c87b03e5Sespie
421*c87b03e5Sespie next_qty = 0;
422*c87b03e5Sespie
423*c87b03e5Sespie block_alloc (b->index);
424*c87b03e5Sespie }
425*c87b03e5Sespie
426*c87b03e5Sespie free (qty);
427*c87b03e5Sespie free (qty_phys_copy_sugg);
428*c87b03e5Sespie free (qty_phys_num_copy_sugg);
429*c87b03e5Sespie free (qty_phys_sugg);
430*c87b03e5Sespie free (qty_phys_num_sugg);
431*c87b03e5Sespie
432*c87b03e5Sespie free (reg_qty);
433*c87b03e5Sespie free (reg_offset);
434*c87b03e5Sespie free (reg_next_in_qty);
435*c87b03e5Sespie
436*c87b03e5Sespie return recorded_label_ref;
437*c87b03e5Sespie }
438*c87b03e5Sespie
439*c87b03e5Sespie /* Used for communication between the following two functions: contains
440*c87b03e5Sespie a MEM that we wish to ensure remains unchanged. */
441*c87b03e5Sespie static rtx equiv_mem;
442*c87b03e5Sespie
443*c87b03e5Sespie /* Set nonzero if EQUIV_MEM is modified. */
444*c87b03e5Sespie static int equiv_mem_modified;
445*c87b03e5Sespie
446*c87b03e5Sespie /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
447*c87b03e5Sespie Called via note_stores. */
448*c87b03e5Sespie
449*c87b03e5Sespie static void
validate_equiv_mem_from_store(dest,set,data)450*c87b03e5Sespie validate_equiv_mem_from_store (dest, set, data)
451*c87b03e5Sespie rtx dest;
452*c87b03e5Sespie rtx set ATTRIBUTE_UNUSED;
453*c87b03e5Sespie void *data ATTRIBUTE_UNUSED;
454*c87b03e5Sespie {
455*c87b03e5Sespie if ((GET_CODE (dest) == REG
456*c87b03e5Sespie && reg_overlap_mentioned_p (dest, equiv_mem))
457*c87b03e5Sespie || (GET_CODE (dest) == MEM
458*c87b03e5Sespie && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
459*c87b03e5Sespie equiv_mem_modified = 1;
460*c87b03e5Sespie }
461*c87b03e5Sespie
462*c87b03e5Sespie /* Verify that no store between START and the death of REG invalidates
463*c87b03e5Sespie MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
464*c87b03e5Sespie by storing into an overlapping memory location, or with a non-const
465*c87b03e5Sespie CALL_INSN.
466*c87b03e5Sespie
467*c87b03e5Sespie Return 1 if MEMREF remains valid. */
468*c87b03e5Sespie
469*c87b03e5Sespie static int
validate_equiv_mem(start,reg,memref)470*c87b03e5Sespie validate_equiv_mem (start, reg, memref)
471*c87b03e5Sespie rtx start;
472*c87b03e5Sespie rtx reg;
473*c87b03e5Sespie rtx memref;
474*c87b03e5Sespie {
475*c87b03e5Sespie rtx insn;
476*c87b03e5Sespie rtx note;
477*c87b03e5Sespie
478*c87b03e5Sespie equiv_mem = memref;
479*c87b03e5Sespie equiv_mem_modified = 0;
480*c87b03e5Sespie
481*c87b03e5Sespie /* If the memory reference has side effects or is volatile, it isn't a
482*c87b03e5Sespie valid equivalence. */
483*c87b03e5Sespie if (side_effects_p (memref))
484*c87b03e5Sespie return 0;
485*c87b03e5Sespie
486*c87b03e5Sespie for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
487*c87b03e5Sespie {
488*c87b03e5Sespie if (! INSN_P (insn))
489*c87b03e5Sespie continue;
490*c87b03e5Sespie
491*c87b03e5Sespie if (find_reg_note (insn, REG_DEAD, reg))
492*c87b03e5Sespie return 1;
493*c87b03e5Sespie
494*c87b03e5Sespie if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
495*c87b03e5Sespie && ! CONST_OR_PURE_CALL_P (insn))
496*c87b03e5Sespie return 0;
497*c87b03e5Sespie
498*c87b03e5Sespie note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
499*c87b03e5Sespie
500*c87b03e5Sespie /* If a register mentioned in MEMREF is modified via an
501*c87b03e5Sespie auto-increment, we lose the equivalence. Do the same if one
502*c87b03e5Sespie dies; although we could extend the life, it doesn't seem worth
503*c87b03e5Sespie the trouble. */
504*c87b03e5Sespie
505*c87b03e5Sespie for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
506*c87b03e5Sespie if ((REG_NOTE_KIND (note) == REG_INC
507*c87b03e5Sespie || REG_NOTE_KIND (note) == REG_DEAD)
508*c87b03e5Sespie && GET_CODE (XEXP (note, 0)) == REG
509*c87b03e5Sespie && reg_overlap_mentioned_p (XEXP (note, 0), memref))
510*c87b03e5Sespie return 0;
511*c87b03e5Sespie }
512*c87b03e5Sespie
513*c87b03e5Sespie return 0;
514*c87b03e5Sespie }
515*c87b03e5Sespie
516*c87b03e5Sespie /* Returns zero if X is known to be invariant. */
517*c87b03e5Sespie
518*c87b03e5Sespie static int
equiv_init_varies_p(x)519*c87b03e5Sespie equiv_init_varies_p (x)
520*c87b03e5Sespie rtx x;
521*c87b03e5Sespie {
522*c87b03e5Sespie RTX_CODE code = GET_CODE (x);
523*c87b03e5Sespie int i;
524*c87b03e5Sespie const char *fmt;
525*c87b03e5Sespie
526*c87b03e5Sespie switch (code)
527*c87b03e5Sespie {
528*c87b03e5Sespie case MEM:
529*c87b03e5Sespie return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
530*c87b03e5Sespie
531*c87b03e5Sespie case QUEUED:
532*c87b03e5Sespie return 1;
533*c87b03e5Sespie
534*c87b03e5Sespie case CONST:
535*c87b03e5Sespie case CONST_INT:
536*c87b03e5Sespie case CONST_DOUBLE:
537*c87b03e5Sespie case CONST_VECTOR:
538*c87b03e5Sespie case SYMBOL_REF:
539*c87b03e5Sespie case LABEL_REF:
540*c87b03e5Sespie return 0;
541*c87b03e5Sespie
542*c87b03e5Sespie case REG:
543*c87b03e5Sespie return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
544*c87b03e5Sespie
545*c87b03e5Sespie case ASM_OPERANDS:
546*c87b03e5Sespie if (MEM_VOLATILE_P (x))
547*c87b03e5Sespie return 1;
548*c87b03e5Sespie
549*c87b03e5Sespie /* FALLTHROUGH */
550*c87b03e5Sespie
551*c87b03e5Sespie default:
552*c87b03e5Sespie break;
553*c87b03e5Sespie }
554*c87b03e5Sespie
555*c87b03e5Sespie fmt = GET_RTX_FORMAT (code);
556*c87b03e5Sespie for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
557*c87b03e5Sespie if (fmt[i] == 'e')
558*c87b03e5Sespie {
559*c87b03e5Sespie if (equiv_init_varies_p (XEXP (x, i)))
560*c87b03e5Sespie return 1;
561*c87b03e5Sespie }
562*c87b03e5Sespie else if (fmt[i] == 'E')
563*c87b03e5Sespie {
564*c87b03e5Sespie int j;
565*c87b03e5Sespie for (j = 0; j < XVECLEN (x, i); j++)
566*c87b03e5Sespie if (equiv_init_varies_p (XVECEXP (x, i, j)))
567*c87b03e5Sespie return 1;
568*c87b03e5Sespie }
569*c87b03e5Sespie
570*c87b03e5Sespie return 0;
571*c87b03e5Sespie }
572*c87b03e5Sespie
573*c87b03e5Sespie /* Returns nonzero if X (used to initialize register REGNO) is movable.
574*c87b03e5Sespie X is only movable if the registers it uses have equivalent initializations
575*c87b03e5Sespie which appear to be within the same loop (or in an inner loop) and movable
576*c87b03e5Sespie or if they are not candidates for local_alloc and don't vary. */
577*c87b03e5Sespie
578*c87b03e5Sespie static int
equiv_init_movable_p(x,regno)579*c87b03e5Sespie equiv_init_movable_p (x, regno)
580*c87b03e5Sespie rtx x;
581*c87b03e5Sespie int regno;
582*c87b03e5Sespie {
583*c87b03e5Sespie int i, j;
584*c87b03e5Sespie const char *fmt;
585*c87b03e5Sespie enum rtx_code code = GET_CODE (x);
586*c87b03e5Sespie
587*c87b03e5Sespie switch (code)
588*c87b03e5Sespie {
589*c87b03e5Sespie case SET:
590*c87b03e5Sespie return equiv_init_movable_p (SET_SRC (x), regno);
591*c87b03e5Sespie
592*c87b03e5Sespie case CC0:
593*c87b03e5Sespie case CLOBBER:
594*c87b03e5Sespie return 0;
595*c87b03e5Sespie
596*c87b03e5Sespie case PRE_INC:
597*c87b03e5Sespie case PRE_DEC:
598*c87b03e5Sespie case POST_INC:
599*c87b03e5Sespie case POST_DEC:
600*c87b03e5Sespie case PRE_MODIFY:
601*c87b03e5Sespie case POST_MODIFY:
602*c87b03e5Sespie return 0;
603*c87b03e5Sespie
604*c87b03e5Sespie case REG:
605*c87b03e5Sespie return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
606*c87b03e5Sespie && reg_equiv[REGNO (x)].replace)
607*c87b03e5Sespie || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
608*c87b03e5Sespie
609*c87b03e5Sespie case UNSPEC_VOLATILE:
610*c87b03e5Sespie return 0;
611*c87b03e5Sespie
612*c87b03e5Sespie case ASM_OPERANDS:
613*c87b03e5Sespie if (MEM_VOLATILE_P (x))
614*c87b03e5Sespie return 0;
615*c87b03e5Sespie
616*c87b03e5Sespie /* FALLTHROUGH */
617*c87b03e5Sespie
618*c87b03e5Sespie default:
619*c87b03e5Sespie break;
620*c87b03e5Sespie }
621*c87b03e5Sespie
622*c87b03e5Sespie fmt = GET_RTX_FORMAT (code);
623*c87b03e5Sespie for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
624*c87b03e5Sespie switch (fmt[i])
625*c87b03e5Sespie {
626*c87b03e5Sespie case 'e':
627*c87b03e5Sespie if (! equiv_init_movable_p (XEXP (x, i), regno))
628*c87b03e5Sespie return 0;
629*c87b03e5Sespie break;
630*c87b03e5Sespie case 'E':
631*c87b03e5Sespie for (j = XVECLEN (x, i) - 1; j >= 0; j--)
632*c87b03e5Sespie if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
633*c87b03e5Sespie return 0;
634*c87b03e5Sespie break;
635*c87b03e5Sespie }
636*c87b03e5Sespie
637*c87b03e5Sespie return 1;
638*c87b03e5Sespie }
639*c87b03e5Sespie
640*c87b03e5Sespie /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
641*c87b03e5Sespie
642*c87b03e5Sespie static int
contains_replace_regs(x)643*c87b03e5Sespie contains_replace_regs (x)
644*c87b03e5Sespie rtx x;
645*c87b03e5Sespie {
646*c87b03e5Sespie int i, j;
647*c87b03e5Sespie const char *fmt;
648*c87b03e5Sespie enum rtx_code code = GET_CODE (x);
649*c87b03e5Sespie
650*c87b03e5Sespie switch (code)
651*c87b03e5Sespie {
652*c87b03e5Sespie case CONST_INT:
653*c87b03e5Sespie case CONST:
654*c87b03e5Sespie case LABEL_REF:
655*c87b03e5Sespie case SYMBOL_REF:
656*c87b03e5Sespie case CONST_DOUBLE:
657*c87b03e5Sespie case CONST_VECTOR:
658*c87b03e5Sespie case PC:
659*c87b03e5Sespie case CC0:
660*c87b03e5Sespie case HIGH:
661*c87b03e5Sespie return 0;
662*c87b03e5Sespie
663*c87b03e5Sespie case REG:
664*c87b03e5Sespie return reg_equiv[REGNO (x)].replace;
665*c87b03e5Sespie
666*c87b03e5Sespie default:
667*c87b03e5Sespie break;
668*c87b03e5Sespie }
669*c87b03e5Sespie
670*c87b03e5Sespie fmt = GET_RTX_FORMAT (code);
671*c87b03e5Sespie for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
672*c87b03e5Sespie switch (fmt[i])
673*c87b03e5Sespie {
674*c87b03e5Sespie case 'e':
675*c87b03e5Sespie if (contains_replace_regs (XEXP (x, i)))
676*c87b03e5Sespie return 1;
677*c87b03e5Sespie break;
678*c87b03e5Sespie case 'E':
679*c87b03e5Sespie for (j = XVECLEN (x, i) - 1; j >= 0; j--)
680*c87b03e5Sespie if (contains_replace_regs (XVECEXP (x, i, j)))
681*c87b03e5Sespie return 1;
682*c87b03e5Sespie break;
683*c87b03e5Sespie }
684*c87b03e5Sespie
685*c87b03e5Sespie return 0;
686*c87b03e5Sespie }
687*c87b03e5Sespie
/* TRUE if X references a memory location that would be affected by a store
   to MEMREF.  */

static int
memref_referenced_p (memref, x)
     rtx x;
     rtx memref;
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    /* Leaf rtxes can never reference memory.  Note that HIGH and LO_SUM
       are treated as leaves here: their memory reference, if any, is not
       one a store to MEMREF could affect.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case HIGH:
    case LO_SUM:
      return 0;

    case REG:
      /* A register with a known equivalence may be replaced by that
	 equivalence later, so check the replacement expression too.  */
      return (reg_equiv[REGNO (x)].replacement
	      && memref_referenced_p (memref,
				      reg_equiv[REGNO (x)].replacement));

    case MEM:
      /* Ask the alias machinery whether a store to MEMREF could change
	 what this MEM reads.  */
      if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
	return 1;
      break;

    case SET:
      /* If we are setting a MEM, it doesn't count (its address does), but any
	 other SET_DEST that has a MEM in it is referencing the MEM.  */
      if (GET_CODE (SET_DEST (x)) == MEM)
	{
	  if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
	    return 1;
	}
      else if (memref_referenced_p (memref, SET_DEST (x)))
	return 1;

      return memref_referenced_p (memref, SET_SRC (x));

    default:
      break;
    }

  /* For any other code, walk all sub-expressions recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    switch (fmt[i])
      {
      case 'e':
	if (memref_referenced_p (memref, XEXP (x, i)))
	  return 1;
	break;
      case 'E':
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (memref_referenced_p (memref, XVECEXP (x, i, j)))
	    return 1;
	break;
      }

  return 0;
}
758*c87b03e5Sespie
759*c87b03e5Sespie /* TRUE if some insn in the range (START, END] references a memory location
760*c87b03e5Sespie that would be affected by a store to MEMREF. */
761*c87b03e5Sespie
762*c87b03e5Sespie static int
memref_used_between_p(memref,start,end)763*c87b03e5Sespie memref_used_between_p (memref, start, end)
764*c87b03e5Sespie rtx memref;
765*c87b03e5Sespie rtx start;
766*c87b03e5Sespie rtx end;
767*c87b03e5Sespie {
768*c87b03e5Sespie rtx insn;
769*c87b03e5Sespie
770*c87b03e5Sespie for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
771*c87b03e5Sespie insn = NEXT_INSN (insn))
772*c87b03e5Sespie if (INSN_P (insn) && memref_referenced_p (memref, PATTERN (insn)))
773*c87b03e5Sespie return 1;
774*c87b03e5Sespie
775*c87b03e5Sespie return 0;
776*c87b03e5Sespie }
777*c87b03e5Sespie
778*c87b03e5Sespie /* Return nonzero if the rtx X is invariant over the current function. */
779*c87b03e5Sespie /* ??? Actually, the places this is used in reload expect exactly what
780*c87b03e5Sespie is tested here, and not everything that is function invariant. In
781*c87b03e5Sespie particular, the frame pointer and arg pointer are special cased;
782*c87b03e5Sespie pic_offset_table_rtx is not, and this will cause aborts when we
783*c87b03e5Sespie go to spill these things to memory. */
784*c87b03e5Sespie
785*c87b03e5Sespie int
function_invariant_p(x)786*c87b03e5Sespie function_invariant_p (x)
787*c87b03e5Sespie rtx x;
788*c87b03e5Sespie {
789*c87b03e5Sespie if (CONSTANT_P (x))
790*c87b03e5Sespie return 1;
791*c87b03e5Sespie if (x == frame_pointer_rtx || x == arg_pointer_rtx)
792*c87b03e5Sespie return 1;
793*c87b03e5Sespie if (GET_CODE (x) == PLUS
794*c87b03e5Sespie && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
795*c87b03e5Sespie && CONSTANT_P (XEXP (x, 1)))
796*c87b03e5Sespie return 1;
797*c87b03e5Sespie return 0;
798*c87b03e5Sespie }
799*c87b03e5Sespie
/* Find registers that are equivalent to a single value throughout the
   compilation (either because they can be referenced in memory or are set once
   from a single constant).  Lower their priority for a register.

   If such a register is only referenced once, try substituting its value
   into the using insn.  If it succeeds, we can eliminate the register
   completely.  */

static void
update_equiv_regs ()
{
  rtx insn;
  basic_block bb;
  int loop_depth;
  /* Registers whose lone initialization was moved; they must be cleared
     from every basic block's live-register info at the end.  */
  regset_head cleared_regs;
  int clear_regnos = 0;

  reg_equiv = (struct equivalence *) xcalloc (max_regno, sizeof *reg_equiv);
  INIT_REG_SET (&cleared_regs);

  /* Needed for the true_dependence calls done (indirectly) via
     validate_equiv_mem / memref_used_between_p below.  */
  init_alias_analysis ();

  /* Scan the insns and find which registers have equivalences.  Do this
     in a separate scan of the insns because (due to -fcse-follow-jumps)
     a register can be set below its use.  */
  FOR_EACH_BB (bb)
    {
      loop_depth = bb->loop_depth;

      for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
	{
	  rtx note;
	  rtx set;
	  rtx dest, src;
	  int regno;

	  if (! INSN_P (insn))
	    continue;

	  /* Auto-increment touches a register in a way that invalidates
	     any equivalence for it.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_INC)
	      no_equiv (XEXP (note, 0), note, NULL);

	  set = single_set (insn);

	  /* If this insn contains more (or less) than a single SET,
	     only mark all destinations as having no known equivalence.  */
	  if (set == 0)
	    {
	      note_stores (PATTERN (insn), no_equiv, NULL);
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    {
	      int i;

	      /* A PARALLEL with one interesting SET: kill equivalences for
		 everything the other elements store into.  */
	      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
		{
		  rtx part = XVECEXP (PATTERN (insn), 0, i);
		  if (part != set)
		    note_stores (part, no_equiv, NULL);
		}
	    }

	  dest = SET_DEST (set);
	  src = SET_SRC (set);

	  /* If this sets a MEM to the contents of a REG that is only used
	     in a single basic block, see if the register is always equivalent
	     to that memory location and if moving the store from INSN to the
	     insn that set REG is safe.  If so, put a REG_EQUIV note on the
	     initializing insn.

	     Don't add a REG_EQUIV note if the insn already has one.  The existing
	     REG_EQUIV is likely more useful than the one we are adding.

	     If one of the regs in the address has reg_equiv[REGNO].replace set,
	     then we can't add this REG_EQUIV note.  The reg_equiv[REGNO].replace
	     optimization may move the set of this register immediately before
	     insn, which puts it after reg_equiv[REGNO].init_insns, and hence
	     the mention in the REG_EQUIV note would be to an uninitialized
	     pseudo.  */
	  /* ????? This test isn't good enough; we might see a MEM with a use of
	     a pseudo register before we see its setting insn that will cause
	     reg_equiv[].replace for that pseudo to be set.
	     Equivalences to MEMs should be made in another pass, after the
	     reg_equiv[].replace information has been gathered.  */

	  if (GET_CODE (dest) == MEM && GET_CODE (src) == REG
	      && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
	      && REG_BASIC_BLOCK (regno) >= 0
	      && REG_N_SETS (regno) == 1
	      && reg_equiv[regno].init_insns != 0
	      && reg_equiv[regno].init_insns != const0_rtx
	      && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
				  REG_EQUIV, NULL_RTX)
	      && ! contains_replace_regs (XEXP (dest, 0)))
	    {
	      rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
	      if (validate_equiv_mem (init_insn, src, dest)
		  && ! memref_used_between_p (dest, init_insn, insn))
		REG_NOTES (init_insn)
		  = gen_rtx_EXPR_LIST (REG_EQUIV, dest, REG_NOTES (init_insn));
	    }

	  /* We only handle the case of a pseudo register being set
	     once, or always to the same value.  */
	  /* ??? The mn10200 port breaks if we add equivalences for
	     values that need an ADDRESS_REGS register and set them equivalent
	     to a MEM of a pseudo.  The actual problem is in the over-conservative
	     handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
	     calculate_needs, but we traditionally work around this problem
	     here by rejecting equivalences when the destination is in a register
	     that's likely spilled.  This is fragile, of course, since the
	     preferred class of a pseudo depends on all instructions that set
	     or use it.  */

	  if (GET_CODE (dest) != REG
	      || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
	      || reg_equiv[regno].init_insns == const0_rtx
	      || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
		  && GET_CODE (src) == MEM))
	    {
	      /* This might be setting a SUBREG of a pseudo, a pseudo that is
		 also set somewhere else to a constant.  */
	      note_stores (set, no_equiv, NULL);
	      continue;
	    }

	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

	  /* cse sometimes generates function invariants, but doesn't put a
	     REG_EQUAL note on the insn.  Since this note would be redundant,
	     there's no point creating it earlier than here.  */
	  if (! note && ! rtx_varies_p (src, 0))
	    note = set_unique_reg_note (insn, REG_EQUAL, src);

	  /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
	     since it represents a function call */
	  if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
	    note = NULL_RTX;

	  /* Multiple sets are only acceptable when every set establishes
	     the same known-constant value; otherwise kill the equivalence.  */
	  if (REG_N_SETS (regno) != 1
	      && (! note
		  || rtx_varies_p (XEXP (note, 0), 0)
		  || (reg_equiv[regno].replacement
		      && ! rtx_equal_p (XEXP (note, 0),
					reg_equiv[regno].replacement))))
	    {
	      no_equiv (dest, set, NULL);
	      continue;
	    }
	  /* Record this insn as initializing this register.  */
	  reg_equiv[regno].init_insns
	    = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);

	  /* If this register is known to be equal to a constant, record that
	     it is always equivalent to the constant.
	     (Promoting the note's kind from REG_EQUAL to REG_EQUIV is done
	     by storing the new kind in the note's mode field.)  */
	  if (note && ! rtx_varies_p (XEXP (note, 0), 0))
	    PUT_MODE (note, (enum machine_mode) REG_EQUIV);

	  /* If this insn introduces a "constant" register, decrease the priority
	     of that register.  Record this insn if the register is only used once
	     more and the equivalence value is the same as our source.

	     The latter condition is checked for two reasons:  First, it is an
	     indication that it may be more efficient to actually emit the insn
	     as written (if no registers are available, reload will substitute
	     the equivalence).  Secondly, it avoids problems with any registers
	     dying in this insn whose death notes would be missed.

	     If we don't have a REG_EQUIV note, see if this insn is loading
	     a register used only in one basic block from a MEM.  If so, and the
	     MEM remains unchanged for the life of the register, add a REG_EQUIV
	     note.  */

	  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);

	  if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
	      && GET_CODE (SET_SRC (set)) == MEM
	      && validate_equiv_mem (insn, dest, SET_SRC (set)))
	    REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
							 REG_NOTES (insn));

	  if (note)
	    {
	      int regno = REGNO (dest);

	      /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
		 We might end up substituting the LABEL_REF for uses of the
		 pseudo here or later.  That kind of transformation may turn an
		 indirect jump into a direct jump, in which case we must rerun the
		 jump optimizer to ensure that the JUMP_LABEL fields are valid.  */
	      if (GET_CODE (XEXP (note, 0)) == LABEL_REF
		  || (GET_CODE (XEXP (note, 0)) == CONST
		      && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
		      && (GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0))
			  == LABEL_REF)))
		recorded_label_ref = 1;

	      reg_equiv[regno].replacement = XEXP (note, 0);
	      reg_equiv[regno].src_p = &SET_SRC (set);
	      reg_equiv[regno].loop_depth = loop_depth;

	      /* Don't mess with things live during setjmp.  */
	      if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
		{
		  /* Note that the statement below does not affect the priority
		     in local-alloc!  */
		  REG_LIVE_LENGTH (regno) *= 2;


		  /* If the register is referenced exactly twice, meaning it is
		     set once and used once, indicate that the reference may be
		     replaced by the equivalence we computed above.  Do this
		     even if the register is only used in one block so that
		     dependencies can be handled where the last register is
		     used in a different block (i.e. HIGH / LO_SUM sequences)
		     and to reduce the number of registers alive across
		     calls.  */

		  if (REG_N_REFS (regno) == 2
		      && (rtx_equal_p (XEXP (note, 0), src)
			  || ! equiv_init_varies_p (src))
		      && GET_CODE (insn) == INSN
		      && equiv_init_movable_p (PATTERN (insn), regno))
		    reg_equiv[regno].replace = 1;
		}
	    }
	}
    }

  /* Now scan all regs killed in an insn to see if any of them are
     registers only used that once.  If so, see if we can replace the
     reference with the equivalent form.  If we can, delete the
     initializing reference and this register will go away.  If we
     can't replace the reference, and the initializing reference is
     within the same loop (or in an inner loop), then move the register
     initialization just before the use, so that they are in the same
     basic block.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      loop_depth = bb->loop_depth;
      for (insn = bb->end; insn != PREV_INSN (bb->head); insn = PREV_INSN (insn))
	{
	  rtx link;

	  if (! INSN_P (insn))
	    continue;

	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    {
	      if (REG_NOTE_KIND (link) == REG_DEAD
		  /* Make sure this insn still refers to the register.  */
		  && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
		{
		  int regno = REGNO (XEXP (link, 0));
		  rtx equiv_insn;

		  if (! reg_equiv[regno].replace
		      || reg_equiv[regno].loop_depth < loop_depth)
		    continue;

		  /* reg_equiv[REGNO].replace gets set only when
		     REG_N_REFS[REGNO] is 2, i.e. the register is set
		     once and used once.  (If it were only set, but not used,
		     flow would have deleted the setting insns.)  Hence
		     there can only be one insn in reg_equiv[REGNO].init_insns.  */
		  if (reg_equiv[regno].init_insns == NULL_RTX
		      || XEXP (reg_equiv[regno].init_insns, 1) != NULL_RTX)
		    abort ();
		  equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);

		  /* We may not move instructions that can throw, since
		     that changes basic block boundaries and we are not
		     prepared to adjust the CFG to match.  */
		  if (can_throw_internal (equiv_insn))
		    continue;

		  if (asm_noperands (PATTERN (equiv_insn)) < 0
		      && validate_replace_rtx (regno_reg_rtx[regno],
					       *(reg_equiv[regno].src_p), insn))
		    {
		      rtx equiv_link;
		      rtx last_link;
		      rtx note;

		      /* Find the last note.  */
		      for (last_link = link; XEXP (last_link, 1);
			   last_link = XEXP (last_link, 1))
			;

		      /* Append the REG_DEAD notes from equiv_insn.  */
		      equiv_link = REG_NOTES (equiv_insn);
		      while (equiv_link)
			{
			  note = equiv_link;
			  equiv_link = XEXP (equiv_link, 1);
			  if (REG_NOTE_KIND (note) == REG_DEAD)
			    {
			      remove_note (equiv_insn, note);
			      XEXP (last_link, 1) = note;
			      XEXP (note, 1) = NULL_RTX;
			      last_link = note;
			    }
			}

		      remove_death (regno, insn);
		      REG_N_REFS (regno) = 0;
		      REG_FREQ (regno) = 0;
		      delete_insn (equiv_insn);

		      reg_equiv[regno].init_insns
			= XEXP (reg_equiv[regno].init_insns, 1);
		    }
		  /* Move the initialization of the register to just before
		     INSN.  Update the flow information.  */
		  else if (PREV_INSN (insn) != equiv_insn)
		    {
		      rtx new_insn;

		      new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
		      REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
		      REG_NOTES (equiv_insn) = 0;

		      /* Make sure this insn is recognized before reload begins,
			 otherwise eliminate_regs_in_insn will abort.  */
		      INSN_CODE (new_insn) = INSN_CODE (equiv_insn);

		      delete_insn (equiv_insn);

		      XEXP (reg_equiv[regno].init_insns, 0) = new_insn;

		      REG_BASIC_BLOCK (regno) = bb->index;
		      REG_N_CALLS_CROSSED (regno) = 0;
		      REG_LIVE_LENGTH (regno) = 2;

		      if (insn == bb->head)
			bb->head = PREV_INSN (insn);

		      /* Remember to clear REGNO from all basic block's live
			 info.  */
		      SET_REGNO_REG_SET (&cleared_regs, regno);
		      clear_regnos++;
		    }
		}
	    }
	}
    }

  /* Clear all dead REGNOs from all basic block's live info.
     For many regs, sweep each block once; for a few, iterate the set.  */
  if (clear_regnos)
    {
      int j;
      if (clear_regnos > 8)
	{
	  FOR_EACH_BB (bb)
	    {
	      AND_COMPL_REG_SET (bb->global_live_at_start, &cleared_regs);
	      AND_COMPL_REG_SET (bb->global_live_at_end, &cleared_regs);
	    }
	}
      else
	EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j,
				   {
				     FOR_EACH_BB (bb)
				       {
					 CLEAR_REGNO_REG_SET (bb->global_live_at_start, j);
					 CLEAR_REGNO_REG_SET (bb->global_live_at_end, j);
				       }
				   });
    }

  /* Clean up.  */
  end_alias_analysis ();
  CLEAR_REG_SET (&cleared_regs);
  free (reg_equiv);
}
1178*c87b03e5Sespie
1179*c87b03e5Sespie /* Mark REG as having no known equivalence.
1180*c87b03e5Sespie Some instructions might have been proceessed before and furnished
1181*c87b03e5Sespie with REG_EQUIV notes for this register; these notes will have to be
1182*c87b03e5Sespie removed.
1183*c87b03e5Sespie STORE is the piece of RTL that does the non-constant / conflicting
1184*c87b03e5Sespie assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1185*c87b03e5Sespie but needs to be there because this function is called from note_stores. */
1186*c87b03e5Sespie static void
no_equiv(reg,store,data)1187*c87b03e5Sespie no_equiv (reg, store, data)
1188*c87b03e5Sespie rtx reg, store ATTRIBUTE_UNUSED;
1189*c87b03e5Sespie void *data ATTRIBUTE_UNUSED;
1190*c87b03e5Sespie {
1191*c87b03e5Sespie int regno;
1192*c87b03e5Sespie rtx list;
1193*c87b03e5Sespie
1194*c87b03e5Sespie if (GET_CODE (reg) != REG)
1195*c87b03e5Sespie return;
1196*c87b03e5Sespie regno = REGNO (reg);
1197*c87b03e5Sespie list = reg_equiv[regno].init_insns;
1198*c87b03e5Sespie if (list == const0_rtx)
1199*c87b03e5Sespie return;
1200*c87b03e5Sespie for (; list; list = XEXP (list, 1))
1201*c87b03e5Sespie {
1202*c87b03e5Sespie rtx insn = XEXP (list, 0);
1203*c87b03e5Sespie remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1204*c87b03e5Sespie }
1205*c87b03e5Sespie reg_equiv[regno].init_insns = const0_rtx;
1206*c87b03e5Sespie reg_equiv[regno].replacement = NULL_RTX;
1207*c87b03e5Sespie }
1208*c87b03e5Sespie
1209*c87b03e5Sespie /* Allocate hard regs to the pseudo regs used only within block number B.
1210*c87b03e5Sespie Only the pseudos that die but once can be handled. */
1211*c87b03e5Sespie
1212*c87b03e5Sespie static void
block_alloc(b)1213*c87b03e5Sespie block_alloc (b)
1214*c87b03e5Sespie int b;
1215*c87b03e5Sespie {
1216*c87b03e5Sespie int i, q;
1217*c87b03e5Sespie rtx insn;
1218*c87b03e5Sespie rtx note, hard_reg;
1219*c87b03e5Sespie int insn_number = 0;
1220*c87b03e5Sespie int insn_count = 0;
1221*c87b03e5Sespie int max_uid = get_max_uid ();
1222*c87b03e5Sespie int *qty_order;
1223*c87b03e5Sespie int no_conflict_combined_regno = -1;
1224*c87b03e5Sespie
1225*c87b03e5Sespie /* Count the instructions in the basic block. */
1226*c87b03e5Sespie
1227*c87b03e5Sespie insn = BLOCK_END (b);
1228*c87b03e5Sespie while (1)
1229*c87b03e5Sespie {
1230*c87b03e5Sespie if (GET_CODE (insn) != NOTE)
1231*c87b03e5Sespie if (++insn_count > max_uid)
1232*c87b03e5Sespie abort ();
1233*c87b03e5Sespie if (insn == BLOCK_HEAD (b))
1234*c87b03e5Sespie break;
1235*c87b03e5Sespie insn = PREV_INSN (insn);
1236*c87b03e5Sespie }
1237*c87b03e5Sespie
1238*c87b03e5Sespie /* +2 to leave room for a post_mark_life at the last insn and for
1239*c87b03e5Sespie the birth of a CLOBBER in the first insn. */
1240*c87b03e5Sespie regs_live_at = (HARD_REG_SET *) xcalloc ((2 * insn_count + 2),
1241*c87b03e5Sespie sizeof (HARD_REG_SET));
1242*c87b03e5Sespie
1243*c87b03e5Sespie /* Initialize table of hardware registers currently live. */
1244*c87b03e5Sespie
1245*c87b03e5Sespie REG_SET_TO_HARD_REG_SET (regs_live, BASIC_BLOCK (b)->global_live_at_start);
1246*c87b03e5Sespie
1247*c87b03e5Sespie /* This loop scans the instructions of the basic block
1248*c87b03e5Sespie and assigns quantities to registers.
1249*c87b03e5Sespie It computes which registers to tie. */
1250*c87b03e5Sespie
1251*c87b03e5Sespie insn = BLOCK_HEAD (b);
1252*c87b03e5Sespie while (1)
1253*c87b03e5Sespie {
1254*c87b03e5Sespie if (GET_CODE (insn) != NOTE)
1255*c87b03e5Sespie insn_number++;
1256*c87b03e5Sespie
1257*c87b03e5Sespie if (INSN_P (insn))
1258*c87b03e5Sespie {
1259*c87b03e5Sespie rtx link, set;
1260*c87b03e5Sespie int win = 0;
1261*c87b03e5Sespie rtx r0, r1 = NULL_RTX;
1262*c87b03e5Sespie int combined_regno = -1;
1263*c87b03e5Sespie int i;
1264*c87b03e5Sespie
1265*c87b03e5Sespie this_insn_number = insn_number;
1266*c87b03e5Sespie this_insn = insn;
1267*c87b03e5Sespie
1268*c87b03e5Sespie extract_insn (insn);
1269*c87b03e5Sespie which_alternative = -1;
1270*c87b03e5Sespie
1271*c87b03e5Sespie /* Is this insn suitable for tying two registers?
1272*c87b03e5Sespie If so, try doing that.
1273*c87b03e5Sespie Suitable insns are those with at least two operands and where
1274*c87b03e5Sespie operand 0 is an output that is a register that is not
1275*c87b03e5Sespie earlyclobber.
1276*c87b03e5Sespie
1277*c87b03e5Sespie We can tie operand 0 with some operand that dies in this insn.
1278*c87b03e5Sespie First look for operands that are required to be in the same
1279*c87b03e5Sespie register as operand 0. If we find such, only try tying that
1280*c87b03e5Sespie operand or one that can be put into that operand if the
1281*c87b03e5Sespie operation is commutative. If we don't find an operand
1282*c87b03e5Sespie that is required to be in the same register as operand 0,
1283*c87b03e5Sespie we can tie with any operand.
1284*c87b03e5Sespie
1285*c87b03e5Sespie Subregs in place of regs are also ok.
1286*c87b03e5Sespie
1287*c87b03e5Sespie If tying is done, WIN is set nonzero. */
1288*c87b03e5Sespie
1289*c87b03e5Sespie if (optimize
1290*c87b03e5Sespie && recog_data.n_operands > 1
1291*c87b03e5Sespie && recog_data.constraints[0][0] == '='
1292*c87b03e5Sespie && recog_data.constraints[0][1] != '&')
1293*c87b03e5Sespie {
1294*c87b03e5Sespie /* If non-negative, is an operand that must match operand 0. */
1295*c87b03e5Sespie int must_match_0 = -1;
1296*c87b03e5Sespie /* Counts number of alternatives that require a match with
1297*c87b03e5Sespie operand 0. */
1298*c87b03e5Sespie int n_matching_alts = 0;
1299*c87b03e5Sespie
1300*c87b03e5Sespie for (i = 1; i < recog_data.n_operands; i++)
1301*c87b03e5Sespie {
1302*c87b03e5Sespie const char *p = recog_data.constraints[i];
1303*c87b03e5Sespie int this_match = requires_inout (p);
1304*c87b03e5Sespie
1305*c87b03e5Sespie n_matching_alts += this_match;
1306*c87b03e5Sespie if (this_match == recog_data.n_alternatives)
1307*c87b03e5Sespie must_match_0 = i;
1308*c87b03e5Sespie }
1309*c87b03e5Sespie
1310*c87b03e5Sespie r0 = recog_data.operand[0];
1311*c87b03e5Sespie for (i = 1; i < recog_data.n_operands; i++)
1312*c87b03e5Sespie {
1313*c87b03e5Sespie /* Skip this operand if we found an operand that
1314*c87b03e5Sespie must match operand 0 and this operand isn't it
1315*c87b03e5Sespie and can't be made to be it by commutativity. */
1316*c87b03e5Sespie
1317*c87b03e5Sespie if (must_match_0 >= 0 && i != must_match_0
1318*c87b03e5Sespie && ! (i == must_match_0 + 1
1319*c87b03e5Sespie && recog_data.constraints[i-1][0] == '%')
1320*c87b03e5Sespie && ! (i == must_match_0 - 1
1321*c87b03e5Sespie && recog_data.constraints[i][0] == '%'))
1322*c87b03e5Sespie continue;
1323*c87b03e5Sespie
1324*c87b03e5Sespie /* Likewise if each alternative has some operand that
1325*c87b03e5Sespie must match operand zero. In that case, skip any
1326*c87b03e5Sespie operand that doesn't list operand 0 since we know that
1327*c87b03e5Sespie the operand always conflicts with operand 0. We
1328*c87b03e5Sespie ignore commutatity in this case to keep things simple. */
1329*c87b03e5Sespie if (n_matching_alts == recog_data.n_alternatives
1330*c87b03e5Sespie && 0 == requires_inout (recog_data.constraints[i]))
1331*c87b03e5Sespie continue;
1332*c87b03e5Sespie
1333*c87b03e5Sespie r1 = recog_data.operand[i];
1334*c87b03e5Sespie
1335*c87b03e5Sespie /* If the operand is an address, find a register in it.
1336*c87b03e5Sespie There may be more than one register, but we only try one
1337*c87b03e5Sespie of them. */
1338*c87b03e5Sespie if (recog_data.constraints[i][0] == 'p'
1339*c87b03e5Sespie || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0]))
1340*c87b03e5Sespie while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1341*c87b03e5Sespie r1 = XEXP (r1, 0);
1342*c87b03e5Sespie
1343*c87b03e5Sespie /* Avoid making a call-saved register unnecessarily
1344*c87b03e5Sespie clobbered. */
1345*c87b03e5Sespie hard_reg = get_hard_reg_initial_reg (cfun, r1);
1346*c87b03e5Sespie if (hard_reg != NULL_RTX)
1347*c87b03e5Sespie {
1348*c87b03e5Sespie if (GET_CODE (hard_reg) == REG
1349*c87b03e5Sespie && IN_RANGE (REGNO (hard_reg),
1350*c87b03e5Sespie 0, FIRST_PSEUDO_REGISTER - 1)
1351*c87b03e5Sespie && ! call_used_regs[REGNO (hard_reg)])
1352*c87b03e5Sespie continue;
1353*c87b03e5Sespie }
1354*c87b03e5Sespie
1355*c87b03e5Sespie if (GET_CODE (r0) == REG || GET_CODE (r0) == SUBREG)
1356*c87b03e5Sespie {
1357*c87b03e5Sespie /* We have two priorities for hard register preferences.
1358*c87b03e5Sespie If we have a move insn or an insn whose first input
1359*c87b03e5Sespie can only be in the same register as the output, give
1360*c87b03e5Sespie priority to an equivalence found from that insn. */
1361*c87b03e5Sespie int may_save_copy
1362*c87b03e5Sespie = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1363*c87b03e5Sespie
1364*c87b03e5Sespie if (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1365*c87b03e5Sespie win = combine_regs (r1, r0, may_save_copy,
1366*c87b03e5Sespie insn_number, insn, 0);
1367*c87b03e5Sespie }
1368*c87b03e5Sespie if (win)
1369*c87b03e5Sespie break;
1370*c87b03e5Sespie }
1371*c87b03e5Sespie }
1372*c87b03e5Sespie
1373*c87b03e5Sespie /* Recognize an insn sequence with an ultimate result
1374*c87b03e5Sespie which can safely overlap one of the inputs.
1375*c87b03e5Sespie The sequence begins with a CLOBBER of its result,
1376*c87b03e5Sespie and ends with an insn that copies the result to itself
1377*c87b03e5Sespie and has a REG_EQUAL note for an equivalent formula.
1378*c87b03e5Sespie That note indicates what the inputs are.
1379*c87b03e5Sespie The result and the input can overlap if each insn in
1380*c87b03e5Sespie the sequence either doesn't mention the input
1381*c87b03e5Sespie or has a REG_NO_CONFLICT note to inhibit the conflict.
1382*c87b03e5Sespie
1383*c87b03e5Sespie We do the combining test at the CLOBBER so that the
1384*c87b03e5Sespie destination register won't have had a quantity number
1385*c87b03e5Sespie assigned, since that would prevent combining. */
1386*c87b03e5Sespie
1387*c87b03e5Sespie if (optimize
1388*c87b03e5Sespie && GET_CODE (PATTERN (insn)) == CLOBBER
1389*c87b03e5Sespie && (r0 = XEXP (PATTERN (insn), 0),
1390*c87b03e5Sespie GET_CODE (r0) == REG)
1391*c87b03e5Sespie && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1392*c87b03e5Sespie && XEXP (link, 0) != 0
1393*c87b03e5Sespie && GET_CODE (XEXP (link, 0)) == INSN
1394*c87b03e5Sespie && (set = single_set (XEXP (link, 0))) != 0
1395*c87b03e5Sespie && SET_DEST (set) == r0 && SET_SRC (set) == r0
1396*c87b03e5Sespie && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1397*c87b03e5Sespie NULL_RTX)) != 0)
1398*c87b03e5Sespie {
1399*c87b03e5Sespie if (r1 = XEXP (note, 0), GET_CODE (r1) == REG
1400*c87b03e5Sespie /* Check that we have such a sequence. */
1401*c87b03e5Sespie && no_conflict_p (insn, r0, r1))
1402*c87b03e5Sespie win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1403*c87b03e5Sespie else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1404*c87b03e5Sespie && (r1 = XEXP (XEXP (note, 0), 0),
1405*c87b03e5Sespie GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1406*c87b03e5Sespie && no_conflict_p (insn, r0, r1))
1407*c87b03e5Sespie win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1408*c87b03e5Sespie
1409*c87b03e5Sespie /* Here we care if the operation to be computed is
1410*c87b03e5Sespie commutative. */
1411*c87b03e5Sespie else if ((GET_CODE (XEXP (note, 0)) == EQ
1412*c87b03e5Sespie || GET_CODE (XEXP (note, 0)) == NE
1413*c87b03e5Sespie || GET_RTX_CLASS (GET_CODE (XEXP (note, 0))) == 'c')
1414*c87b03e5Sespie && (r1 = XEXP (XEXP (note, 0), 1),
1415*c87b03e5Sespie (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG))
1416*c87b03e5Sespie && no_conflict_p (insn, r0, r1))
1417*c87b03e5Sespie win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1418*c87b03e5Sespie
1419*c87b03e5Sespie /* If we did combine something, show the register number
1420*c87b03e5Sespie in question so that we know to ignore its death. */
1421*c87b03e5Sespie if (win)
1422*c87b03e5Sespie no_conflict_combined_regno = REGNO (r1);
1423*c87b03e5Sespie }
1424*c87b03e5Sespie
1425*c87b03e5Sespie /* If registers were just tied, set COMBINED_REGNO
1426*c87b03e5Sespie to the number of the register used in this insn
1427*c87b03e5Sespie that was tied to the register set in this insn.
1428*c87b03e5Sespie This register's qty should not be "killed". */
1429*c87b03e5Sespie
1430*c87b03e5Sespie if (win)
1431*c87b03e5Sespie {
1432*c87b03e5Sespie while (GET_CODE (r1) == SUBREG)
1433*c87b03e5Sespie r1 = SUBREG_REG (r1);
1434*c87b03e5Sespie combined_regno = REGNO (r1);
1435*c87b03e5Sespie }
1436*c87b03e5Sespie
1437*c87b03e5Sespie /* Mark the death of everything that dies in this instruction,
1438*c87b03e5Sespie except for anything that was just combined. */
1439*c87b03e5Sespie
1440*c87b03e5Sespie for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1441*c87b03e5Sespie if (REG_NOTE_KIND (link) == REG_DEAD
1442*c87b03e5Sespie && GET_CODE (XEXP (link, 0)) == REG
1443*c87b03e5Sespie && combined_regno != (int) REGNO (XEXP (link, 0))
1444*c87b03e5Sespie && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1445*c87b03e5Sespie || ! find_reg_note (insn, REG_NO_CONFLICT,
1446*c87b03e5Sespie XEXP (link, 0))))
1447*c87b03e5Sespie wipe_dead_reg (XEXP (link, 0), 0);
1448*c87b03e5Sespie
1449*c87b03e5Sespie /* Allocate qty numbers for all registers local to this block
1450*c87b03e5Sespie that are born (set) in this instruction.
1451*c87b03e5Sespie A pseudo that already has a qty is not changed. */
1452*c87b03e5Sespie
1453*c87b03e5Sespie note_stores (PATTERN (insn), reg_is_set, NULL);
1454*c87b03e5Sespie
1455*c87b03e5Sespie /* If anything is set in this insn and then unused, mark it as dying
1456*c87b03e5Sespie after this insn, so it will conflict with our outputs. This
1457*c87b03e5Sespie can't match with something that combined, and it doesn't matter
1458*c87b03e5Sespie if it did. Do this after the calls to reg_is_set since these
1459*c87b03e5Sespie die after, not during, the current insn. */
1460*c87b03e5Sespie
1461*c87b03e5Sespie for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1462*c87b03e5Sespie if (REG_NOTE_KIND (link) == REG_UNUSED
1463*c87b03e5Sespie && GET_CODE (XEXP (link, 0)) == REG)
1464*c87b03e5Sespie wipe_dead_reg (XEXP (link, 0), 1);
1465*c87b03e5Sespie
1466*c87b03e5Sespie /* If this is an insn that has a REG_RETVAL note pointing at a
1467*c87b03e5Sespie CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1468*c87b03e5Sespie block, so clear any register number that combined within it. */
1469*c87b03e5Sespie if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1470*c87b03e5Sespie && GET_CODE (XEXP (note, 0)) == INSN
1471*c87b03e5Sespie && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1472*c87b03e5Sespie no_conflict_combined_regno = -1;
1473*c87b03e5Sespie }
1474*c87b03e5Sespie
1475*c87b03e5Sespie /* Set the registers live after INSN_NUMBER. Note that we never
1476*c87b03e5Sespie record the registers live before the block's first insn, since no
1477*c87b03e5Sespie pseudos we care about are live before that insn. */
1478*c87b03e5Sespie
1479*c87b03e5Sespie IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1480*c87b03e5Sespie IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1481*c87b03e5Sespie
1482*c87b03e5Sespie if (insn == BLOCK_END (b))
1483*c87b03e5Sespie break;
1484*c87b03e5Sespie
1485*c87b03e5Sespie insn = NEXT_INSN (insn);
1486*c87b03e5Sespie }
1487*c87b03e5Sespie
1488*c87b03e5Sespie /* Now every register that is local to this basic block
1489*c87b03e5Sespie should have been given a quantity, or else -1 meaning ignore it.
1490*c87b03e5Sespie Every quantity should have a known birth and death.
1491*c87b03e5Sespie
1492*c87b03e5Sespie Order the qtys so we assign them registers in order of the
1493*c87b03e5Sespie number of suggested registers they need so we allocate those with
1494*c87b03e5Sespie the most restrictive needs first. */
1495*c87b03e5Sespie
1496*c87b03e5Sespie qty_order = (int *) xmalloc (next_qty * sizeof (int));
1497*c87b03e5Sespie for (i = 0; i < next_qty; i++)
1498*c87b03e5Sespie qty_order[i] = i;
1499*c87b03e5Sespie
1500*c87b03e5Sespie #define EXCHANGE(I1, I2) \
1501*c87b03e5Sespie { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1502*c87b03e5Sespie
1503*c87b03e5Sespie switch (next_qty)
1504*c87b03e5Sespie {
1505*c87b03e5Sespie case 3:
1506*c87b03e5Sespie /* Make qty_order[2] be the one to allocate last. */
1507*c87b03e5Sespie if (qty_sugg_compare (0, 1) > 0)
1508*c87b03e5Sespie EXCHANGE (0, 1);
1509*c87b03e5Sespie if (qty_sugg_compare (1, 2) > 0)
1510*c87b03e5Sespie EXCHANGE (2, 1);
1511*c87b03e5Sespie
1512*c87b03e5Sespie /* ... Fall through ... */
1513*c87b03e5Sespie case 2:
1514*c87b03e5Sespie /* Put the best one to allocate in qty_order[0]. */
1515*c87b03e5Sespie if (qty_sugg_compare (0, 1) > 0)
1516*c87b03e5Sespie EXCHANGE (0, 1);
1517*c87b03e5Sespie
1518*c87b03e5Sespie /* ... Fall through ... */
1519*c87b03e5Sespie
1520*c87b03e5Sespie case 1:
1521*c87b03e5Sespie case 0:
1522*c87b03e5Sespie /* Nothing to do here. */
1523*c87b03e5Sespie break;
1524*c87b03e5Sespie
1525*c87b03e5Sespie default:
1526*c87b03e5Sespie qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1527*c87b03e5Sespie }
1528*c87b03e5Sespie
1529*c87b03e5Sespie /* Try to put each quantity in a suggested physical register, if it has one.
1530*c87b03e5Sespie This may cause registers to be allocated that otherwise wouldn't be, but
1531*c87b03e5Sespie this seems acceptable in local allocation (unlike global allocation). */
1532*c87b03e5Sespie for (i = 0; i < next_qty; i++)
1533*c87b03e5Sespie {
1534*c87b03e5Sespie q = qty_order[i];
1535*c87b03e5Sespie if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1536*c87b03e5Sespie qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1537*c87b03e5Sespie 0, 1, qty[q].birth, qty[q].death);
1538*c87b03e5Sespie else
1539*c87b03e5Sespie qty[q].phys_reg = -1;
1540*c87b03e5Sespie }
1541*c87b03e5Sespie
1542*c87b03e5Sespie /* Order the qtys so we assign them registers in order of
1543*c87b03e5Sespie decreasing length of life. Normally call qsort, but if we
1544*c87b03e5Sespie have only a very small number of quantities, sort them ourselves. */
1545*c87b03e5Sespie
1546*c87b03e5Sespie for (i = 0; i < next_qty; i++)
1547*c87b03e5Sespie qty_order[i] = i;
1548*c87b03e5Sespie
1549*c87b03e5Sespie #define EXCHANGE(I1, I2) \
1550*c87b03e5Sespie { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1551*c87b03e5Sespie
1552*c87b03e5Sespie switch (next_qty)
1553*c87b03e5Sespie {
1554*c87b03e5Sespie case 3:
1555*c87b03e5Sespie /* Make qty_order[2] be the one to allocate last. */
1556*c87b03e5Sespie if (qty_compare (0, 1) > 0)
1557*c87b03e5Sespie EXCHANGE (0, 1);
1558*c87b03e5Sespie if (qty_compare (1, 2) > 0)
1559*c87b03e5Sespie EXCHANGE (2, 1);
1560*c87b03e5Sespie
1561*c87b03e5Sespie /* ... Fall through ... */
1562*c87b03e5Sespie case 2:
1563*c87b03e5Sespie /* Put the best one to allocate in qty_order[0]. */
1564*c87b03e5Sespie if (qty_compare (0, 1) > 0)
1565*c87b03e5Sespie EXCHANGE (0, 1);
1566*c87b03e5Sespie
1567*c87b03e5Sespie /* ... Fall through ... */
1568*c87b03e5Sespie
1569*c87b03e5Sespie case 1:
1570*c87b03e5Sespie case 0:
1571*c87b03e5Sespie /* Nothing to do here. */
1572*c87b03e5Sespie break;
1573*c87b03e5Sespie
1574*c87b03e5Sespie default:
1575*c87b03e5Sespie qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1576*c87b03e5Sespie }
1577*c87b03e5Sespie
1578*c87b03e5Sespie /* Now for each qty that is not a hardware register,
1579*c87b03e5Sespie look for a hardware register to put it in.
1580*c87b03e5Sespie First try the register class that is cheapest for this qty,
1581*c87b03e5Sespie if there is more than one class. */
1582*c87b03e5Sespie
1583*c87b03e5Sespie for (i = 0; i < next_qty; i++)
1584*c87b03e5Sespie {
1585*c87b03e5Sespie q = qty_order[i];
1586*c87b03e5Sespie if (qty[q].phys_reg < 0)
1587*c87b03e5Sespie {
1588*c87b03e5Sespie #ifdef INSN_SCHEDULING
1589*c87b03e5Sespie /* These values represent the adjusted lifetime of a qty so
1590*c87b03e5Sespie that it conflicts with qtys which appear near the start/end
1591*c87b03e5Sespie of this qty's lifetime.
1592*c87b03e5Sespie
1593*c87b03e5Sespie The purpose behind extending the lifetime of this qty is to
1594*c87b03e5Sespie discourage the register allocator from creating false
1595*c87b03e5Sespie dependencies.
1596*c87b03e5Sespie
1597*c87b03e5Sespie The adjustment value is chosen to indicate that this qty
1598*c87b03e5Sespie conflicts with all the qtys in the instructions immediately
1599*c87b03e5Sespie before and after the lifetime of this qty.
1600*c87b03e5Sespie
1601*c87b03e5Sespie Experiments have shown that higher values tend to hurt
1602*c87b03e5Sespie overall code performance.
1603*c87b03e5Sespie
1604*c87b03e5Sespie If allocation using the extended lifetime fails we will try
1605*c87b03e5Sespie again with the qty's unadjusted lifetime. */
1606*c87b03e5Sespie int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1607*c87b03e5Sespie int fake_death = MIN (insn_number * 2 + 1,
1608*c87b03e5Sespie qty[q].death + 2 - qty[q].death % 2);
1609*c87b03e5Sespie #endif
1610*c87b03e5Sespie
1611*c87b03e5Sespie if (N_REG_CLASSES > 1)
1612*c87b03e5Sespie {
1613*c87b03e5Sespie #ifdef INSN_SCHEDULING
1614*c87b03e5Sespie /* We try to avoid using hard registers allocated to qtys which
1615*c87b03e5Sespie are born immediately after this qty or die immediately before
1616*c87b03e5Sespie this qty.
1617*c87b03e5Sespie
1618*c87b03e5Sespie This optimization is only appropriate when we will run
1619*c87b03e5Sespie a scheduling pass after reload and we are not optimizing
1620*c87b03e5Sespie for code size. */
1621*c87b03e5Sespie if (flag_schedule_insns_after_reload
1622*c87b03e5Sespie && !optimize_size
1623*c87b03e5Sespie && !SMALL_REGISTER_CLASSES)
1624*c87b03e5Sespie {
1625*c87b03e5Sespie qty[q].phys_reg = find_free_reg (qty[q].min_class,
1626*c87b03e5Sespie qty[q].mode, q, 0, 0,
1627*c87b03e5Sespie fake_birth, fake_death);
1628*c87b03e5Sespie if (qty[q].phys_reg >= 0)
1629*c87b03e5Sespie continue;
1630*c87b03e5Sespie }
1631*c87b03e5Sespie #endif
1632*c87b03e5Sespie qty[q].phys_reg = find_free_reg (qty[q].min_class,
1633*c87b03e5Sespie qty[q].mode, q, 0, 0,
1634*c87b03e5Sespie qty[q].birth, qty[q].death);
1635*c87b03e5Sespie if (qty[q].phys_reg >= 0)
1636*c87b03e5Sespie continue;
1637*c87b03e5Sespie }
1638*c87b03e5Sespie
1639*c87b03e5Sespie #ifdef INSN_SCHEDULING
1640*c87b03e5Sespie /* Similarly, avoid false dependencies. */
1641*c87b03e5Sespie if (flag_schedule_insns_after_reload
1642*c87b03e5Sespie && !optimize_size
1643*c87b03e5Sespie && !SMALL_REGISTER_CLASSES
1644*c87b03e5Sespie && qty[q].alternate_class != NO_REGS)
1645*c87b03e5Sespie qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1646*c87b03e5Sespie qty[q].mode, q, 0, 0,
1647*c87b03e5Sespie fake_birth, fake_death);
1648*c87b03e5Sespie #endif
1649*c87b03e5Sespie if (qty[q].alternate_class != NO_REGS)
1650*c87b03e5Sespie qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1651*c87b03e5Sespie qty[q].mode, q, 0, 0,
1652*c87b03e5Sespie qty[q].birth, qty[q].death);
1653*c87b03e5Sespie }
1654*c87b03e5Sespie }
1655*c87b03e5Sespie
1656*c87b03e5Sespie /* Now propagate the register assignments
1657*c87b03e5Sespie to the pseudo regs belonging to the qtys. */
1658*c87b03e5Sespie
1659*c87b03e5Sespie for (q = 0; q < next_qty; q++)
1660*c87b03e5Sespie if (qty[q].phys_reg >= 0)
1661*c87b03e5Sespie {
1662*c87b03e5Sespie for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1663*c87b03e5Sespie reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1664*c87b03e5Sespie }
1665*c87b03e5Sespie
1666*c87b03e5Sespie /* Clean up. */
1667*c87b03e5Sespie free (regs_live_at);
1668*c87b03e5Sespie free (qty_order);
1669*c87b03e5Sespie }
1670*c87b03e5Sespie
1671*c87b03e5Sespie /* Compare two quantities' priority for getting real registers.
1672*c87b03e5Sespie We give shorter-lived quantities higher priority.
1673*c87b03e5Sespie Quantities with more references are also preferred, as are quantities that
1674*c87b03e5Sespie require multiple registers. This is the identical prioritization as
1675*c87b03e5Sespie done by global-alloc.
1676*c87b03e5Sespie
1677*c87b03e5Sespie We used to give preference to registers with *longer* lives, but using
1678*c87b03e5Sespie the same algorithm in both local- and global-alloc can speed up execution
1679*c87b03e5Sespie of some programs by as much as a factor of three! */
1680*c87b03e5Sespie
1681*c87b03e5Sespie /* Note that the quotient will never be bigger than
1682*c87b03e5Sespie the value of floor_log2 times the maximum number of
1683*c87b03e5Sespie times a register can occur in one insn (surely less than 100)
1684*c87b03e5Sespie weighted by frequency (max REG_FREQ_MAX).
1685*c87b03e5Sespie Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1686*c87b03e5Sespie QTY_CMP_PRI is also used by qty_sugg_compare. */
1687*c87b03e5Sespie
1688*c87b03e5Sespie #define QTY_CMP_PRI(q) \
1689*c87b03e5Sespie ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1690*c87b03e5Sespie / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
1691*c87b03e5Sespie
1692*c87b03e5Sespie static int
qty_compare(q1,q2)1693*c87b03e5Sespie qty_compare (q1, q2)
1694*c87b03e5Sespie int q1, q2;
1695*c87b03e5Sespie {
1696*c87b03e5Sespie return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1697*c87b03e5Sespie }
1698*c87b03e5Sespie
1699*c87b03e5Sespie static int
qty_compare_1(q1p,q2p)1700*c87b03e5Sespie qty_compare_1 (q1p, q2p)
1701*c87b03e5Sespie const PTR q1p;
1702*c87b03e5Sespie const PTR q2p;
1703*c87b03e5Sespie {
1704*c87b03e5Sespie int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1705*c87b03e5Sespie int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1706*c87b03e5Sespie
1707*c87b03e5Sespie if (tem != 0)
1708*c87b03e5Sespie return tem;
1709*c87b03e5Sespie
1710*c87b03e5Sespie /* If qtys are equally good, sort by qty number,
1711*c87b03e5Sespie so that the results of qsort leave nothing to chance. */
1712*c87b03e5Sespie return q1 - q2;
1713*c87b03e5Sespie }
1714*c87b03e5Sespie
1715*c87b03e5Sespie /* Compare two quantities' priority for getting real registers. This version
1716*c87b03e5Sespie is called for quantities that have suggested hard registers. First priority
1717*c87b03e5Sespie goes to quantities that have copy preferences, then to those that have
1718*c87b03e5Sespie normal preferences. Within those groups, quantities with the lower
1719*c87b03e5Sespie number of preferences have the highest priority. Of those, we use the same
1720*c87b03e5Sespie algorithm as above. */
1721*c87b03e5Sespie
1722*c87b03e5Sespie #define QTY_CMP_SUGG(q) \
1723*c87b03e5Sespie (qty_phys_num_copy_sugg[q] \
1724*c87b03e5Sespie ? qty_phys_num_copy_sugg[q] \
1725*c87b03e5Sespie : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
1726*c87b03e5Sespie
1727*c87b03e5Sespie static int
qty_sugg_compare(q1,q2)1728*c87b03e5Sespie qty_sugg_compare (q1, q2)
1729*c87b03e5Sespie int q1, q2;
1730*c87b03e5Sespie {
1731*c87b03e5Sespie int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1732*c87b03e5Sespie
1733*c87b03e5Sespie if (tem != 0)
1734*c87b03e5Sespie return tem;
1735*c87b03e5Sespie
1736*c87b03e5Sespie return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1737*c87b03e5Sespie }
1738*c87b03e5Sespie
1739*c87b03e5Sespie static int
qty_sugg_compare_1(q1p,q2p)1740*c87b03e5Sespie qty_sugg_compare_1 (q1p, q2p)
1741*c87b03e5Sespie const PTR q1p;
1742*c87b03e5Sespie const PTR q2p;
1743*c87b03e5Sespie {
1744*c87b03e5Sespie int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1745*c87b03e5Sespie int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1746*c87b03e5Sespie
1747*c87b03e5Sespie if (tem != 0)
1748*c87b03e5Sespie return tem;
1749*c87b03e5Sespie
1750*c87b03e5Sespie tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1751*c87b03e5Sespie if (tem != 0)
1752*c87b03e5Sespie return tem;
1753*c87b03e5Sespie
1754*c87b03e5Sespie /* If qtys are equally good, sort by qty number,
1755*c87b03e5Sespie so that the results of qsort leave nothing to chance. */
1756*c87b03e5Sespie return q1 - q2;
1757*c87b03e5Sespie }
1758*c87b03e5Sespie
1759*c87b03e5Sespie #undef QTY_CMP_SUGG
1760*c87b03e5Sespie #undef QTY_CMP_PRI
1761*c87b03e5Sespie
1762*c87b03e5Sespie /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1763*c87b03e5Sespie Returns 1 if have done so, or 0 if cannot.
1764*c87b03e5Sespie
1765*c87b03e5Sespie Combining registers means marking them as having the same quantity
1766*c87b03e5Sespie and adjusting the offsets within the quantity if either of
   them is a SUBREG.
1768*c87b03e5Sespie
1769*c87b03e5Sespie We don't actually combine a hard reg with a pseudo; instead
1770*c87b03e5Sespie we just record the hard reg as the suggestion for the pseudo's quantity.
1771*c87b03e5Sespie If we really combined them, we could lose if the pseudo lives
1772*c87b03e5Sespie across an insn that clobbers the hard reg (eg, movstr).
1773*c87b03e5Sespie
1774*c87b03e5Sespie ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1775*c87b03e5Sespie there is no REG_DEAD note on INSN. This occurs during the processing
1776*c87b03e5Sespie of REG_NO_CONFLICT blocks.
1777*c87b03e5Sespie
   MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1779*c87b03e5Sespie SETREG or if the input and output must share a register.
1780*c87b03e5Sespie In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1781*c87b03e5Sespie
1782*c87b03e5Sespie There are elaborate checks for the validity of combining. */
1783*c87b03e5Sespie
1784*c87b03e5Sespie static int
combine_regs(usedreg,setreg,may_save_copy,insn_number,insn,already_dead)1785*c87b03e5Sespie combine_regs (usedreg, setreg, may_save_copy, insn_number, insn, already_dead)
1786*c87b03e5Sespie rtx usedreg, setreg;
1787*c87b03e5Sespie int may_save_copy;
1788*c87b03e5Sespie int insn_number;
1789*c87b03e5Sespie rtx insn;
1790*c87b03e5Sespie int already_dead;
1791*c87b03e5Sespie {
1792*c87b03e5Sespie int ureg, sreg;
1793*c87b03e5Sespie int offset = 0;
1794*c87b03e5Sespie int usize, ssize;
1795*c87b03e5Sespie int sqty;
1796*c87b03e5Sespie
1797*c87b03e5Sespie /* Determine the numbers and sizes of registers being used. If a subreg
1798*c87b03e5Sespie is present that does not change the entire register, don't consider
1799*c87b03e5Sespie this a copy insn. */
1800*c87b03e5Sespie
1801*c87b03e5Sespie while (GET_CODE (usedreg) == SUBREG)
1802*c87b03e5Sespie {
1803*c87b03e5Sespie rtx subreg = SUBREG_REG (usedreg);
1804*c87b03e5Sespie
1805*c87b03e5Sespie if (GET_CODE (subreg) == REG)
1806*c87b03e5Sespie {
1807*c87b03e5Sespie if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1808*c87b03e5Sespie may_save_copy = 0;
1809*c87b03e5Sespie
1810*c87b03e5Sespie if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1811*c87b03e5Sespie offset += subreg_regno_offset (REGNO (subreg),
1812*c87b03e5Sespie GET_MODE (subreg),
1813*c87b03e5Sespie SUBREG_BYTE (usedreg),
1814*c87b03e5Sespie GET_MODE (usedreg));
1815*c87b03e5Sespie else
1816*c87b03e5Sespie offset += (SUBREG_BYTE (usedreg)
1817*c87b03e5Sespie / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1818*c87b03e5Sespie }
1819*c87b03e5Sespie
1820*c87b03e5Sespie usedreg = subreg;
1821*c87b03e5Sespie }
1822*c87b03e5Sespie
1823*c87b03e5Sespie if (GET_CODE (usedreg) != REG)
1824*c87b03e5Sespie return 0;
1825*c87b03e5Sespie
1826*c87b03e5Sespie ureg = REGNO (usedreg);
1827*c87b03e5Sespie if (ureg < FIRST_PSEUDO_REGISTER)
1828*c87b03e5Sespie usize = HARD_REGNO_NREGS (ureg, GET_MODE (usedreg));
1829*c87b03e5Sespie else
1830*c87b03e5Sespie usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1831*c87b03e5Sespie + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1832*c87b03e5Sespie / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1833*c87b03e5Sespie
1834*c87b03e5Sespie while (GET_CODE (setreg) == SUBREG)
1835*c87b03e5Sespie {
1836*c87b03e5Sespie rtx subreg = SUBREG_REG (setreg);
1837*c87b03e5Sespie
1838*c87b03e5Sespie if (GET_CODE (subreg) == REG)
1839*c87b03e5Sespie {
1840*c87b03e5Sespie if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1841*c87b03e5Sespie may_save_copy = 0;
1842*c87b03e5Sespie
1843*c87b03e5Sespie if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1844*c87b03e5Sespie offset -= subreg_regno_offset (REGNO (subreg),
1845*c87b03e5Sespie GET_MODE (subreg),
1846*c87b03e5Sespie SUBREG_BYTE (setreg),
1847*c87b03e5Sespie GET_MODE (setreg));
1848*c87b03e5Sespie else
1849*c87b03e5Sespie offset -= (SUBREG_BYTE (setreg)
1850*c87b03e5Sespie / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1851*c87b03e5Sespie }
1852*c87b03e5Sespie
1853*c87b03e5Sespie setreg = subreg;
1854*c87b03e5Sespie }
1855*c87b03e5Sespie
1856*c87b03e5Sespie if (GET_CODE (setreg) != REG)
1857*c87b03e5Sespie return 0;
1858*c87b03e5Sespie
1859*c87b03e5Sespie sreg = REGNO (setreg);
1860*c87b03e5Sespie if (sreg < FIRST_PSEUDO_REGISTER)
1861*c87b03e5Sespie ssize = HARD_REGNO_NREGS (sreg, GET_MODE (setreg));
1862*c87b03e5Sespie else
1863*c87b03e5Sespie ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1864*c87b03e5Sespie + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1865*c87b03e5Sespie / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1866*c87b03e5Sespie
1867*c87b03e5Sespie /* If UREG is a pseudo-register that hasn't already been assigned a
1868*c87b03e5Sespie quantity number, it means that it is not local to this block or dies
1869*c87b03e5Sespie more than once. In either event, we can't do anything with it. */
1870*c87b03e5Sespie if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1871*c87b03e5Sespie /* Do not combine registers unless one fits within the other. */
1872*c87b03e5Sespie || (offset > 0 && usize + offset > ssize)
1873*c87b03e5Sespie || (offset < 0 && usize + offset < ssize)
1874*c87b03e5Sespie /* Do not combine with a smaller already-assigned object
1875*c87b03e5Sespie if that smaller object is already combined with something bigger. */
1876*c87b03e5Sespie || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1877*c87b03e5Sespie && usize < qty[reg_qty[ureg]].size)
1878*c87b03e5Sespie /* Can't combine if SREG is not a register we can allocate. */
1879*c87b03e5Sespie || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1880*c87b03e5Sespie /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1881*c87b03e5Sespie These have already been taken care of. This probably wouldn't
1882*c87b03e5Sespie combine anyway, but don't take any chances. */
1883*c87b03e5Sespie || (ureg >= FIRST_PSEUDO_REGISTER
1884*c87b03e5Sespie && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1885*c87b03e5Sespie /* Don't tie something to itself. In most cases it would make no
1886*c87b03e5Sespie difference, but it would screw up if the reg being tied to itself
1887*c87b03e5Sespie also dies in this insn. */
1888*c87b03e5Sespie || ureg == sreg
1889*c87b03e5Sespie /* Don't try to connect two different hardware registers. */
1890*c87b03e5Sespie || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1891*c87b03e5Sespie /* Don't connect two different machine modes if they have different
1892*c87b03e5Sespie implications as to which registers may be used. */
1893*c87b03e5Sespie || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1894*c87b03e5Sespie return 0;
1895*c87b03e5Sespie
1896*c87b03e5Sespie /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1897*c87b03e5Sespie qty_phys_sugg for the pseudo instead of tying them.
1898*c87b03e5Sespie
1899*c87b03e5Sespie Return "failure" so that the lifespan of UREG is terminated here;
1900*c87b03e5Sespie that way the two lifespans will be disjoint and nothing will prevent
1901*c87b03e5Sespie the pseudo reg from being given this hard reg. */
1902*c87b03e5Sespie
1903*c87b03e5Sespie if (ureg < FIRST_PSEUDO_REGISTER)
1904*c87b03e5Sespie {
1905*c87b03e5Sespie /* Allocate a quantity number so we have a place to put our
1906*c87b03e5Sespie suggestions. */
1907*c87b03e5Sespie if (reg_qty[sreg] == -2)
1908*c87b03e5Sespie reg_is_born (setreg, 2 * insn_number);
1909*c87b03e5Sespie
1910*c87b03e5Sespie if (reg_qty[sreg] >= 0)
1911*c87b03e5Sespie {
1912*c87b03e5Sespie if (may_save_copy
1913*c87b03e5Sespie && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1914*c87b03e5Sespie {
1915*c87b03e5Sespie SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1916*c87b03e5Sespie qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1917*c87b03e5Sespie }
1918*c87b03e5Sespie else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1919*c87b03e5Sespie {
1920*c87b03e5Sespie SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1921*c87b03e5Sespie qty_phys_num_sugg[reg_qty[sreg]]++;
1922*c87b03e5Sespie }
1923*c87b03e5Sespie }
1924*c87b03e5Sespie return 0;
1925*c87b03e5Sespie }
1926*c87b03e5Sespie
1927*c87b03e5Sespie /* Similarly for SREG a hard register and UREG a pseudo register. */
1928*c87b03e5Sespie
1929*c87b03e5Sespie if (sreg < FIRST_PSEUDO_REGISTER)
1930*c87b03e5Sespie {
1931*c87b03e5Sespie if (may_save_copy
1932*c87b03e5Sespie && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1933*c87b03e5Sespie {
1934*c87b03e5Sespie SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1935*c87b03e5Sespie qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1936*c87b03e5Sespie }
1937*c87b03e5Sespie else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1938*c87b03e5Sespie {
1939*c87b03e5Sespie SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1940*c87b03e5Sespie qty_phys_num_sugg[reg_qty[ureg]]++;
1941*c87b03e5Sespie }
1942*c87b03e5Sespie return 0;
1943*c87b03e5Sespie }
1944*c87b03e5Sespie
1945*c87b03e5Sespie /* At this point we know that SREG and UREG are both pseudos.
1946*c87b03e5Sespie Do nothing if SREG already has a quantity or is a register that we
1947*c87b03e5Sespie don't allocate. */
1948*c87b03e5Sespie if (reg_qty[sreg] >= -1
1949*c87b03e5Sespie /* If we are not going to let any regs live across calls,
1950*c87b03e5Sespie don't tie a call-crossing reg to a non-call-crossing reg. */
1951*c87b03e5Sespie || (current_function_has_nonlocal_label
1952*c87b03e5Sespie && ((REG_N_CALLS_CROSSED (ureg) > 0)
1953*c87b03e5Sespie != (REG_N_CALLS_CROSSED (sreg) > 0))))
1954*c87b03e5Sespie return 0;
1955*c87b03e5Sespie
1956*c87b03e5Sespie /* We don't already know about SREG, so tie it to UREG
1957*c87b03e5Sespie if this is the last use of UREG, provided the classes they want
1958*c87b03e5Sespie are compatible. */
1959*c87b03e5Sespie
1960*c87b03e5Sespie if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
1961*c87b03e5Sespie && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
1962*c87b03e5Sespie {
1963*c87b03e5Sespie /* Add SREG to UREG's quantity. */
1964*c87b03e5Sespie sqty = reg_qty[ureg];
1965*c87b03e5Sespie reg_qty[sreg] = sqty;
1966*c87b03e5Sespie reg_offset[sreg] = reg_offset[ureg] + offset;
1967*c87b03e5Sespie reg_next_in_qty[sreg] = qty[sqty].first_reg;
1968*c87b03e5Sespie qty[sqty].first_reg = sreg;
1969*c87b03e5Sespie
1970*c87b03e5Sespie /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
1971*c87b03e5Sespie update_qty_class (sqty, sreg);
1972*c87b03e5Sespie
1973*c87b03e5Sespie /* Update info about quantity SQTY. */
1974*c87b03e5Sespie qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
1975*c87b03e5Sespie qty[sqty].n_refs += REG_N_REFS (sreg);
1976*c87b03e5Sespie qty[sqty].freq += REG_FREQ (sreg);
1977*c87b03e5Sespie if (usize < ssize)
1978*c87b03e5Sespie {
1979*c87b03e5Sespie int i;
1980*c87b03e5Sespie
1981*c87b03e5Sespie for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
1982*c87b03e5Sespie reg_offset[i] -= offset;
1983*c87b03e5Sespie
1984*c87b03e5Sespie qty[sqty].size = ssize;
1985*c87b03e5Sespie qty[sqty].mode = GET_MODE (setreg);
1986*c87b03e5Sespie }
1987*c87b03e5Sespie }
1988*c87b03e5Sespie else
1989*c87b03e5Sespie return 0;
1990*c87b03e5Sespie
1991*c87b03e5Sespie return 1;
1992*c87b03e5Sespie }
1993*c87b03e5Sespie
1994*c87b03e5Sespie /* Return 1 if the preferred class of REG allows it to be tied
1995*c87b03e5Sespie to a quantity or register whose class is CLASS.
1996*c87b03e5Sespie True if REG's reg class either contains or is contained in CLASS. */
1997*c87b03e5Sespie
1998*c87b03e5Sespie static int
reg_meets_class_p(reg,class)1999*c87b03e5Sespie reg_meets_class_p (reg, class)
2000*c87b03e5Sespie int reg;
2001*c87b03e5Sespie enum reg_class class;
2002*c87b03e5Sespie {
2003*c87b03e5Sespie enum reg_class rclass = reg_preferred_class (reg);
2004*c87b03e5Sespie return (reg_class_subset_p (rclass, class)
2005*c87b03e5Sespie || reg_class_subset_p (class, rclass));
2006*c87b03e5Sespie }
2007*c87b03e5Sespie
2008*c87b03e5Sespie /* Update the class of QTYNO assuming that REG is being tied to it. */
2009*c87b03e5Sespie
2010*c87b03e5Sespie static void
update_qty_class(qtyno,reg)2011*c87b03e5Sespie update_qty_class (qtyno, reg)
2012*c87b03e5Sespie int qtyno;
2013*c87b03e5Sespie int reg;
2014*c87b03e5Sespie {
2015*c87b03e5Sespie enum reg_class rclass = reg_preferred_class (reg);
2016*c87b03e5Sespie if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2017*c87b03e5Sespie qty[qtyno].min_class = rclass;
2018*c87b03e5Sespie
2019*c87b03e5Sespie rclass = reg_alternate_class (reg);
2020*c87b03e5Sespie if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2021*c87b03e5Sespie qty[qtyno].alternate_class = rclass;
2022*c87b03e5Sespie }
2023*c87b03e5Sespie
2024*c87b03e5Sespie /* Handle something which alters the value of an rtx REG.
2025*c87b03e5Sespie
2026*c87b03e5Sespie REG is whatever is set or clobbered. SETTER is the rtx that
2027*c87b03e5Sespie is modifying the register.
2028*c87b03e5Sespie
2029*c87b03e5Sespie If it is not really a register, we do nothing.
2030*c87b03e5Sespie The file-global variables `this_insn' and `this_insn_number'
2031*c87b03e5Sespie carry info from `block_alloc'. */
2032*c87b03e5Sespie
2033*c87b03e5Sespie static void
reg_is_set(reg,setter,data)2034*c87b03e5Sespie reg_is_set (reg, setter, data)
2035*c87b03e5Sespie rtx reg;
2036*c87b03e5Sespie rtx setter;
2037*c87b03e5Sespie void *data ATTRIBUTE_UNUSED;
2038*c87b03e5Sespie {
2039*c87b03e5Sespie /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2040*c87b03e5Sespie a hard register. These may actually not exist any more. */
2041*c87b03e5Sespie
2042*c87b03e5Sespie if (GET_CODE (reg) != SUBREG
2043*c87b03e5Sespie && GET_CODE (reg) != REG)
2044*c87b03e5Sespie return;
2045*c87b03e5Sespie
2046*c87b03e5Sespie /* Mark this register as being born. If it is used in a CLOBBER, mark
2047*c87b03e5Sespie it as being born halfway between the previous insn and this insn so that
2048*c87b03e5Sespie it conflicts with our inputs but not the outputs of the previous insn. */
2049*c87b03e5Sespie
2050*c87b03e5Sespie reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2051*c87b03e5Sespie }
2052*c87b03e5Sespie
2053*c87b03e5Sespie /* Handle beginning of the life of register REG.
2054*c87b03e5Sespie BIRTH is the index at which this is happening. */
2055*c87b03e5Sespie
2056*c87b03e5Sespie static void
reg_is_born(reg,birth)2057*c87b03e5Sespie reg_is_born (reg, birth)
2058*c87b03e5Sespie rtx reg;
2059*c87b03e5Sespie int birth;
2060*c87b03e5Sespie {
2061*c87b03e5Sespie int regno;
2062*c87b03e5Sespie
2063*c87b03e5Sespie if (GET_CODE (reg) == SUBREG)
2064*c87b03e5Sespie {
2065*c87b03e5Sespie regno = REGNO (SUBREG_REG (reg));
2066*c87b03e5Sespie if (regno < FIRST_PSEUDO_REGISTER)
2067*c87b03e5Sespie regno = subreg_hard_regno (reg, 1);
2068*c87b03e5Sespie }
2069*c87b03e5Sespie else
2070*c87b03e5Sespie regno = REGNO (reg);
2071*c87b03e5Sespie
2072*c87b03e5Sespie if (regno < FIRST_PSEUDO_REGISTER)
2073*c87b03e5Sespie {
2074*c87b03e5Sespie mark_life (regno, GET_MODE (reg), 1);
2075*c87b03e5Sespie
2076*c87b03e5Sespie /* If the register was to have been born earlier that the present
2077*c87b03e5Sespie insn, mark it as live where it is actually born. */
2078*c87b03e5Sespie if (birth < 2 * this_insn_number)
2079*c87b03e5Sespie post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2080*c87b03e5Sespie }
2081*c87b03e5Sespie else
2082*c87b03e5Sespie {
2083*c87b03e5Sespie if (reg_qty[regno] == -2)
2084*c87b03e5Sespie alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2085*c87b03e5Sespie
2086*c87b03e5Sespie /* If this register has a quantity number, show that it isn't dead. */
2087*c87b03e5Sespie if (reg_qty[regno] >= 0)
2088*c87b03e5Sespie qty[reg_qty[regno]].death = -1;
2089*c87b03e5Sespie }
2090*c87b03e5Sespie }
2091*c87b03e5Sespie
/* Record the death of REG in the current insn.  If OUTPUT_P is nonzero,
   REG is an output that is dying (i.e., it is never used), otherwise it
   is an input (the normal case).
   If OUTPUT_P is 1, then we extend the life past the end of this insn.  */

static void
wipe_dead_reg (reg, output_p)
     rtx reg;
     int output_p;
{
  int regno = REGNO (reg);

  /* If this insn has multiple results,
     and the dead reg is used in one of the results,
     extend its life to after this insn,
     so it won't get allocated together with any other result of this insn.

     It is unsafe to use !single_set here since it will ignore an unused
     output.  Just because an output is unused does not mean the compiler
     can assume the side effect will not occur.  Consider if REG appears
     in the address of an output and we reload the output.  If we allocate
     REG to the same hard register as an unused output we could set the hard
     register before the output reload insn.  */
  if (GET_CODE (PATTERN (this_insn)) == PARALLEL
      && multiple_sets (this_insn))
    {
      int i;
      for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
	{
	  rtx set = XVECEXP (PATTERN (this_insn), 0, i);
	  /* A SET whose destination is not a plain REG, is not REG
	     itself, but merely overlaps REG (e.g. REG appears in a
	     destination address) forces REG to outlive this insn.  */
	  if (GET_CODE (set) == SET
	      && GET_CODE (SET_DEST (set)) != REG
	      && !rtx_equal_p (reg, SET_DEST (set))
	      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	    output_p = 1;
	}
    }

  /* If this register is used in an auto-increment address, then extend its
     life to after this insn, so that it won't get allocated together with
     the result of this insn.  */
  if (! output_p && find_regno_note (this_insn, REG_INC, regno))
    output_p = 1;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      mark_life (regno, GET_MODE (reg), 0);

      /* If a hard register is dying as an output, mark it as in use at
	 the beginning of this insn (the above statement would cause this
	 not to happen).  */
      if (output_p)
	post_mark_life (regno, GET_MODE (reg), 1,
			2 * this_insn_number, 2 * this_insn_number + 1);
    }

  /* A pseudo with a quantity: record its death point; adding OUTPUT_P
     pushes the death past the end of this insn when set.  */
  else if (reg_qty[regno] >= 0)
    qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
}
2151*c87b03e5Sespie
/* Find a block of SIZE words of hard regs in reg_class CLASS
   that can hold something of machine-mode MODE
   (but actually we test only the first of the block for holding MODE)
   and still free between insn BORN_INDEX and insn DEAD_INDEX,
   and return the number of the first of them.
   Return -1 if such a block cannot be found.
   If QTYNO crosses calls, insist on a register preserved by calls,
   unless ACCEPT_CALL_CLOBBERED is nonzero.

   If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
   register is available.  If not, return -1.  */

static int
find_free_reg (class, mode, qtyno, accept_call_clobbered, just_try_suggested,
	       born_index, dead_index)
     enum reg_class class;
     enum machine_mode mode;
     int qtyno;
     int accept_call_clobbered;
     int just_try_suggested;
     int born_index, dead_index;
{
  int i, ins;
  HARD_REG_SET first_used, used;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Validate our parameters.  */
  if (born_index < 0 || born_index > dead_index)
    abort ();

  /* Don't let a pseudo live in a reg across a function call
     if we might get a nonlocal goto.  */
  if (current_function_has_nonlocal_label
      && qty[qtyno].n_calls_crossed > 0)
    return -1;

  /* Start USED with the registers excluded outright: fixed registers
     always, and call-used registers too when the quantity crosses a
     call and call-clobbered registers are not acceptable.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used, call_fixed_reg_set);
  else if (qty[qtyno].n_calls_crossed == 0)
    COPY_HARD_REG_SET (used, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used, call_used_reg_set);

  if (accept_call_clobbered)
    IOR_HARD_REG_SET (used, losing_caller_save_reg_set);

  /* Exclude registers live at any point in [BORN_INDEX, DEAD_INDEX).  */
  for (ins = born_index; ins < dead_index; ins++)
    IOR_HARD_REG_SET (used, regs_live_at[ins]);

  /* Exclude everything outside CLASS.  */
  IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);

  /* Don't use the frame pointer reg in local-alloc even if
     we may omit the frame pointer, because if we do that and then we
     need a frame pointer, reload won't know how to move the pseudo
     to another hard reg.  It can move only regs made by global-alloc.

     This is true of any register that can be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (used, eliminables[i].from);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
     that it might be eliminated into.  */
  SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
#endif
#else
  SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
#endif

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
#endif

  /* Normally, the registers that can be used for the first register in
     a multi-register quantity are the same as those that can be used for
     subsequent registers.  However, if just trying suggested registers,
     restrict our consideration to them.  If there are copy-suggested
     register, try them.  Otherwise, try the arithmetic-suggested
     registers.  */
  COPY_HARD_REG_SET (first_used, used);

  if (just_try_suggested)
    {
      if (qty_phys_num_copy_sugg[qtyno] != 0)
	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
      else
	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
    }

  /* If all registers are excluded, we can't do anything.  */
  GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);

  /* If at least one would be suitable, test each hard reg.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      int regno = reg_alloc_order[i];
#else
      int regno = i;
#endif
      if (! TEST_HARD_REG_BIT (first_used, regno)
	  && HARD_REGNO_MODE_OK (regno, mode)
	  && (qty[qtyno].n_calls_crossed == 0
	      || accept_call_clobbered
	      || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	{
	  int j;
	  int size1 = HARD_REGNO_NREGS (regno, mode);
	  /* Check that every subsequent register of the block is also
	     free; J ends at SIZE1 exactly when all of them are.  */
	  for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
	  if (j == size1)
	    {
	      /* Mark that this register is in use between its birth and death
		 insns.  */
	      post_mark_life (regno, mode, 1, born_index, dead_index);
	      return regno;
	    }
#ifndef REG_ALLOC_ORDER
	  /* Skip starting points we know will lose.  */
	  i += j;
#endif
	}
    }

 fail:
  /* If we are just trying suggested register, we have just tried copy-
     suggested registers, and there are arithmetic-suggested registers,
     try them.  */

  /* If it would be profitable to allocate a call-clobbered register
     and save and restore it around calls, do that.  */
  if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
      && qty_phys_num_sugg[qtyno] != 0)
    {
      /* Don't try the copy-suggested regs again.  */
      qty_phys_num_copy_sugg[qtyno] = 0;
      return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
			    born_index, dead_index);
    }

  /* We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (! accept_call_clobbered
      && flag_caller_saves
      && ! just_try_suggested
      && qty[qtyno].n_calls_crossed != 0
      && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
				 qty[qtyno].n_calls_crossed))
    {
      /* Retry, this time allowing call-clobbered registers; if that
	 succeeds, the caller-save machinery must be engaged.  */
      i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
      if (i >= 0)
	caller_save_needed = 1;
      return i;
    }
  return -1;
}
2312*c87b03e5Sespie
2313*c87b03e5Sespie /* Mark that REGNO with machine-mode MODE is live starting from the current
2314*c87b03e5Sespie insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2315*c87b03e5Sespie is zero). */
2316*c87b03e5Sespie
2317*c87b03e5Sespie static void
mark_life(regno,mode,life)2318*c87b03e5Sespie mark_life (regno, mode, life)
2319*c87b03e5Sespie int regno;
2320*c87b03e5Sespie enum machine_mode mode;
2321*c87b03e5Sespie int life;
2322*c87b03e5Sespie {
2323*c87b03e5Sespie int j = HARD_REGNO_NREGS (regno, mode);
2324*c87b03e5Sespie if (life)
2325*c87b03e5Sespie while (--j >= 0)
2326*c87b03e5Sespie SET_HARD_REG_BIT (regs_live, regno + j);
2327*c87b03e5Sespie else
2328*c87b03e5Sespie while (--j >= 0)
2329*c87b03e5Sespie CLEAR_HARD_REG_BIT (regs_live, regno + j);
2330*c87b03e5Sespie }
2331*c87b03e5Sespie
2332*c87b03e5Sespie /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2333*c87b03e5Sespie is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2334*c87b03e5Sespie to insn number DEATH (exclusive). */
2335*c87b03e5Sespie
2336*c87b03e5Sespie static void
post_mark_life(regno,mode,life,birth,death)2337*c87b03e5Sespie post_mark_life (regno, mode, life, birth, death)
2338*c87b03e5Sespie int regno;
2339*c87b03e5Sespie enum machine_mode mode;
2340*c87b03e5Sespie int life, birth, death;
2341*c87b03e5Sespie {
2342*c87b03e5Sespie int j = HARD_REGNO_NREGS (regno, mode);
2343*c87b03e5Sespie #ifdef HARD_REG_SET
2344*c87b03e5Sespie /* Declare it register if it's a scalar. */
2345*c87b03e5Sespie register
2346*c87b03e5Sespie #endif
2347*c87b03e5Sespie HARD_REG_SET this_reg;
2348*c87b03e5Sespie
2349*c87b03e5Sespie CLEAR_HARD_REG_SET (this_reg);
2350*c87b03e5Sespie while (--j >= 0)
2351*c87b03e5Sespie SET_HARD_REG_BIT (this_reg, regno + j);
2352*c87b03e5Sespie
2353*c87b03e5Sespie if (life)
2354*c87b03e5Sespie while (birth < death)
2355*c87b03e5Sespie {
2356*c87b03e5Sespie IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2357*c87b03e5Sespie birth++;
2358*c87b03e5Sespie }
2359*c87b03e5Sespie else
2360*c87b03e5Sespie while (birth < death)
2361*c87b03e5Sespie {
2362*c87b03e5Sespie AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2363*c87b03e5Sespie birth++;
2364*c87b03e5Sespie }
2365*c87b03e5Sespie }
2366*c87b03e5Sespie
2367*c87b03e5Sespie /* INSN is the CLOBBER insn that starts a REG_NO_NOCONFLICT block, R0
2368*c87b03e5Sespie is the register being clobbered, and R1 is a register being used in
2369*c87b03e5Sespie the equivalent expression.
2370*c87b03e5Sespie
2371*c87b03e5Sespie If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2372*c87b03e5Sespie in which it is used, return 1.
2373*c87b03e5Sespie
2374*c87b03e5Sespie Otherwise, return 0. */
2375*c87b03e5Sespie
2376*c87b03e5Sespie static int
no_conflict_p(insn,r0,r1)2377*c87b03e5Sespie no_conflict_p (insn, r0, r1)
2378*c87b03e5Sespie rtx insn, r0 ATTRIBUTE_UNUSED, r1;
2379*c87b03e5Sespie {
2380*c87b03e5Sespie int ok = 0;
2381*c87b03e5Sespie rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2382*c87b03e5Sespie rtx p, last;
2383*c87b03e5Sespie
2384*c87b03e5Sespie /* If R1 is a hard register, return 0 since we handle this case
2385*c87b03e5Sespie when we scan the insns that actually use it. */
2386*c87b03e5Sespie
2387*c87b03e5Sespie if (note == 0
2388*c87b03e5Sespie || (GET_CODE (r1) == REG && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2389*c87b03e5Sespie || (GET_CODE (r1) == SUBREG && GET_CODE (SUBREG_REG (r1)) == REG
2390*c87b03e5Sespie && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2391*c87b03e5Sespie return 0;
2392*c87b03e5Sespie
2393*c87b03e5Sespie last = XEXP (note, 0);
2394*c87b03e5Sespie
2395*c87b03e5Sespie for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2396*c87b03e5Sespie if (INSN_P (p))
2397*c87b03e5Sespie {
2398*c87b03e5Sespie if (find_reg_note (p, REG_DEAD, r1))
2399*c87b03e5Sespie ok = 1;
2400*c87b03e5Sespie
2401*c87b03e5Sespie /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2402*c87b03e5Sespie some earlier optimization pass has inserted instructions into
2403*c87b03e5Sespie the sequence, and it is not safe to perform this optimization.
2404*c87b03e5Sespie Note that emit_no_conflict_block always ensures that this is
2405*c87b03e5Sespie true when these sequences are created. */
2406*c87b03e5Sespie if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2407*c87b03e5Sespie return 0;
2408*c87b03e5Sespie }
2409*c87b03e5Sespie
2410*c87b03e5Sespie return ok;
2411*c87b03e5Sespie }
2412*c87b03e5Sespie
/* Return the number of alternatives for which the constraint string P
   indicates that the operand must be equal to operand 0 and that no register
   is acceptable.  */

static int
requires_inout (p)
     const char *p;
{
  char c;
  int found_zero = 0;		/* Saw a '0' matching constraint in this alt.  */
  int reg_allowed = 0;		/* Saw a register-allowing letter in this alt.  */
  int num_matching_alts = 0;

  while ((c = *p++))
    switch (c)
      {
      case '=': case '+': case '?':
      case '#': case '&': case '!':
      case '*': case '%':
      case 'm': case '<': case '>': case 'V': case 'o':
      case 'E': case 'F': case 'G': case 'H':
      case 's': case 'i': case 'n':
      case 'I': case 'J': case 'K': case 'L':
      case 'M': case 'N': case 'O': case 'P':
      case 'X':
	/* These don't say anything we care about.  */
	break;

      case ',':
	/* End of one alternative: tally it if it required matching
	   operand 0 without allowing a register, then reset.  */
	if (found_zero && ! reg_allowed)
	  num_matching_alts++;

	found_zero = reg_allowed = 0;
	break;

      case '0':
	found_zero = 1;
	break;

      case '1': case '2': case '3': case '4': case '5':
      case '6': case '7': case '8': case '9':
	/* Skip the balance of the matching constraint.  */
	while (ISDIGIT (*p))
	  p++;
	break;

      default:
	/* A machine-specific constraint letter allows a register only
	   if it maps to a register class or is an address constraint.  */
	if (REG_CLASS_FROM_LETTER (c) == NO_REGS
	    && !EXTRA_ADDRESS_CONSTRAINT (c))
	  break;
	/* FALLTHRU */
      case 'p':
      case 'g': case 'r':
	reg_allowed = 1;
	break;
      }

  /* Tally the final alternative (not terminated by ',').  */
  if (found_zero && ! reg_allowed)
    num_matching_alts++;

  return num_matching_alts;
}
2475*c87b03e5Sespie
2476*c87b03e5Sespie void
dump_local_alloc(file)2477*c87b03e5Sespie dump_local_alloc (file)
2478*c87b03e5Sespie FILE *file;
2479*c87b03e5Sespie {
2480*c87b03e5Sespie int i;
2481*c87b03e5Sespie for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2482*c87b03e5Sespie if (reg_renumber[i] != -1)
2483*c87b03e5Sespie fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2484*c87b03e5Sespie }
2485