xref: /openbsd/gnu/gcc/gcc/local-alloc.c (revision 404b540a)
1*404b540aSrobert /* Allocate registers within a basic block, for GNU compiler.
2*404b540aSrobert    Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3*404b540aSrobert    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4*404b540aSrobert    Inc.
5*404b540aSrobert 
6*404b540aSrobert This file is part of GCC.
7*404b540aSrobert 
8*404b540aSrobert GCC is free software; you can redistribute it and/or modify it under
9*404b540aSrobert the terms of the GNU General Public License as published by the Free
10*404b540aSrobert Software Foundation; either version 2, or (at your option) any later
11*404b540aSrobert version.
12*404b540aSrobert 
13*404b540aSrobert GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14*404b540aSrobert WARRANTY; without even the implied warranty of MERCHANTABILITY or
15*404b540aSrobert FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16*404b540aSrobert for more details.
17*404b540aSrobert 
18*404b540aSrobert You should have received a copy of the GNU General Public License
19*404b540aSrobert along with GCC; see the file COPYING.  If not, write to the Free
20*404b540aSrobert Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21*404b540aSrobert 02110-1301, USA.  */
22*404b540aSrobert 
23*404b540aSrobert /* Allocation of hard register numbers to pseudo registers is done in
24*404b540aSrobert    two passes.  In this pass we consider only regs that are born and
25*404b540aSrobert    die once within one basic block.  We do this one basic block at a
26*404b540aSrobert    time.  Then the next pass allocates the registers that remain.
27*404b540aSrobert    Two passes are used because this pass uses methods that work only
28*404b540aSrobert    on linear code, but that do a better job than the general methods
29*404b540aSrobert    used in global_alloc, and more quickly too.
30*404b540aSrobert 
31*404b540aSrobert    The assignments made are recorded in the vector reg_renumber
32*404b540aSrobert    whose space is allocated here.  The rtl code itself is not altered.
33*404b540aSrobert 
34*404b540aSrobert    We assign each instruction in the basic block a number
35*404b540aSrobert    which is its order from the beginning of the block.
36*404b540aSrobert    Then we can represent the lifetime of a pseudo register with
37*404b540aSrobert    a pair of numbers, and check for conflicts easily.
38*404b540aSrobert    We can record the availability of hard registers with a
39*404b540aSrobert    HARD_REG_SET for each instruction.  The HARD_REG_SET
40*404b540aSrobert    contains 0 or 1 for each hard reg.
41*404b540aSrobert 
42*404b540aSrobert    To avoid register shuffling, we tie registers together when one
43*404b540aSrobert    dies by being copied into another, or dies in an instruction that
44*404b540aSrobert    does arithmetic to produce another.  The tied registers are
45*404b540aSrobert    allocated as one.  Registers with different reg class preferences
46*404b540aSrobert    can never be tied unless the class preferred by one is a subclass
47*404b540aSrobert    of the one preferred by the other.
48*404b540aSrobert 
49*404b540aSrobert    Tying is represented with "quantity numbers".
50*404b540aSrobert    A non-tied register is given a new quantity number.
51*404b540aSrobert    Tied registers have the same quantity number.
52*404b540aSrobert 
53*404b540aSrobert    We have provision to exempt registers, even when they are contained
54*404b540aSrobert    within the block, that can be tied to others that are not contained in it.
55*404b540aSrobert    This is so that global_alloc could process them both and tie them then.
56*404b540aSrobert    But this is currently disabled since tying in global_alloc is not
57*404b540aSrobert    yet implemented.  */
58*404b540aSrobert 
59*404b540aSrobert /* Pseudos allocated here can be reallocated by global.c if the hard register
60*404b540aSrobert    is used as a spill register.  Currently we don't allocate such pseudos
61*404b540aSrobert    here if their preferred class is likely to be used by spills.  */
62*404b540aSrobert 
63*404b540aSrobert #include "config.h"
64*404b540aSrobert #include "system.h"
65*404b540aSrobert #include "coretypes.h"
66*404b540aSrobert #include "tm.h"
67*404b540aSrobert #include "hard-reg-set.h"
68*404b540aSrobert #include "rtl.h"
69*404b540aSrobert #include "tm_p.h"
70*404b540aSrobert #include "flags.h"
71*404b540aSrobert #include "regs.h"
72*404b540aSrobert #include "function.h"
73*404b540aSrobert #include "insn-config.h"
74*404b540aSrobert #include "insn-attr.h"
75*404b540aSrobert #include "recog.h"
76*404b540aSrobert #include "output.h"
77*404b540aSrobert #include "toplev.h"
78*404b540aSrobert #include "except.h"
79*404b540aSrobert #include "integrate.h"
80*404b540aSrobert #include "reload.h"
81*404b540aSrobert #include "ggc.h"
82*404b540aSrobert #include "timevar.h"
83*404b540aSrobert #include "tree-pass.h"
84*404b540aSrobert 
85*404b540aSrobert /* Next quantity number available for allocation.  */
86*404b540aSrobert 
87*404b540aSrobert static int next_qty;
88*404b540aSrobert 
/* Information we maintain about each quantity.  A "quantity" is a set
   of one or more pseudo registers that have been tied together and will
   be allocated to the same hard register (or spilled together).  */
struct qty
{
  /* The number of refs to quantity Q.  */

  int n_refs;

  /* The frequency of uses of quantity Q.  */

  int freq;

  /* Insn number (counting from head of basic block)
     where quantity Q was born.  -1 if birth has not been recorded.  */

  int birth;

  /* Insn number (counting from head of basic block)
     where given quantity died.  Due to the way tying is done,
     and the fact that we consider in this pass only regs that die but once,
     a quantity can die only once.  Each quantity's life span
     is a set of consecutive insns.  -1 if death has not been recorded.  */

  int death;

  /* Number of words needed to hold the data in given quantity.
     This depends on its machine mode.  It is used for these purposes:
     1. It is used in computing the relative importance of qtys,
	which determines the order in which we look for regs for them.
     2. It is used in rules that prevent tying several registers of
	different sizes in a way that is geometrically impossible
	(see combine_regs).  */

  int size;

  /* Number of times a reg tied to given qty lives across a CALL_INSN.  */

  int n_calls_crossed;

  /* Number of times a reg tied to given qty lives across a CALL_INSN
     that might throw.  */

  int n_throwing_calls_crossed;

  /* The register number of one pseudo register whose reg_qty value is Q.
     This register should be the head of the chain
     maintained in reg_next_in_qty.  */

  int first_reg;

  /* Reg class contained in (smaller than) the preferred classes of all
     the pseudo regs that are tied in given quantity.
     This is the preferred class for allocating that quantity.  */

  enum reg_class min_class;

  /* Register class within which we allocate given qty if we can't get
     its preferred class.  */

  enum reg_class alternate_class;

  /* This holds the mode of the registers that are tied to given qty,
     or VOIDmode if registers with differing modes are tied together.  */

  enum machine_mode mode;

  /* The hard reg number chosen for given quantity,
     or -1 if none was found.  */

  short phys_reg;
};
159*404b540aSrobert 
160*404b540aSrobert static struct qty *qty;
161*404b540aSrobert 
162*404b540aSrobert /* These fields are kept separately to speedup their clearing.  */
163*404b540aSrobert 
164*404b540aSrobert /* We maintain two hard register sets that indicate suggested hard registers
165*404b540aSrobert    for each quantity.  The first, phys_copy_sugg, contains hard registers
166*404b540aSrobert    that are tied to the quantity by a simple copy.  The second contains all
167*404b540aSrobert    hard registers that are tied to the quantity via an arithmetic operation.
168*404b540aSrobert 
169*404b540aSrobert    The former register set is given priority for allocation.  This tends to
170*404b540aSrobert    eliminate copy insns.  */
171*404b540aSrobert 
172*404b540aSrobert /* Element Q is a set of hard registers that are suggested for quantity Q by
173*404b540aSrobert    copy insns.  */
174*404b540aSrobert 
175*404b540aSrobert static HARD_REG_SET *qty_phys_copy_sugg;
176*404b540aSrobert 
177*404b540aSrobert /* Element Q is a set of hard registers that are suggested for quantity Q by
178*404b540aSrobert    arithmetic insns.  */
179*404b540aSrobert 
180*404b540aSrobert static HARD_REG_SET *qty_phys_sugg;
181*404b540aSrobert 
182*404b540aSrobert /* Element Q is the number of suggested registers in qty_phys_copy_sugg.  */
183*404b540aSrobert 
184*404b540aSrobert static short *qty_phys_num_copy_sugg;
185*404b540aSrobert 
186*404b540aSrobert /* Element Q is the number of suggested registers in qty_phys_sugg.  */
187*404b540aSrobert 
188*404b540aSrobert static short *qty_phys_num_sugg;
189*404b540aSrobert 
190*404b540aSrobert /* If (REG N) has been assigned a quantity number, is a register number
191*404b540aSrobert    of another register assigned the same quantity number, or -1 for the
192*404b540aSrobert    end of the chain.  qty->first_reg point to the head of this chain.  */
193*404b540aSrobert 
194*404b540aSrobert static int *reg_next_in_qty;
195*404b540aSrobert 
196*404b540aSrobert /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
197*404b540aSrobert    if it is >= 0,
198*404b540aSrobert    of -1 if this register cannot be allocated by local-alloc,
199*404b540aSrobert    or -2 if not known yet.
200*404b540aSrobert 
201*404b540aSrobert    Note that if we see a use or death of pseudo register N with
202*404b540aSrobert    reg_qty[N] == -2, register N must be local to the current block.  If
203*404b540aSrobert    it were used in more than one block, we would have reg_qty[N] == -1.
204*404b540aSrobert    This relies on the fact that if reg_basic_block[N] is >= 0, register N
205*404b540aSrobert    will not appear in any other block.  We save a considerable number of
206*404b540aSrobert    tests by exploiting this.
207*404b540aSrobert 
208*404b540aSrobert    If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
209*404b540aSrobert    be referenced.  */
210*404b540aSrobert 
211*404b540aSrobert static int *reg_qty;
212*404b540aSrobert 
213*404b540aSrobert /* The offset (in words) of register N within its quantity.
214*404b540aSrobert    This can be nonzero if register N is SImode, and has been tied
215*404b540aSrobert    to a subreg of a DImode register.  */
216*404b540aSrobert 
217*404b540aSrobert static char *reg_offset;
218*404b540aSrobert 
219*404b540aSrobert /* Vector of substitutions of register numbers,
220*404b540aSrobert    used to map pseudo regs into hardware regs.
221*404b540aSrobert    This is set up as a result of register allocation.
222*404b540aSrobert    Element N is the hard reg assigned to pseudo reg N,
223*404b540aSrobert    or is -1 if no hard reg was assigned.
224*404b540aSrobert    If N is a hard reg number, element N is N.  */
225*404b540aSrobert 
226*404b540aSrobert short *reg_renumber;
227*404b540aSrobert 
228*404b540aSrobert /* Set of hard registers live at the current point in the scan
229*404b540aSrobert    of the instructions in a basic block.  */
230*404b540aSrobert 
231*404b540aSrobert static HARD_REG_SET regs_live;
232*404b540aSrobert 
233*404b540aSrobert /* Each set of hard registers indicates registers live at a particular
234*404b540aSrobert    point in the basic block.  For N even, regs_live_at[N] says which
235*404b540aSrobert    hard registers are needed *after* insn N/2 (i.e., they may not
236*404b540aSrobert    conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1.
237*404b540aSrobert 
238*404b540aSrobert    If an object is to conflict with the inputs of insn J but not the
239*404b540aSrobert    outputs of insn J + 1, we say it is born at index J*2 - 1.  Similarly,
240*404b540aSrobert    if it is to conflict with the outputs of insn J but not the inputs of
241*404b540aSrobert    insn J + 1, it is said to die at index J*2 + 1.  */
242*404b540aSrobert 
243*404b540aSrobert static HARD_REG_SET *regs_live_at;
244*404b540aSrobert 
245*404b540aSrobert /* Communicate local vars `insn_number' and `insn'
246*404b540aSrobert    from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'.  */
247*404b540aSrobert static int this_insn_number;
248*404b540aSrobert static rtx this_insn;
249*404b540aSrobert 
250*404b540aSrobert struct equivalence
251*404b540aSrobert {
252*404b540aSrobert   /* Set when an attempt should be made to replace a register
253*404b540aSrobert      with the associated src_p entry.  */
254*404b540aSrobert 
255*404b540aSrobert   char replace;
256*404b540aSrobert 
257*404b540aSrobert   /* Set when a REG_EQUIV note is found or created.  Use to
258*404b540aSrobert      keep track of what memory accesses might be created later,
259*404b540aSrobert      e.g. by reload.  */
260*404b540aSrobert 
261*404b540aSrobert   rtx replacement;
262*404b540aSrobert 
263*404b540aSrobert   rtx *src_p;
264*404b540aSrobert 
265*404b540aSrobert   /* Loop depth is used to recognize equivalences which appear
266*404b540aSrobert      to be present within the same loop (or in an inner loop).  */
267*404b540aSrobert 
268*404b540aSrobert   int loop_depth;
269*404b540aSrobert 
270*404b540aSrobert   /* The list of each instruction which initializes this register.  */
271*404b540aSrobert 
272*404b540aSrobert   rtx init_insns;
273*404b540aSrobert 
274*404b540aSrobert   /* Nonzero if this had a preexisting REG_EQUIV note.  */
275*404b540aSrobert 
276*404b540aSrobert   int is_arg_equivalence;
277*404b540aSrobert };
278*404b540aSrobert 
279*404b540aSrobert /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
280*404b540aSrobert    structure for that register.  */
281*404b540aSrobert 
282*404b540aSrobert static struct equivalence *reg_equiv;
283*404b540aSrobert 
284*404b540aSrobert /* Nonzero if we recorded an equivalence for a LABEL_REF.  */
285*404b540aSrobert static int recorded_label_ref;
286*404b540aSrobert 
287*404b540aSrobert static void alloc_qty (int, enum machine_mode, int, int);
288*404b540aSrobert static void validate_equiv_mem_from_store (rtx, rtx, void *);
289*404b540aSrobert static int validate_equiv_mem (rtx, rtx, rtx);
290*404b540aSrobert static int equiv_init_varies_p (rtx);
291*404b540aSrobert static int equiv_init_movable_p (rtx, int);
292*404b540aSrobert static int contains_replace_regs (rtx);
293*404b540aSrobert static int memref_referenced_p (rtx, rtx);
294*404b540aSrobert static int memref_used_between_p (rtx, rtx, rtx);
295*404b540aSrobert static void update_equiv_regs (void);
296*404b540aSrobert static void no_equiv (rtx, rtx, void *);
297*404b540aSrobert static void block_alloc (int);
298*404b540aSrobert static int qty_sugg_compare (int, int);
299*404b540aSrobert static int qty_sugg_compare_1 (const void *, const void *);
300*404b540aSrobert static int qty_compare (int, int);
301*404b540aSrobert static int qty_compare_1 (const void *, const void *);
302*404b540aSrobert static int combine_regs (rtx, rtx, int, int, rtx, int);
303*404b540aSrobert static int reg_meets_class_p (int, enum reg_class);
304*404b540aSrobert static void update_qty_class (int, int);
305*404b540aSrobert static void reg_is_set (rtx, rtx, void *);
306*404b540aSrobert static void reg_is_born (rtx, int);
307*404b540aSrobert static void wipe_dead_reg (rtx, int);
308*404b540aSrobert static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
309*404b540aSrobert 			  int, int);
310*404b540aSrobert static void mark_life (int, enum machine_mode, int);
311*404b540aSrobert static void post_mark_life (int, enum machine_mode, int, int, int);
312*404b540aSrobert static int no_conflict_p (rtx, rtx, rtx);
313*404b540aSrobert static int requires_inout (const char *);
314*404b540aSrobert 
315*404b540aSrobert /* Allocate a new quantity (new within current basic block)
316*404b540aSrobert    for register number REGNO which is born at index BIRTH
317*404b540aSrobert    within the block.  MODE and SIZE are info on reg REGNO.  */
318*404b540aSrobert 
319*404b540aSrobert static void
alloc_qty(int regno,enum machine_mode mode,int size,int birth)320*404b540aSrobert alloc_qty (int regno, enum machine_mode mode, int size, int birth)
321*404b540aSrobert {
322*404b540aSrobert   int qtyno = next_qty++;
323*404b540aSrobert 
324*404b540aSrobert   reg_qty[regno] = qtyno;
325*404b540aSrobert   reg_offset[regno] = 0;
326*404b540aSrobert   reg_next_in_qty[regno] = -1;
327*404b540aSrobert 
328*404b540aSrobert   qty[qtyno].first_reg = regno;
329*404b540aSrobert   qty[qtyno].size = size;
330*404b540aSrobert   qty[qtyno].mode = mode;
331*404b540aSrobert   qty[qtyno].birth = birth;
332*404b540aSrobert   qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
333*404b540aSrobert   qty[qtyno].n_throwing_calls_crossed = REG_N_THROWING_CALLS_CROSSED (regno);
334*404b540aSrobert   qty[qtyno].min_class = reg_preferred_class (regno);
335*404b540aSrobert   qty[qtyno].alternate_class = reg_alternate_class (regno);
336*404b540aSrobert   qty[qtyno].n_refs = REG_N_REFS (regno);
337*404b540aSrobert   qty[qtyno].freq = REG_FREQ (regno);
338*404b540aSrobert }
339*404b540aSrobert 
/* Main entry point of this file.  Runs the whole local-allocation pass:
   promotes equivalences, sets up the per-quantity vectors, then allocates
   each basic block's block-local pseudos.  Returns nonzero if an
   equivalence for a LABEL_REF was recorded, in which case the caller
   must rerun the jump optimizer.  */

static int
local_alloc (void)
{
  int i;
  int max_qty;
  basic_block b;

  /* We need to keep track of whether or not we recorded a LABEL_REF so
     that we know if the jump optimizer needs to be rerun.  */
  recorded_label_ref = 0;

  /* Leaf functions and non-leaf functions have different needs.
     If defined, let the machine say what kind of ordering we
     should use.  */
#ifdef ORDER_REGS_FOR_LOCAL_ALLOC
  ORDER_REGS_FOR_LOCAL_ALLOC;
#endif

  /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
     registers.  */
  update_equiv_regs ();

  /* This sets the maximum number of quantities we can have.  Quantity
     numbers start at zero and we can have one for each pseudo.  */
  max_qty = (max_regno - FIRST_PSEUDO_REGISTER);

  /* Allocate vectors of temporary data.
     See the declarations of these variables, above,
     for what they mean.  */

  qty = XNEWVEC (struct qty, max_qty);
  qty_phys_copy_sugg = XNEWVEC (HARD_REG_SET, max_qty);
  qty_phys_num_copy_sugg = XNEWVEC (short, max_qty);
  qty_phys_sugg = XNEWVEC (HARD_REG_SET, max_qty);
  qty_phys_num_sugg = XNEWVEC (short, max_qty);

  reg_qty = XNEWVEC (int, max_regno);
  reg_offset = XNEWVEC (char, max_regno);
  reg_next_in_qty = XNEWVEC (int, max_regno);

  /* Determine which pseudo-registers can be allocated by local-alloc.
     In general, these are the registers used only in a single block and
     which only die once.

     We need not be concerned with which block actually uses the register
     since we will never see it outside that block.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      /* -2 means "candidate for local allocation, no quantity assigned
	 yet"; -1 means "leave it for global_alloc".  */
      if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
	reg_qty[i] = -2;
      else
	reg_qty[i] = -1;
    }

  /* Force loop below to initialize entire quantity array.  */
  next_qty = max_qty;

  /* Allocate each block's local registers, block by block.  */

  FOR_EACH_BB (b)
    {
      /* NEXT_QTY indicates which elements of the `qty_...'
	 vectors might need to be initialized because they were used
	 for the previous block; it is set to the entire array before
	 block 0.  Initialize those, with explicit loop if there are few,
	 else with memset.  Do not initialize vectors that are
	 explicitly set by `alloc_qty'.  */

      if (next_qty < 6)
	{
	  for (i = 0; i < next_qty; i++)
	    {
	      CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
	      qty_phys_num_copy_sugg[i] = 0;
	      CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
	      qty_phys_num_sugg[i] = 0;
	    }
	}
      else
	{
#define CLEAR(vector)  \
	  memset ((vector), 0, (sizeof (*(vector))) * next_qty);

	  CLEAR (qty_phys_copy_sugg);
	  CLEAR (qty_phys_num_copy_sugg);
	  CLEAR (qty_phys_sugg);
	  CLEAR (qty_phys_num_sugg);
	}

      next_qty = 0;

      block_alloc (b->index);
    }

  /* Release all pass-local vectors.  */
  free (qty);
  free (qty_phys_copy_sugg);
  free (qty_phys_num_copy_sugg);
  free (qty_phys_sugg);
  free (qty_phys_num_sugg);

  free (reg_qty);
  free (reg_offset);
  free (reg_next_in_qty);

  return recorded_label_ref;
}
449*404b540aSrobert 
450*404b540aSrobert /* Used for communication between the following two functions: contains
451*404b540aSrobert    a MEM that we wish to ensure remains unchanged.  */
452*404b540aSrobert static rtx equiv_mem;
453*404b540aSrobert 
454*404b540aSrobert /* Set nonzero if EQUIV_MEM is modified.  */
455*404b540aSrobert static int equiv_mem_modified;
456*404b540aSrobert 
457*404b540aSrobert /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
458*404b540aSrobert    Called via note_stores.  */
459*404b540aSrobert 
460*404b540aSrobert static void
validate_equiv_mem_from_store(rtx dest,rtx set ATTRIBUTE_UNUSED,void * data ATTRIBUTE_UNUSED)461*404b540aSrobert validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
462*404b540aSrobert 			       void *data ATTRIBUTE_UNUSED)
463*404b540aSrobert {
464*404b540aSrobert   if ((REG_P (dest)
465*404b540aSrobert        && reg_overlap_mentioned_p (dest, equiv_mem))
466*404b540aSrobert       || (MEM_P (dest)
467*404b540aSrobert 	  && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
468*404b540aSrobert     equiv_mem_modified = 1;
469*404b540aSrobert }
470*404b540aSrobert 
471*404b540aSrobert /* Verify that no store between START and the death of REG invalidates
472*404b540aSrobert    MEMREF.  MEMREF is invalidated by modifying a register used in MEMREF,
473*404b540aSrobert    by storing into an overlapping memory location, or with a non-const
474*404b540aSrobert    CALL_INSN.
475*404b540aSrobert 
476*404b540aSrobert    Return 1 if MEMREF remains valid.  */
477*404b540aSrobert 
478*404b540aSrobert static int
validate_equiv_mem(rtx start,rtx reg,rtx memref)479*404b540aSrobert validate_equiv_mem (rtx start, rtx reg, rtx memref)
480*404b540aSrobert {
481*404b540aSrobert   rtx insn;
482*404b540aSrobert   rtx note;
483*404b540aSrobert 
484*404b540aSrobert   equiv_mem = memref;
485*404b540aSrobert   equiv_mem_modified = 0;
486*404b540aSrobert 
487*404b540aSrobert   /* If the memory reference has side effects or is volatile, it isn't a
488*404b540aSrobert      valid equivalence.  */
489*404b540aSrobert   if (side_effects_p (memref))
490*404b540aSrobert     return 0;
491*404b540aSrobert 
492*404b540aSrobert   for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
493*404b540aSrobert     {
494*404b540aSrobert       if (! INSN_P (insn))
495*404b540aSrobert 	continue;
496*404b540aSrobert 
497*404b540aSrobert       if (find_reg_note (insn, REG_DEAD, reg))
498*404b540aSrobert 	return 1;
499*404b540aSrobert 
500*404b540aSrobert       if (CALL_P (insn) && ! MEM_READONLY_P (memref)
501*404b540aSrobert 	  && ! CONST_OR_PURE_CALL_P (insn))
502*404b540aSrobert 	return 0;
503*404b540aSrobert 
504*404b540aSrobert       note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
505*404b540aSrobert 
506*404b540aSrobert       /* If a register mentioned in MEMREF is modified via an
507*404b540aSrobert 	 auto-increment, we lose the equivalence.  Do the same if one
508*404b540aSrobert 	 dies; although we could extend the life, it doesn't seem worth
509*404b540aSrobert 	 the trouble.  */
510*404b540aSrobert 
511*404b540aSrobert       for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
512*404b540aSrobert 	if ((REG_NOTE_KIND (note) == REG_INC
513*404b540aSrobert 	     || REG_NOTE_KIND (note) == REG_DEAD)
514*404b540aSrobert 	    && REG_P (XEXP (note, 0))
515*404b540aSrobert 	    && reg_overlap_mentioned_p (XEXP (note, 0), memref))
516*404b540aSrobert 	  return 0;
517*404b540aSrobert     }
518*404b540aSrobert 
519*404b540aSrobert   return 0;
520*404b540aSrobert }
521*404b540aSrobert 
522*404b540aSrobert /* Returns zero if X is known to be invariant.  */
523*404b540aSrobert 
524*404b540aSrobert static int
equiv_init_varies_p(rtx x)525*404b540aSrobert equiv_init_varies_p (rtx x)
526*404b540aSrobert {
527*404b540aSrobert   RTX_CODE code = GET_CODE (x);
528*404b540aSrobert   int i;
529*404b540aSrobert   const char *fmt;
530*404b540aSrobert 
531*404b540aSrobert   switch (code)
532*404b540aSrobert     {
533*404b540aSrobert     case MEM:
534*404b540aSrobert       return !MEM_READONLY_P (x) || equiv_init_varies_p (XEXP (x, 0));
535*404b540aSrobert 
536*404b540aSrobert     case CONST:
537*404b540aSrobert     case CONST_INT:
538*404b540aSrobert     case CONST_DOUBLE:
539*404b540aSrobert     case CONST_VECTOR:
540*404b540aSrobert     case SYMBOL_REF:
541*404b540aSrobert     case LABEL_REF:
542*404b540aSrobert       return 0;
543*404b540aSrobert 
544*404b540aSrobert     case REG:
545*404b540aSrobert       return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
546*404b540aSrobert 
547*404b540aSrobert     case ASM_OPERANDS:
548*404b540aSrobert       if (MEM_VOLATILE_P (x))
549*404b540aSrobert 	return 1;
550*404b540aSrobert 
551*404b540aSrobert       /* Fall through.  */
552*404b540aSrobert 
553*404b540aSrobert     default:
554*404b540aSrobert       break;
555*404b540aSrobert     }
556*404b540aSrobert 
557*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
558*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
559*404b540aSrobert     if (fmt[i] == 'e')
560*404b540aSrobert       {
561*404b540aSrobert 	if (equiv_init_varies_p (XEXP (x, i)))
562*404b540aSrobert 	  return 1;
563*404b540aSrobert       }
564*404b540aSrobert     else if (fmt[i] == 'E')
565*404b540aSrobert       {
566*404b540aSrobert 	int j;
567*404b540aSrobert 	for (j = 0; j < XVECLEN (x, i); j++)
568*404b540aSrobert 	  if (equiv_init_varies_p (XVECEXP (x, i, j)))
569*404b540aSrobert 	    return 1;
570*404b540aSrobert       }
571*404b540aSrobert 
572*404b540aSrobert   return 0;
573*404b540aSrobert }
574*404b540aSrobert 
575*404b540aSrobert /* Returns nonzero if X (used to initialize register REGNO) is movable.
576*404b540aSrobert    X is only movable if the registers it uses have equivalent initializations
577*404b540aSrobert    which appear to be within the same loop (or in an inner loop) and movable
578*404b540aSrobert    or if they are not candidates for local_alloc and don't vary.  */
579*404b540aSrobert 
580*404b540aSrobert static int
equiv_init_movable_p(rtx x,int regno)581*404b540aSrobert equiv_init_movable_p (rtx x, int regno)
582*404b540aSrobert {
583*404b540aSrobert   int i, j;
584*404b540aSrobert   const char *fmt;
585*404b540aSrobert   enum rtx_code code = GET_CODE (x);
586*404b540aSrobert 
587*404b540aSrobert   switch (code)
588*404b540aSrobert     {
589*404b540aSrobert     case SET:
590*404b540aSrobert       return equiv_init_movable_p (SET_SRC (x), regno);
591*404b540aSrobert 
592*404b540aSrobert     case CC0:
593*404b540aSrobert     case CLOBBER:
594*404b540aSrobert       return 0;
595*404b540aSrobert 
596*404b540aSrobert     case PRE_INC:
597*404b540aSrobert     case PRE_DEC:
598*404b540aSrobert     case POST_INC:
599*404b540aSrobert     case POST_DEC:
600*404b540aSrobert     case PRE_MODIFY:
601*404b540aSrobert     case POST_MODIFY:
602*404b540aSrobert       return 0;
603*404b540aSrobert 
604*404b540aSrobert     case REG:
605*404b540aSrobert       return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
606*404b540aSrobert 	      && reg_equiv[REGNO (x)].replace)
607*404b540aSrobert 	     || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
608*404b540aSrobert 
609*404b540aSrobert     case UNSPEC_VOLATILE:
610*404b540aSrobert       return 0;
611*404b540aSrobert 
612*404b540aSrobert     case ASM_OPERANDS:
613*404b540aSrobert       if (MEM_VOLATILE_P (x))
614*404b540aSrobert 	return 0;
615*404b540aSrobert 
616*404b540aSrobert       /* Fall through.  */
617*404b540aSrobert 
618*404b540aSrobert     default:
619*404b540aSrobert       break;
620*404b540aSrobert     }
621*404b540aSrobert 
622*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
623*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
624*404b540aSrobert     switch (fmt[i])
625*404b540aSrobert       {
626*404b540aSrobert       case 'e':
627*404b540aSrobert 	if (! equiv_init_movable_p (XEXP (x, i), regno))
628*404b540aSrobert 	  return 0;
629*404b540aSrobert 	break;
630*404b540aSrobert       case 'E':
631*404b540aSrobert 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
632*404b540aSrobert 	  if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
633*404b540aSrobert 	    return 0;
634*404b540aSrobert 	break;
635*404b540aSrobert       }
636*404b540aSrobert 
637*404b540aSrobert   return 1;
638*404b540aSrobert }
639*404b540aSrobert 
640*404b540aSrobert /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true.  */
641*404b540aSrobert 
642*404b540aSrobert static int
contains_replace_regs(rtx x)643*404b540aSrobert contains_replace_regs (rtx x)
644*404b540aSrobert {
645*404b540aSrobert   int i, j;
646*404b540aSrobert   const char *fmt;
647*404b540aSrobert   enum rtx_code code = GET_CODE (x);
648*404b540aSrobert 
649*404b540aSrobert   switch (code)
650*404b540aSrobert     {
651*404b540aSrobert     case CONST_INT:
652*404b540aSrobert     case CONST:
653*404b540aSrobert     case LABEL_REF:
654*404b540aSrobert     case SYMBOL_REF:
655*404b540aSrobert     case CONST_DOUBLE:
656*404b540aSrobert     case CONST_VECTOR:
657*404b540aSrobert     case PC:
658*404b540aSrobert     case CC0:
659*404b540aSrobert     case HIGH:
660*404b540aSrobert       return 0;
661*404b540aSrobert 
662*404b540aSrobert     case REG:
663*404b540aSrobert       return reg_equiv[REGNO (x)].replace;
664*404b540aSrobert 
665*404b540aSrobert     default:
666*404b540aSrobert       break;
667*404b540aSrobert     }
668*404b540aSrobert 
669*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
670*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
671*404b540aSrobert     switch (fmt[i])
672*404b540aSrobert       {
673*404b540aSrobert       case 'e':
674*404b540aSrobert 	if (contains_replace_regs (XEXP (x, i)))
675*404b540aSrobert 	  return 1;
676*404b540aSrobert 	break;
677*404b540aSrobert       case 'E':
678*404b540aSrobert 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
679*404b540aSrobert 	  if (contains_replace_regs (XVECEXP (x, i, j)))
680*404b540aSrobert 	    return 1;
681*404b540aSrobert 	break;
682*404b540aSrobert       }
683*404b540aSrobert 
684*404b540aSrobert   return 0;
685*404b540aSrobert }
686*404b540aSrobert 
687*404b540aSrobert /* TRUE if X references a memory location that would be affected by a store
688*404b540aSrobert    to MEMREF.  */
689*404b540aSrobert 
690*404b540aSrobert static int
memref_referenced_p(rtx memref,rtx x)691*404b540aSrobert memref_referenced_p (rtx memref, rtx x)
692*404b540aSrobert {
693*404b540aSrobert   int i, j;
694*404b540aSrobert   const char *fmt;
695*404b540aSrobert   enum rtx_code code = GET_CODE (x);
696*404b540aSrobert 
697*404b540aSrobert   switch (code)
698*404b540aSrobert     {
699*404b540aSrobert     case CONST_INT:
700*404b540aSrobert     case CONST:
701*404b540aSrobert     case LABEL_REF:
702*404b540aSrobert     case SYMBOL_REF:
703*404b540aSrobert     case CONST_DOUBLE:
704*404b540aSrobert     case CONST_VECTOR:
705*404b540aSrobert     case PC:
706*404b540aSrobert     case CC0:
707*404b540aSrobert     case HIGH:
708*404b540aSrobert     case LO_SUM:
709*404b540aSrobert       return 0;
710*404b540aSrobert 
711*404b540aSrobert     case REG:
712*404b540aSrobert       return (reg_equiv[REGNO (x)].replacement
713*404b540aSrobert 	      && memref_referenced_p (memref,
714*404b540aSrobert 				      reg_equiv[REGNO (x)].replacement));
715*404b540aSrobert 
716*404b540aSrobert     case MEM:
717*404b540aSrobert       if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
718*404b540aSrobert 	return 1;
719*404b540aSrobert       break;
720*404b540aSrobert 
721*404b540aSrobert     case SET:
722*404b540aSrobert       /* If we are setting a MEM, it doesn't count (its address does), but any
723*404b540aSrobert 	 other SET_DEST that has a MEM in it is referencing the MEM.  */
724*404b540aSrobert       if (MEM_P (SET_DEST (x)))
725*404b540aSrobert 	{
726*404b540aSrobert 	  if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
727*404b540aSrobert 	    return 1;
728*404b540aSrobert 	}
729*404b540aSrobert       else if (memref_referenced_p (memref, SET_DEST (x)))
730*404b540aSrobert 	return 1;
731*404b540aSrobert 
732*404b540aSrobert       return memref_referenced_p (memref, SET_SRC (x));
733*404b540aSrobert 
734*404b540aSrobert     default:
735*404b540aSrobert       break;
736*404b540aSrobert     }
737*404b540aSrobert 
738*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
739*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
740*404b540aSrobert     switch (fmt[i])
741*404b540aSrobert       {
742*404b540aSrobert       case 'e':
743*404b540aSrobert 	if (memref_referenced_p (memref, XEXP (x, i)))
744*404b540aSrobert 	  return 1;
745*404b540aSrobert 	break;
746*404b540aSrobert       case 'E':
747*404b540aSrobert 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
748*404b540aSrobert 	  if (memref_referenced_p (memref, XVECEXP (x, i, j)))
749*404b540aSrobert 	    return 1;
750*404b540aSrobert 	break;
751*404b540aSrobert       }
752*404b540aSrobert 
753*404b540aSrobert   return 0;
754*404b540aSrobert }
755*404b540aSrobert 
756*404b540aSrobert /* TRUE if some insn in the range (START, END] references a memory location
757*404b540aSrobert    that would be affected by a store to MEMREF.  */
758*404b540aSrobert 
759*404b540aSrobert static int
memref_used_between_p(rtx memref,rtx start,rtx end)760*404b540aSrobert memref_used_between_p (rtx memref, rtx start, rtx end)
761*404b540aSrobert {
762*404b540aSrobert   rtx insn;
763*404b540aSrobert 
764*404b540aSrobert   for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
765*404b540aSrobert        insn = NEXT_INSN (insn))
766*404b540aSrobert     {
767*404b540aSrobert       if (!INSN_P (insn))
768*404b540aSrobert 	continue;
769*404b540aSrobert 
770*404b540aSrobert       if (memref_referenced_p (memref, PATTERN (insn)))
771*404b540aSrobert 	return 1;
772*404b540aSrobert 
773*404b540aSrobert       /* Nonconst functions may access memory.  */
774*404b540aSrobert       if (CALL_P (insn)
775*404b540aSrobert 	  && (! CONST_OR_PURE_CALL_P (insn)
776*404b540aSrobert 	      || pure_call_p (insn)))
777*404b540aSrobert 	return 1;
778*404b540aSrobert     }
779*404b540aSrobert 
780*404b540aSrobert   return 0;
781*404b540aSrobert }
782*404b540aSrobert 
783*404b540aSrobert /* Find registers that are equivalent to a single value throughout the
784*404b540aSrobert    compilation (either because they can be referenced in memory or are set once
785*404b540aSrobert    from a single constant).  Lower their priority for a register.
786*404b540aSrobert 
787*404b540aSrobert    If such a register is only referenced once, try substituting its value
788*404b540aSrobert    into the using insn.  If it succeeds, we can eliminate the register
789*404b540aSrobert    completely.
790*404b540aSrobert 
791*404b540aSrobert    Initialize the REG_EQUIV_INIT array of initializing insns.  */
792*404b540aSrobert 
static void
update_equiv_regs (void)
{
  rtx insn;
  basic_block bb;
  int loop_depth;
  regset_head cleared_regs;	/* Pseudos whose setting insn was deleted or
				   moved; their life info is pruned at the
				   end.  */
  int clear_regnos = 0;		/* Population count of CLEARED_REGS.  */

  reg_equiv = XCNEWVEC (struct equivalence, max_regno);
  INIT_REG_SET (&cleared_regs);
  reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
  reg_equiv_init_size = max_regno;

  init_alias_analysis ();

  /* PASS 1: Scan the insns and find which registers have equivalences.
     Do this in a separate scan of the insns because (due to
     -fcse-follow-jumps) a register can be set below its use.  */
  FOR_EACH_BB (bb)
    {
      loop_depth = bb->loop_depth;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  rtx note;
	  rtx set;
	  rtx dest, src;
	  int regno;

	  if (! INSN_P (insn))
	    continue;

	  /* An auto-incremented register can have no single equivalent
	     value.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_INC)
	      no_equiv (XEXP (note, 0), note, NULL);

	  set = single_set (insn);

	  /* If this insn contains more (or less) than a single SET,
	     only mark all destinations as having no known equivalence.  */
	  if (set == 0)
	    {
	      note_stores (PATTERN (insn), no_equiv, NULL);
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    {
	      int i;

	      /* Invalidate every destination set in parallel with SET.  */
	      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
		{
		  rtx part = XVECEXP (PATTERN (insn), 0, i);
		  if (part != set)
		    note_stores (part, no_equiv, NULL);
		}
	    }

	  dest = SET_DEST (set);
	  src = SET_SRC (set);

	  /* See if this is setting up the equivalence between an argument
	     register and its stack slot.  */
	  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note)
	    {
	      gcc_assert (REG_P (dest));
	      regno = REGNO (dest);

	      /* Note that we don't want to clear reg_equiv_init even if there
		 are multiple sets of this register.  */
	      reg_equiv[regno].is_arg_equivalence = 1;

	      /* Record for reload that this is an equivalencing insn.  */
	      if (rtx_equal_p (src, XEXP (note, 0)))
		reg_equiv_init[regno]
		  = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);

	      /* Continue normally in case this is a candidate for
		 replacements.  */
	    }

	  if (!optimize)
	    continue;

	  /* We only handle the case of a pseudo register being set
	     once, or always to the same value.  */
	  /* ??? The mn10200 port breaks if we add equivalences for
	     values that need an ADDRESS_REGS register and set them equivalent
	     to a MEM of a pseudo.  The actual problem is in the over-conservative
	     handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
	     calculate_needs, but we traditionally work around this problem
	     here by rejecting equivalences when the destination is in a register
	     that's likely spilled.  This is fragile, of course, since the
	     preferred class of a pseudo depends on all instructions that set
	     or use it.  */

	  if (!REG_P (dest)
	      || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
	      || reg_equiv[regno].init_insns == const0_rtx
	      || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
		  && MEM_P (src) && ! reg_equiv[regno].is_arg_equivalence))
	    {
	      /* This might be setting a SUBREG of a pseudo, a pseudo that is
		 also set somewhere else to a constant.  */
	      note_stores (set, no_equiv, NULL);
	      continue;
	    }

	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

	  /* cse sometimes generates function invariants, but doesn't put a
	     REG_EQUAL note on the insn.  Since this note would be redundant,
	     there's no point creating it earlier than here.  */
	  if (! note && ! rtx_varies_p (src, 0))
	    note = set_unique_reg_note (insn, REG_EQUAL, src);

	  /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
	     since it represents a function call */
	  if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
	    note = NULL_RTX;

	  /* Multiple sets are only acceptable when every set agrees on
	     the same invariant value.  */
	  if (REG_N_SETS (regno) != 1
	      && (! note
		  || rtx_varies_p (XEXP (note, 0), 0)
		  || (reg_equiv[regno].replacement
		      && ! rtx_equal_p (XEXP (note, 0),
					reg_equiv[regno].replacement))))
	    {
	      no_equiv (dest, set, NULL);
	      continue;
	    }
	  /* Record this insn as initializing this register.  */
	  reg_equiv[regno].init_insns
	    = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);

	  /* If this register is known to be equal to a constant, record that
	     it is always equivalent to the constant.
	     Promoting the note from REG_EQUAL to REG_EQUIV makes the
	     equivalence hold everywhere, not just at this insn.  */
	  if (note && ! rtx_varies_p (XEXP (note, 0), 0))
	    PUT_MODE (note, (enum machine_mode) REG_EQUIV);

	  /* If this insn introduces a "constant" register, decrease the priority
	     of that register.  Record this insn if the register is only used once
	     more and the equivalence value is the same as our source.

	     The latter condition is checked for two reasons:  First, it is an
	     indication that it may be more efficient to actually emit the insn
	     as written (if no registers are available, reload will substitute
	     the equivalence).  Secondly, it avoids problems with any registers
	     dying in this insn whose death notes would be missed.

	     If we don't have a REG_EQUIV note, see if this insn is loading
	     a register used only in one basic block from a MEM.  If so, and the
	     MEM remains unchanged for the life of the register, add a REG_EQUIV
	     note.  */

	  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);

	  if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
	      && MEM_P (SET_SRC (set))
	      && validate_equiv_mem (insn, dest, SET_SRC (set)))
	    REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
							 REG_NOTES (insn));

	  if (note)
	    {
	      int regno = REGNO (dest);
	      rtx x = XEXP (note, 0);

	      /* If we haven't done so, record for reload that this is an
		 equivalencing insn.  */
	      if (!reg_equiv[regno].is_arg_equivalence)
		reg_equiv_init[regno]
		  = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);

	      /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
		 We might end up substituting the LABEL_REF for uses of the
		 pseudo here or later.  That kind of transformation may turn an
		 indirect jump into a direct jump, in which case we must rerun the
		 jump optimizer to ensure that the JUMP_LABEL fields are valid.  */
	      if (GET_CODE (x) == LABEL_REF
		  || (GET_CODE (x) == CONST
		      && GET_CODE (XEXP (x, 0)) == PLUS
		      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)))
		recorded_label_ref = 1;

	      reg_equiv[regno].replacement = x;
	      reg_equiv[regno].src_p = &SET_SRC (set);
	      reg_equiv[regno].loop_depth = loop_depth;

	      /* Don't mess with things live during setjmp.  */
	      if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
		{
		  /* Note that the statement below does not affect the priority
		     in local-alloc!  */
		  REG_LIVE_LENGTH (regno) *= 2;

		  /* If the register is referenced exactly twice, meaning it is
		     set once and used once, indicate that the reference may be
		     replaced by the equivalence we computed above.  Do this
		     even if the register is only used in one block so that
		     dependencies can be handled where the last register is
		     used in a different block (i.e. HIGH / LO_SUM sequences)
		     and to reduce the number of registers alive across
		     calls.  */

		  if (REG_N_REFS (regno) == 2
		      && (rtx_equal_p (x, src)
			  || ! equiv_init_varies_p (src))
		      && NONJUMP_INSN_P (insn)
		      && equiv_init_movable_p (PATTERN (insn), regno))
		    reg_equiv[regno].replace = 1;
		}
	    }
	}
    }

  if (!optimize)
    goto out;

  /* PASS 2: A second pass, to gather additional equivalences with memory.
     This needs to be done after we know which registers we are going to
     replace.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx set, src, dest;
      unsigned regno;

      if (! INSN_P (insn))
	continue;

      set = single_set (insn);
      if (! set)
	continue;

      dest = SET_DEST (set);
      src = SET_SRC (set);

      /* If this sets a MEM to the contents of a REG that is only used
	 in a single basic block, see if the register is always equivalent
	 to that memory location and if moving the store from INSN to the
	 insn that set REG is safe.  If so, put a REG_EQUIV note on the
	 initializing insn.

	 Don't add a REG_EQUIV note if the insn already has one.  The existing
	 REG_EQUIV is likely more useful than the one we are adding.

	 If one of the regs in the address has reg_equiv[REGNO].replace set,
	 then we can't add this REG_EQUIV note.  The reg_equiv[REGNO].replace
	 optimization may move the set of this register immediately before
	 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
	 the mention in the REG_EQUIV note would be to an uninitialized
	 pseudo.  */

      if (MEM_P (dest) && REG_P (src)
	  && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
	  && REG_BASIC_BLOCK (regno) >= 0
	  && REG_N_SETS (regno) == 1
	  && reg_equiv[regno].init_insns != 0
	  && reg_equiv[regno].init_insns != const0_rtx
	  && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
			      REG_EQUIV, NULL_RTX)
	  && ! contains_replace_regs (XEXP (dest, 0)))
	{
	  rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
	  if (validate_equiv_mem (init_insn, src, dest)
	      && ! memref_used_between_p (dest, init_insn, insn))
	    {
	      REG_NOTES (init_insn)
		= gen_rtx_EXPR_LIST (REG_EQUIV, dest,
				     REG_NOTES (init_insn));
	      /* This insn makes the equivalence, not the one initializing
		 the register.  */
	      reg_equiv_init[regno]
		= gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
	    }
	}
    }

  /* PASS 3: Now scan all regs killed in an insn to see if any of them are
     registers only used that once.  If so, see if we can replace the
     reference with the equivalent form.  If we can, delete the
     initializing reference and this register will go away.  If we
     can't replace the reference, and the initializing reference is
     within the same loop (or in an inner loop), then move the register
     initialization just before the use, so that they are in the same
     basic block.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      loop_depth = bb->loop_depth;
      /* Walk backwards so earlier substitutions don't disturb the
	 insns still to be visited.  */
      for (insn = BB_END (bb);
	   insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx link;

	  if (! INSN_P (insn))
	    continue;

	  /* Don't substitute into a non-local goto, this confuses CFG.  */
	  if (JUMP_P (insn)
	      && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    continue;

	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    {
	      if (REG_NOTE_KIND (link) == REG_DEAD
		  /* Make sure this insn still refers to the register.  */
		  && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
		{
		  int regno = REGNO (XEXP (link, 0));
		  rtx equiv_insn;

		  if (! reg_equiv[regno].replace
		      || reg_equiv[regno].loop_depth < loop_depth)
		    continue;

		  /* reg_equiv[REGNO].replace gets set only when
		     REG_N_REFS[REGNO] is 2, i.e. the register is set
		     once and used once.  (If it were only set, but not used,
		     flow would have deleted the setting insns.)  Hence
		     there can only be one insn in reg_equiv[REGNO].init_insns.  */
		  gcc_assert (reg_equiv[regno].init_insns
			      && !XEXP (reg_equiv[regno].init_insns, 1));
		  equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);

		  /* We may not move instructions that can throw, since
		     that changes basic block boundaries and we are not
		     prepared to adjust the CFG to match.  */
		  if (can_throw_internal (equiv_insn))
		    continue;

		  if (asm_noperands (PATTERN (equiv_insn)) < 0
		      && validate_replace_rtx (regno_reg_rtx[regno],
					       *(reg_equiv[regno].src_p), insn))
		    {
		      /* Substitution succeeded: the pseudo disappears and
			 EQUIV_INSN can be deleted outright.  */
		      rtx equiv_link;
		      rtx last_link;
		      rtx note;

		      /* Find the last note.  */
		      for (last_link = link; XEXP (last_link, 1);
			   last_link = XEXP (last_link, 1))
			;

		      /* Append the REG_DEAD notes from equiv_insn.  */
		      equiv_link = REG_NOTES (equiv_insn);
		      while (equiv_link)
			{
			  note = equiv_link;
			  equiv_link = XEXP (equiv_link, 1);
			  if (REG_NOTE_KIND (note) == REG_DEAD)
			    {
			      remove_note (equiv_insn, note);
			      XEXP (last_link, 1) = note;
			      XEXP (note, 1) = NULL_RTX;
			      last_link = note;
			    }
			}

		      remove_death (regno, insn);
		      REG_N_REFS (regno) = 0;
		      REG_FREQ (regno) = 0;
		      delete_insn (equiv_insn);

		      reg_equiv[regno].init_insns
			= XEXP (reg_equiv[regno].init_insns, 1);

		      /* Remember to clear REGNO from all basic block's live
			 info.  */
		      SET_REGNO_REG_SET (&cleared_regs, regno);
		      clear_regnos++;
		      reg_equiv_init[regno] = NULL_RTX;
		    }
		  /* Move the initialization of the register to just before
		     INSN.  Update the flow information.  */
		  else if (PREV_INSN (insn) != equiv_insn)
		    {
		      rtx new_insn;

		      new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
		      REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
		      REG_NOTES (equiv_insn) = 0;

		      /* Make sure this insn is recognized before
			 reload begins, otherwise
			 eliminate_regs_in_insn will die.  */
		      INSN_CODE (new_insn) = INSN_CODE (equiv_insn);

		      delete_insn (equiv_insn);

		      XEXP (reg_equiv[regno].init_insns, 0) = new_insn;

		      /* The register now lives entirely in this block,
			 crosses no calls, and is live only briefly.  */
		      REG_BASIC_BLOCK (regno) = bb->index;
		      REG_N_CALLS_CROSSED (regno) = 0;
		      REG_N_THROWING_CALLS_CROSSED (regno) = 0;
		      REG_LIVE_LENGTH (regno) = 2;

		      if (insn == BB_HEAD (bb))
			BB_HEAD (bb) = PREV_INSN (insn);

		      /* Remember to clear REGNO from all basic block's live
			 info.  */
		      SET_REGNO_REG_SET (&cleared_regs, regno);
		      clear_regnos++;
		      reg_equiv_init[regno]
			= gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
		    }
		}
	    }
	}
    }

  /* Clear all dead REGNOs from all basic block's live info.
     Two strategies: for many regs, mask each block's regsets with one
     AND_COMPL; for a few, clear each bit individually.  */
  if (clear_regnos)
    {
      unsigned j;

      if (clear_regnos > 8)
	{
	  FOR_EACH_BB (bb)
	    {
	      AND_COMPL_REG_SET (bb->il.rtl->global_live_at_start,
			         &cleared_regs);
	      AND_COMPL_REG_SET (bb->il.rtl->global_live_at_end,
			         &cleared_regs);
	    }
	}
      else
	{
	  reg_set_iterator rsi;
	  EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j, rsi)
	    {
	      FOR_EACH_BB (bb)
		{
		  CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start, j);
		  CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_end, j);
		}
	    }
	}
    }

  out:
  /* Clean up.  */
  end_alias_analysis ();
  CLEAR_REG_SET (&cleared_regs);
  free (reg_equiv);
}
1243*404b540aSrobert 
1244*404b540aSrobert /* Mark REG as having no known equivalence.
1245*404b540aSrobert    Some instructions might have been processed before and furnished
1246*404b540aSrobert    with REG_EQUIV notes for this register; these notes will have to be
1247*404b540aSrobert    removed.
1248*404b540aSrobert    STORE is the piece of RTL that does the non-constant / conflicting
1249*404b540aSrobert    assignment - a SET, CLOBBER or REG_INC note.  It is currently not used,
1250*404b540aSrobert    but needs to be there because this function is called from note_stores.  */
1251*404b540aSrobert static void
no_equiv(rtx reg,rtx store ATTRIBUTE_UNUSED,void * data ATTRIBUTE_UNUSED)1252*404b540aSrobert no_equiv (rtx reg, rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1253*404b540aSrobert {
1254*404b540aSrobert   int regno;
1255*404b540aSrobert   rtx list;
1256*404b540aSrobert 
1257*404b540aSrobert   if (!REG_P (reg))
1258*404b540aSrobert     return;
1259*404b540aSrobert   regno = REGNO (reg);
1260*404b540aSrobert   list = reg_equiv[regno].init_insns;
1261*404b540aSrobert   if (list == const0_rtx)
1262*404b540aSrobert     return;
1263*404b540aSrobert   reg_equiv[regno].init_insns = const0_rtx;
1264*404b540aSrobert   reg_equiv[regno].replacement = NULL_RTX;
1265*404b540aSrobert   /* This doesn't matter for equivalences made for argument registers, we
1266*404b540aSrobert      should keep their initialization insns.  */
1267*404b540aSrobert   if (reg_equiv[regno].is_arg_equivalence)
1268*404b540aSrobert     return;
1269*404b540aSrobert   reg_equiv_init[regno] = NULL_RTX;
1270*404b540aSrobert   for (; list; list =  XEXP (list, 1))
1271*404b540aSrobert     {
1272*404b540aSrobert       rtx insn = XEXP (list, 0);
1273*404b540aSrobert       remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1274*404b540aSrobert     }
1275*404b540aSrobert }
1276*404b540aSrobert 
1277*404b540aSrobert /* Allocate hard regs to the pseudo regs used only within block number B.
1278*404b540aSrobert    Only the pseudos that die but once can be handled.  */
1279*404b540aSrobert 
static void
block_alloc (int b)
{
  int i, q;
  rtx insn;
  rtx note, hard_reg;
  int insn_number = 0;
  int insn_count = 0;
  int max_uid = get_max_uid ();
  int *qty_order;
  /* Regno of a pseudo tied inside a REG_NO_CONFLICT sequence; its
     REG_DEAD notes are ignored while the sequence is active.  -1 when
     no such combination is in effect.  */
  int no_conflict_combined_regno = -1;

  /* Count the instructions in the basic block.  */

  insn = BB_END (BASIC_BLOCK (b));
  while (1)
    {
      if (!NOTE_P (insn))
	{
	  ++insn_count;
	  /* Sanity check: there can never be more insns than UIDs
	     allocated in the function.  */
	  gcc_assert (insn_count <= max_uid);
	}
      if (insn == BB_HEAD (BASIC_BLOCK (b)))
	break;
      insn = PREV_INSN (insn);
    }

  /* +2 to leave room for a post_mark_life at the last insn and for
     the birth of a CLOBBER in the first insn.  */
  regs_live_at = XCNEWVEC (HARD_REG_SET, 2 * insn_count + 2);

  /* Initialize table of hardware registers currently live.  */

  REG_SET_TO_HARD_REG_SET (regs_live,
		  	   BASIC_BLOCK (b)->il.rtl->global_live_at_start);

  /* This loop scans the instructions of the basic block
     and assigns quantities to registers.
     It computes which registers to tie.  */

  insn = BB_HEAD (BASIC_BLOCK (b));
  while (1)
    {
      if (!NOTE_P (insn))
	insn_number++;

      if (INSN_P (insn))
	{
	  rtx link, set;
	  int win = 0;
	  rtx r0, r1 = NULL_RTX;
	  int combined_regno = -1;
	  int i;

	  this_insn_number = insn_number;
	  this_insn = insn;

	  extract_insn (insn);
	  which_alternative = -1;

	  /* Is this insn suitable for tying two registers?
	     If so, try doing that.
	     Suitable insns are those with at least two operands and where
	     operand 0 is an output that is a register that is not
	     earlyclobber.

	     We can tie operand 0 with some operand that dies in this insn.
	     First look for operands that are required to be in the same
	     register as operand 0.  If we find such, only try tying that
	     operand or one that can be put into that operand if the
	     operation is commutative.  If we don't find an operand
	     that is required to be in the same register as operand 0,
	     we can tie with any operand.

	     Subregs in place of regs are also ok.

	     If tying is done, WIN is set nonzero.  */

	  if (optimize
	      && recog_data.n_operands > 1
	      && recog_data.constraints[0][0] == '='
	      && recog_data.constraints[0][1] != '&')
	    {
	      /* If non-negative, is an operand that must match operand 0.  */
	      int must_match_0 = -1;
	      /* Counts number of alternatives that require a match with
		 operand 0.  */
	      int n_matching_alts = 0;

	      for (i = 1; i < recog_data.n_operands; i++)
		{
		  const char *p = recog_data.constraints[i];
		  int this_match = requires_inout (p);

		  n_matching_alts += this_match;
		  if (this_match == recog_data.n_alternatives)
		    must_match_0 = i;
		}

	      r0 = recog_data.operand[0];
	      for (i = 1; i < recog_data.n_operands; i++)
		{
		  /* Skip this operand if we found an operand that
		     must match operand 0 and this operand isn't it
		     and can't be made to be it by commutativity.  */

		  if (must_match_0 >= 0 && i != must_match_0
		      && ! (i == must_match_0 + 1
			    && recog_data.constraints[i-1][0] == '%')
		      && ! (i == must_match_0 - 1
			    && recog_data.constraints[i][0] == '%'))
		    continue;

		  /* Likewise if each alternative has some operand that
		     must match operand zero.  In that case, skip any
		     operand that doesn't list operand 0 since we know that
		     the operand always conflicts with operand 0.  We
		     ignore commutativity in this case to keep things simple.  */
		  if (n_matching_alts == recog_data.n_alternatives
		      && 0 == requires_inout (recog_data.constraints[i]))
		    continue;

		  r1 = recog_data.operand[i];

		  /* If the operand is an address, find a register in it.
		     There may be more than one register, but we only try one
		     of them.  */
		  if (recog_data.constraints[i][0] == 'p'
		      || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
						   recog_data.constraints[i]))
		    while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
		      r1 = XEXP (r1, 0);

		  /* Avoid making a call-saved register unnecessarily
                     clobbered.  */
		  hard_reg = get_hard_reg_initial_reg (cfun, r1);
		  if (hard_reg != NULL_RTX)
		    {
		      if (REG_P (hard_reg)
			  && REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
			  && !call_used_regs[REGNO (hard_reg)])
			continue;
		    }

		  if (REG_P (r0) || GET_CODE (r0) == SUBREG)
		    {
		      /* We have two priorities for hard register preferences.
			 If we have a move insn or an insn whose first input
			 can only be in the same register as the output, give
			 priority to an equivalence found from that insn.  */
		      int may_save_copy
			= (r1 == recog_data.operand[i] && must_match_0 >= 0);

		      if (REG_P (r1) || GET_CODE (r1) == SUBREG)
			win = combine_regs (r1, r0, may_save_copy,
					    insn_number, insn, 0);
		    }
		  if (win)
		    break;
		}
	    }

	  /* Recognize an insn sequence with an ultimate result
	     which can safely overlap one of the inputs.
	     The sequence begins with a CLOBBER of its result,
	     and ends with an insn that copies the result to itself
	     and has a REG_EQUAL note for an equivalent formula.
	     That note indicates what the inputs are.
	     The result and the input can overlap if each insn in
	     the sequence either doesn't mention the input
	     or has a REG_NO_CONFLICT note to inhibit the conflict.

	     We do the combining test at the CLOBBER so that the
	     destination register won't have had a quantity number
	     assigned, since that would prevent combining.  */

	  if (optimize
	      && GET_CODE (PATTERN (insn)) == CLOBBER
	      && (r0 = XEXP (PATTERN (insn), 0),
		  REG_P (r0))
	      && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
	      && XEXP (link, 0) != 0
	      && NONJUMP_INSN_P (XEXP (link, 0))
	      && (set = single_set (XEXP (link, 0))) != 0
	      && SET_DEST (set) == r0 && SET_SRC (set) == r0
	      && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
					NULL_RTX)) != 0)
	    {
	      if (r1 = XEXP (note, 0), REG_P (r1)
		  /* Check that we have such a sequence.  */
		  && no_conflict_p (insn, r0, r1))
		win = combine_regs (r1, r0, 1, insn_number, insn, 1);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
		       && (r1 = XEXP (XEXP (note, 0), 0),
			   REG_P (r1) || GET_CODE (r1) == SUBREG)
		       && no_conflict_p (insn, r0, r1))
		win = combine_regs (r1, r0, 0, insn_number, insn, 1);

	      /* Here we care if the operation to be computed is
		 commutative.  */
	      else if (COMMUTATIVE_P (XEXP (note, 0))
		       && (r1 = XEXP (XEXP (note, 0), 1),
			   (REG_P (r1) || GET_CODE (r1) == SUBREG))
		       && no_conflict_p (insn, r0, r1))
		win = combine_regs (r1, r0, 0, insn_number, insn, 1);

	      /* If we did combine something, show the register number
		 in question so that we know to ignore its death.  */
	      if (win)
		no_conflict_combined_regno = REGNO (r1);
	    }

	  /* If registers were just tied, set COMBINED_REGNO
	     to the number of the register used in this insn
	     that was tied to the register set in this insn.
	     This register's qty should not be "killed".  */

	  if (win)
	    {
	      while (GET_CODE (r1) == SUBREG)
		r1 = SUBREG_REG (r1);
	      combined_regno = REGNO (r1);
	    }

	  /* Mark the death of everything that dies in this instruction,
	     except for anything that was just combined.  */

	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) == REG_DEAD
		&& REG_P (XEXP (link, 0))
		&& combined_regno != (int) REGNO (XEXP (link, 0))
		&& (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
		    || ! find_reg_note (insn, REG_NO_CONFLICT,
					XEXP (link, 0))))
	      wipe_dead_reg (XEXP (link, 0), 0);

	  /* Allocate qty numbers for all registers local to this block
	     that are born (set) in this instruction.
	     A pseudo that already has a qty is not changed.  */

	  note_stores (PATTERN (insn), reg_is_set, NULL);

	  /* If anything is set in this insn and then unused, mark it as dying
	     after this insn, so it will conflict with our outputs.  This
	     can't match with something that combined, and it doesn't matter
	     if it did.  Do this after the calls to reg_is_set since these
	     die after, not during, the current insn.  */

	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) == REG_UNUSED
		&& REG_P (XEXP (link, 0)))
	      wipe_dead_reg (XEXP (link, 0), 1);

	  /* If this is an insn that has a REG_RETVAL note pointing at a
	     CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
	     block, so clear any register number that combined within it.  */
	  if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
	      && NONJUMP_INSN_P (XEXP (note, 0))
	      && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
	    no_conflict_combined_regno = -1;
	}

      /* Set the registers live after INSN_NUMBER.  Note that we never
	 record the registers live before the block's first insn, since no
	 pseudos we care about are live before that insn.  */

      IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
      IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);

      if (insn == BB_END (BASIC_BLOCK (b)))
	break;

      insn = NEXT_INSN (insn);
    }

  /* Now every register that is local to this basic block
     should have been given a quantity, or else -1 meaning ignore it.
     Every quantity should have a known birth and death.

     Order the qtys so we assign them registers in order of the
     number of suggested registers they need so we allocate those with
     the most restrictive needs first.  */

  qty_order = XNEWVEC (int, next_qty);
  for (i = 0; i < next_qty; i++)
    qty_order[i] = i;

/* Swap qty_order[I1] and qty_order[I2], using the outer variable I
   as scratch.  */
#define EXCHANGE(I1, I2)  \
  { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }

  switch (next_qty)
    {
    case 3:
      /* Make qty_order[2] be the one to allocate last.  */
      if (qty_sugg_compare (0, 1) > 0)
	EXCHANGE (0, 1);
      if (qty_sugg_compare (1, 2) > 0)
	EXCHANGE (2, 1);

      /* ... Fall through ...  */
    case 2:
      /* Put the best one to allocate in qty_order[0].  */
      if (qty_sugg_compare (0, 1) > 0)
	EXCHANGE (0, 1);

      /* ... Fall through ...  */

    case 1:
    case 0:
      /* Nothing to do here.  */
      break;

    default:
      qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
    }

  /* Try to put each quantity in a suggested physical register, if it has one.
     This may cause registers to be allocated that otherwise wouldn't be, but
     this seems acceptable in local allocation (unlike global allocation).  */
  for (i = 0; i < next_qty; i++)
    {
      q = qty_order[i];
      if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
	qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
					 0, 1, qty[q].birth, qty[q].death);
      else
	qty[q].phys_reg = -1;
    }

  /* Order the qtys so we assign them registers in order of
     decreasing length of life.  Normally call qsort, but if we
     have only a very small number of quantities, sort them ourselves.  */

  for (i = 0; i < next_qty; i++)
    qty_order[i] = i;

/* Swap qty_order[I1] and qty_order[I2], using the outer variable I
   as scratch.  */
#define EXCHANGE(I1, I2)  \
  { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }

  switch (next_qty)
    {
    case 3:
      /* Make qty_order[2] be the one to allocate last.  */
      if (qty_compare (0, 1) > 0)
	EXCHANGE (0, 1);
      if (qty_compare (1, 2) > 0)
	EXCHANGE (2, 1);

      /* ... Fall through ...  */
    case 2:
      /* Put the best one to allocate in qty_order[0].  */
      if (qty_compare (0, 1) > 0)
	EXCHANGE (0, 1);

      /* ... Fall through ...  */

    case 1:
    case 0:
      /* Nothing to do here.  */
      break;

    default:
      qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
    }

  /* Now for each qty that is not a hardware register,
     look for a hardware register to put it in.
     First try the register class that is cheapest for this qty,
     if there is more than one class.  */

  for (i = 0; i < next_qty; i++)
    {
      q = qty_order[i];
      if (qty[q].phys_reg < 0)
	{
#ifdef INSN_SCHEDULING
	  /* These values represent the adjusted lifetime of a qty so
	     that it conflicts with qtys which appear near the start/end
	     of this qty's lifetime.

	     The purpose behind extending the lifetime of this qty is to
	     discourage the register allocator from creating false
	     dependencies.

	     The adjustment value is chosen to indicate that this qty
	     conflicts with all the qtys in the instructions immediately
	     before and after the lifetime of this qty.

	     Experiments have shown that higher values tend to hurt
	     overall code performance.

	     If allocation using the extended lifetime fails we will try
	     again with the qty's unadjusted lifetime.  */
	  int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
	  int fake_death = MIN (insn_number * 2 + 1,
				qty[q].death + 2 - qty[q].death % 2);
#endif

	  if (N_REG_CLASSES > 1)
	    {
#ifdef INSN_SCHEDULING
	      /* We try to avoid using hard registers allocated to qtys which
		 are born immediately after this qty or die immediately before
		 this qty.

		 This optimization is only appropriate when we will run
		 a scheduling pass after reload and we are not optimizing
		 for code size.  */
	      if (flag_schedule_insns_after_reload
		  && !optimize_size
		  && !SMALL_REGISTER_CLASSES)
		{
		  qty[q].phys_reg = find_free_reg (qty[q].min_class,
						   qty[q].mode, q, 0, 0,
						   fake_birth, fake_death);
		  if (qty[q].phys_reg >= 0)
		    continue;
		}
#endif
	      qty[q].phys_reg = find_free_reg (qty[q].min_class,
					       qty[q].mode, q, 0, 0,
					       qty[q].birth, qty[q].death);
	      if (qty[q].phys_reg >= 0)
		continue;
	    }

#ifdef INSN_SCHEDULING
	  /* Similarly, avoid false dependencies.  */
	  if (flag_schedule_insns_after_reload
	      && !optimize_size
	      && !SMALL_REGISTER_CLASSES
	      && qty[q].alternate_class != NO_REGS)
	    qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
					     qty[q].mode, q, 0, 0,
					     fake_birth, fake_death);
#endif
	  if (qty[q].alternate_class != NO_REGS)
	    qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
					     qty[q].mode, q, 0, 0,
					     qty[q].birth, qty[q].death);
	}
    }

  /* Now propagate the register assignments
     to the pseudo regs belonging to the qtys.  */

  for (q = 0; q < next_qty; q++)
    if (qty[q].phys_reg >= 0)
      {
	for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
	  reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
      }

  /* Clean up.  */
  free (regs_live_at);
  free (qty_order);
}
1737*404b540aSrobert 
1738*404b540aSrobert /* Compare two quantities' priority for getting real registers.
1739*404b540aSrobert    We give shorter-lived quantities higher priority.
1740*404b540aSrobert    Quantities with more references are also preferred, as are quantities that
1741*404b540aSrobert    require multiple registers.  This is the identical prioritization as
1742*404b540aSrobert    done by global-alloc.
1743*404b540aSrobert 
1744*404b540aSrobert    We used to give preference to registers with *longer* lives, but using
1745*404b540aSrobert    the same algorithm in both local- and global-alloc can speed up execution
1746*404b540aSrobert    of some programs by as much as a factor of three!  */
1747*404b540aSrobert 
1748*404b540aSrobert /* Note that the quotient will never be bigger than
1749*404b540aSrobert    the value of floor_log2 times the maximum number of
1750*404b540aSrobert    times a register can occur in one insn (surely less than 100)
1751*404b540aSrobert    weighted by frequency (max REG_FREQ_MAX).
1752*404b540aSrobert    Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1753*404b540aSrobert    QTY_CMP_PRI is also used by qty_sugg_compare.  */
1754*404b540aSrobert 
1755*404b540aSrobert #define QTY_CMP_PRI(q)		\
1756*404b540aSrobert   ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1757*404b540aSrobert 	  / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
1758*404b540aSrobert 
/* Return a value <0, 0, or >0 according to whether Q1 should be
   allocated before, with no preference to, or after Q2.  A higher
   QTY_CMP_PRI means a higher allocation priority.  */

static int
qty_compare (int q1, int q2)
{
  int pri1 = QTY_CMP_PRI (q1);
  int pri2 = QTY_CMP_PRI (q2);

  return pri2 - pri1;
}
1764*404b540aSrobert 
/* qsort comparison function wrapping qty_compare; Q1P and Q2P point
   to qty numbers.  Ties are broken by qty number so the sort order is
   fully deterministic.  */

static int
qty_compare_1 (const void *q1p, const void *q2p)
{
  const int q1 = *(const int *) q1p;
  const int q2 = *(const int *) q2p;
  int delta = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);

  /* Equal priority: fall back to the qty numbers themselves so that
     the results of qsort leave nothing to chance.  */
  return delta != 0 ? delta : q1 - q2;
}
1778*404b540aSrobert 
1779*404b540aSrobert /* Compare two quantities' priority for getting real registers.  This version
1780*404b540aSrobert    is called for quantities that have suggested hard registers.  First priority
1781*404b540aSrobert    goes to quantities that have copy preferences, then to those that have
1782*404b540aSrobert    normal preferences.  Within those groups, quantities with the lower
1783*404b540aSrobert    number of preferences have the highest priority.  Of those, we use the same
1784*404b540aSrobert    algorithm as above.  */
1785*404b540aSrobert 
1786*404b540aSrobert #define QTY_CMP_SUGG(q)		\
1787*404b540aSrobert   (qty_phys_num_copy_sugg[q]		\
1788*404b540aSrobert     ? qty_phys_num_copy_sugg[q]	\
1789*404b540aSrobert     : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
1790*404b540aSrobert 
/* Return a value <0, 0, or >0 according to whether Q1 should be
   allocated before, with no preference to, or after Q2, for quantities
   that have suggested hard registers.  Fewer suggestions means higher
   priority; ties fall back to the generic QTY_CMP_PRI ordering.  */

static int
qty_sugg_compare (int q1, int q2)
{
  int sugg_delta = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);

  if (sugg_delta != 0)
    return sugg_delta;

  return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
}
1801*404b540aSrobert 
/* qsort comparison function wrapping qty_sugg_compare; Q1P and Q2P
   point to qty numbers.  Compares first by suggestion count, then by
   generic priority, finally by qty number for determinism.  */

static int
qty_sugg_compare_1 (const void *q1p, const void *q2p)
{
  const int q1 = *(const int *) q1p;
  const int q2 = *(const int *) q2p;
  int delta;

  delta = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
  if (delta == 0)
    delta = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
  if (delta == 0)
    /* If qtys are equally good, sort by qty number, so that the
       results of qsort leave nothing to chance.  */
    delta = q1 - q2;

  return delta;
}
1819*404b540aSrobert 
1820*404b540aSrobert #undef QTY_CMP_SUGG
1821*404b540aSrobert #undef QTY_CMP_PRI
1822*404b540aSrobert 
/* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
   Returns 1 if have done so, or 0 if cannot.

   Combining registers means marking them as having the same quantity
   and adjusting the offsets within the quantity if either of
   them is a SUBREG.

   We don't actually combine a hard reg with a pseudo; instead
   we just record the hard reg as the suggestion for the pseudo's quantity.
   If we really combined them, we could lose if the pseudo lives
   across an insn that clobbers the hard reg (eg, movmem).

   ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
   there is no REG_DEAD note on INSN.  This occurs during the processing
   of REG_NO_CONFLICT blocks.

   MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
   SETREG or if the input and output must share a register.
   In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.

   There are elaborate checks for the validity of combining.  */

static int
combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
	      rtx insn, int already_dead)
{
  int ureg, sreg;
  /* Relative offset between USEDREG and SETREG within the combined
     quantity, induced by any SUBREGs wrapped around them; added to
     reg_offset[ureg] to place SREG if the combination succeeds.  */
  int offset = 0;
  /* Sizes of the two registers: in hard registers for hard regs, in
     REGMODE_NATURAL_SIZE units for pseudos.  */
  int usize, ssize;
  /* The quantity number SREG is added to on success.  */
  int sqty;

  /* Determine the numbers and sizes of registers being used.  If a subreg
     is present that does not change the entire register, don't consider
     this a copy insn.  */

  while (GET_CODE (usedreg) == SUBREG)
    {
      rtx subreg = SUBREG_REG (usedreg);

      if (REG_P (subreg))
	{
	  /* A SUBREG of a multi-word register references only part of
	     it, so the insn is not a plain register copy.  */
	  if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
	    may_save_copy = 0;

	  if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
	    offset += subreg_regno_offset (REGNO (subreg),
					   GET_MODE (subreg),
					   SUBREG_BYTE (usedreg),
					   GET_MODE (usedreg));
	  else
	    offset += (SUBREG_BYTE (usedreg)
		      / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
	}

      usedreg = subreg;
    }

  /* After stripping SUBREGs, only a bare REG can be combined.  */
  if (!REG_P (usedreg))
    return 0;

  ureg = REGNO (usedreg);
  if (ureg < FIRST_PSEUDO_REGISTER)
    usize = hard_regno_nregs[ureg][GET_MODE (usedreg)];
  else
    usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
	      + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
	     / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));

  /* Strip SUBREGs from SETREG the same way, subtracting its offset so
     that OFFSET ends up as the net displacement between the two.  */
  while (GET_CODE (setreg) == SUBREG)
    {
      rtx subreg = SUBREG_REG (setreg);

      if (REG_P (subreg))
	{
	  if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
	    may_save_copy = 0;

	  if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
	    offset -= subreg_regno_offset (REGNO (subreg),
					   GET_MODE (subreg),
					   SUBREG_BYTE (setreg),
					   GET_MODE (setreg));
	  else
	    offset -= (SUBREG_BYTE (setreg)
		      / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
	}

      setreg = subreg;
    }

  if (!REG_P (setreg))
    return 0;

  sreg = REGNO (setreg);
  if (sreg < FIRST_PSEUDO_REGISTER)
    ssize = hard_regno_nregs[sreg][GET_MODE (setreg)];
  else
    ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
	      + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
	     / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));

  /* If UREG is a pseudo-register that hasn't already been assigned a
     quantity number, it means that it is not local to this block or dies
     more than once.  In either event, we can't do anything with it.  */
  if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
      /* Do not combine registers unless one fits within the other.  */
      || (offset > 0 && usize + offset > ssize)
      || (offset < 0 && usize + offset < ssize)
      /* Do not combine with a smaller already-assigned object
	 if that smaller object is already combined with something bigger.  */
      || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
	  && usize < qty[reg_qty[ureg]].size)
      /* Can't combine if SREG is not a register we can allocate.  */
      || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
      /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
	 These have already been taken care of.  This probably wouldn't
	 combine anyway, but don't take any chances.  */
      || (ureg >= FIRST_PSEUDO_REGISTER
	  && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
      /* Don't tie something to itself.  In most cases it would make no
	 difference, but it would screw up if the reg being tied to itself
	 also dies in this insn.  */
      || ureg == sreg
      /* Don't try to connect two different hardware registers.  */
      || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
      /* Don't connect two different machine modes if they have different
	 implications as to which registers may be used.  */
      || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
    return 0;

  /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
     qty_phys_sugg for the pseudo instead of tying them.

     Return "failure" so that the lifespan of UREG is terminated here;
     that way the two lifespans will be disjoint and nothing will prevent
     the pseudo reg from being given this hard reg.  */

  if (ureg < FIRST_PSEUDO_REGISTER)
    {
      /* Allocate a quantity number so we have a place to put our
	 suggestions.  */
      if (reg_qty[sreg] == -2)
	reg_is_born (setreg, 2 * insn_number);

      if (reg_qty[sreg] >= 0)
	{
	  /* Copy suggestions are preferred; fall back to the general
	     suggestion set only if this is not a copy or the bit is
	     already recorded.  */
	  if (may_save_copy
	      && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
	    {
	      SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
	      qty_phys_num_copy_sugg[reg_qty[sreg]]++;
	    }
	  else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
	    {
	      SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
	      qty_phys_num_sugg[reg_qty[sreg]]++;
	    }
	}
      return 0;
    }

  /* Similarly for SREG a hard register and UREG a pseudo register.  */

  if (sreg < FIRST_PSEUDO_REGISTER)
    {
      if (may_save_copy
	  && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
	{
	  SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
	  qty_phys_num_copy_sugg[reg_qty[ureg]]++;
	}
      else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
	{
	  SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
	  qty_phys_num_sugg[reg_qty[ureg]]++;
	}
      return 0;
    }

  /* At this point we know that SREG and UREG are both pseudos.
     Do nothing if SREG already has a quantity or is a register that we
     don't allocate.  */
  if (reg_qty[sreg] >= -1
      /* If we are not going to let any regs live across calls,
	 don't tie a call-crossing reg to a non-call-crossing reg.  */
      || (current_function_has_nonlocal_label
	  && ((REG_N_CALLS_CROSSED (ureg) > 0)
	      != (REG_N_CALLS_CROSSED (sreg) > 0))))
    return 0;

  /* We don't already know about SREG, so tie it to UREG
     if this is the last use of UREG, provided the classes they want
     are compatible.  */

  if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
      && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
    {
      /* Add SREG to UREG's quantity.  */
      sqty = reg_qty[ureg];
      reg_qty[sreg] = sqty;
      reg_offset[sreg] = reg_offset[ureg] + offset;
      /* Link SREG at the head of the quantity's register list.  */
      reg_next_in_qty[sreg] = qty[sqty].first_reg;
      qty[sqty].first_reg = sreg;

      /* If SREG's reg class is smaller, set qty[SQTY].min_class.  */
      update_qty_class (sqty, sreg);

      /* Update info about quantity SQTY.  */
      qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
      qty[sqty].n_throwing_calls_crossed
	+= REG_N_THROWING_CALLS_CROSSED (sreg);
      qty[sqty].n_refs += REG_N_REFS (sreg);
      qty[sqty].freq += REG_FREQ (sreg);
      if (usize < ssize)
	{
	  int i;

	  /* SREG is the bigger register: rebase every member's offset so
	     SREG sits at offset 0, and let the quantity take on SREG's
	     size and mode.  */
	  for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
	    reg_offset[i] -= offset;

	  qty[sqty].size = ssize;
	  qty[sqty].mode = GET_MODE (setreg);
	}
    }
  else
    return 0;

  return 1;
}
2052*404b540aSrobert 
2053*404b540aSrobert /* Return 1 if the preferred class of REG allows it to be tied
2054*404b540aSrobert    to a quantity or register whose class is CLASS.
2055*404b540aSrobert    True if REG's reg class either contains or is contained in CLASS.  */
2056*404b540aSrobert 
2057*404b540aSrobert static int
reg_meets_class_p(int reg,enum reg_class class)2058*404b540aSrobert reg_meets_class_p (int reg, enum reg_class class)
2059*404b540aSrobert {
2060*404b540aSrobert   enum reg_class rclass = reg_preferred_class (reg);
2061*404b540aSrobert   return (reg_class_subset_p (rclass, class)
2062*404b540aSrobert 	  || reg_class_subset_p (class, rclass));
2063*404b540aSrobert }
2064*404b540aSrobert 
2065*404b540aSrobert /* Update the class of QTYNO assuming that REG is being tied to it.  */
2066*404b540aSrobert 
2067*404b540aSrobert static void
update_qty_class(int qtyno,int reg)2068*404b540aSrobert update_qty_class (int qtyno, int reg)
2069*404b540aSrobert {
2070*404b540aSrobert   enum reg_class rclass = reg_preferred_class (reg);
2071*404b540aSrobert   if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2072*404b540aSrobert     qty[qtyno].min_class = rclass;
2073*404b540aSrobert 
2074*404b540aSrobert   rclass = reg_alternate_class (reg);
2075*404b540aSrobert   if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2076*404b540aSrobert     qty[qtyno].alternate_class = rclass;
2077*404b540aSrobert }
2078*404b540aSrobert 
2079*404b540aSrobert /* Handle something which alters the value of an rtx REG.
2080*404b540aSrobert 
2081*404b540aSrobert    REG is whatever is set or clobbered.  SETTER is the rtx that
2082*404b540aSrobert    is modifying the register.
2083*404b540aSrobert 
2084*404b540aSrobert    If it is not really a register, we do nothing.
2085*404b540aSrobert    The file-global variables `this_insn' and `this_insn_number'
2086*404b540aSrobert    carry info from `block_alloc'.  */
2087*404b540aSrobert 
2088*404b540aSrobert static void
reg_is_set(rtx reg,rtx setter,void * data ATTRIBUTE_UNUSED)2089*404b540aSrobert reg_is_set (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
2090*404b540aSrobert {
2091*404b540aSrobert   /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2092*404b540aSrobert      a hard register.  These may actually not exist any more.  */
2093*404b540aSrobert 
2094*404b540aSrobert   if (GET_CODE (reg) != SUBREG
2095*404b540aSrobert       && !REG_P (reg))
2096*404b540aSrobert     return;
2097*404b540aSrobert 
2098*404b540aSrobert   /* Mark this register as being born.  If it is used in a CLOBBER, mark
2099*404b540aSrobert      it as being born halfway between the previous insn and this insn so that
2100*404b540aSrobert      it conflicts with our inputs but not the outputs of the previous insn.  */
2101*404b540aSrobert 
2102*404b540aSrobert   reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2103*404b540aSrobert }
2104*404b540aSrobert 
/* Handle beginning of the life of register REG.
   BIRTH is the index at which this is happening.
   Indices count two per insn, so a BIRTH below 2 * this_insn_number
   means the register was born before the current insn.  */

static void
reg_is_born (rtx reg, int birth)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    {
      regno = REGNO (SUBREG_REG (reg));
      /* For a SUBREG of a hard register, find the actual first hard
	 register covered by the SUBREG.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (reg);
    }
  else
    regno = REGNO (reg);

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      mark_life (regno, GET_MODE (reg), 1);

      /* If the register was to have been born earlier that the present
	 insn, mark it as live where it is actually born.  */
      if (birth < 2 * this_insn_number)
	post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
    }
  else
    {
      /* A reg_qty value of -2 apparently means no quantity has been
	 allocated for this pseudo yet; give it one now.  */
      if (reg_qty[regno] == -2)
	alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);

      /* If this register has a quantity number, show that it isn't dead.  */
      if (reg_qty[regno] >= 0)
	qty[reg_qty[regno]].death = -1;
    }
}
2141*404b540aSrobert 
/* Record the death of REG in the current insn.  If OUTPUT_P is nonzero,
   REG is an output that is dying (i.e., it is never used), otherwise it
   is an input (the normal case).
   If OUTPUT_P is 1, then we extend the life past the end of this insn.  */

static void
wipe_dead_reg (rtx reg, int output_p)
{
  int regno = REGNO (reg);

  /* If this insn has multiple results,
     and the dead reg is used in one of the results,
     extend its life to after this insn,
     so it won't get allocated together with any other result of this insn.

     It is unsafe to use !single_set here since it will ignore an unused
     output.  Just because an output is unused does not mean the compiler
     can assume the side effect will not occur.   Consider if REG appears
     in the address of an output and we reload the output.  If we allocate
     REG to the same hard register as an unused output we could set the hard
     register before the output reload insn.  */
  if (GET_CODE (PATTERN (this_insn)) == PARALLEL
      && multiple_sets (this_insn))
    {
      int i;
      for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
	{
	  rtx set = XVECEXP (PATTERN (this_insn), 0, i);
	  /* Only non-REG destinations (e.g. memory references) can
	     mention REG other than as the value being stored; a SET_DEST
	     equal to REG itself is the normal death and is skipped.  */
	  if (GET_CODE (set) == SET
	      && !REG_P (SET_DEST (set))
	      && !rtx_equal_p (reg, SET_DEST (set))
	      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	    output_p = 1;
	}
    }

  /* If this register is used in an auto-increment address, then extend its
     life to after this insn, so that it won't get allocated together with
     the result of this insn.  */
  if (! output_p && find_regno_note (this_insn, REG_INC, regno))
    output_p = 1;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      mark_life (regno, GET_MODE (reg), 0);

      /* If a hard register is dying as an output, mark it as in use at
	 the beginning of this insn (the above statement would cause this
	 not to happen).  */
      if (output_p)
	post_mark_life (regno, GET_MODE (reg), 1,
			2 * this_insn_number, 2 * this_insn_number + 1);
    }

  /* For a pseudo with a quantity, record the death index; OUTPUT_P
     shifts the death past the insn's own index by one half-insn.  */
  else if (reg_qty[regno] >= 0)
    qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
}
2199*404b540aSrobert 
/* Find a block of SIZE words of hard regs in reg_class CLASS
   that can hold something of machine-mode MODE
     (but actually we test only the first of the block for holding MODE)
   and still free between insn BORN_INDEX and insn DEAD_INDEX,
   and return the number of the first of them.
   Return -1 if such a block cannot be found.
   If QTYNO crosses calls, insist on a register preserved by calls,
   unless ACCEPT_CALL_CLOBBERED is nonzero.

   If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
   register is available.  If not, return -1.  */

static int
find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
	       int accept_call_clobbered, int just_try_suggested,
	       int born_index, int dead_index)
{
  int i, ins;
  /* USED accumulates every hard reg unavailable at some point in the
     lifetime; FIRST_USED may additionally restrict the choice of the
     first register of the block (see below).  */
  HARD_REG_SET first_used, used;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Validate our parameters.  */
  gcc_assert (born_index >= 0 && born_index <= dead_index);

  /* Don't let a pseudo live in a reg across a function call
     if we might get a nonlocal goto.  */
  if (current_function_has_nonlocal_label
      && qty[qtyno].n_calls_crossed > 0)
    return -1;

  /* Start with the registers that are categorically unavailable for
     this quantity's call-crossing situation.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used, call_fixed_reg_set);
  else if (qty[qtyno].n_calls_crossed == 0)
    COPY_HARD_REG_SET (used, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used, call_used_reg_set);

  if (accept_call_clobbered)
    IOR_HARD_REG_SET (used, losing_caller_save_reg_set);

  /* Exclude anything live at any point in [BORN_INDEX, DEAD_INDEX).  */
  for (ins = born_index; ins < dead_index; ins++)
    IOR_HARD_REG_SET (used, regs_live_at[ins]);

  /* Exclude everything outside CLASS.  */
  IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);

  /* Don't use the frame pointer reg in local-alloc even if
     we may omit the frame pointer, because if we do that and then we
     need a frame pointer, reload won't know how to move the pseudo
     to another hard reg.  It can move only regs made by global-alloc.

     This is true of any register that can be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (used, eliminables[i].from);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
     that it might be eliminated into.  */
  SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
#endif
#else
  SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
#endif

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
#endif

  /* Normally, the registers that can be used for the first register in
     a multi-register quantity are the same as those that can be used for
     subsequent registers.  However, if just trying suggested registers,
     restrict our consideration to them.  If there are copy-suggested
     register, try them.  Otherwise, try the arithmetic-suggested
     registers.  */
  COPY_HARD_REG_SET (first_used, used);

  if (just_try_suggested)
    {
      if (qty_phys_num_copy_sugg[qtyno] != 0)
	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
      else
	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
    }

  /* If all registers are excluded, we can't do anything.  */
  GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);

  /* If at least one would be suitable, test each hard reg.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      int regno = reg_alloc_order[i];
#else
      int regno = i;
#endif
      if (! TEST_HARD_REG_BIT (first_used, regno)
	  && HARD_REGNO_MODE_OK (regno, mode)
	  && (qty[qtyno].n_calls_crossed == 0
	      || accept_call_clobbered
	      || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	{
	  int j;
	  int size1 = hard_regno_nregs[regno][mode];
	  /* Verify that the remaining registers of a multi-register
	     block are also free; the empty loop body is intentional.  */
	  for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
	  if (j == size1)
	    {
	      /* Mark that this register is in use between its birth and death
		 insns.  */
	      post_mark_life (regno, mode, 1, born_index, dead_index);
	      return regno;
	    }
#ifndef REG_ALLOC_ORDER
	  /* Skip starting points we know will lose.  */
	  i += j;
#endif
	}
    }

 fail:
  /* If we are just trying suggested register, we have just tried copy-
     suggested registers, and there are arithmetic-suggested registers,
     try them.  */

  /* If it would be profitable to allocate a call-clobbered register
     and save and restore it around calls, do that.  */
  if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
      && qty_phys_num_sugg[qtyno] != 0)
    {
      /* Don't try the copy-suggested regs again.  */
      qty_phys_num_copy_sugg[qtyno] = 0;
      return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
			    born_index, dead_index);
    }

  /* We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  Avoid putting pseudos crossing calls that
     might throw into call used registers.  */

  if (! accept_call_clobbered
      && flag_caller_saves
      && ! just_try_suggested
      && qty[qtyno].n_calls_crossed != 0
      && qty[qtyno].n_throwing_calls_crossed == 0
      && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
				 qty[qtyno].n_calls_crossed))
    {
      /* Retry, this time accepting call-clobbered registers; success
	 means caller-save code will be needed.  */
      i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
      if (i >= 0)
	caller_save_needed = 1;
      return i;
    }
  return -1;
}
2356*404b540aSrobert 
2357*404b540aSrobert /* Mark that REGNO with machine-mode MODE is live starting from the current
2358*404b540aSrobert    insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2359*404b540aSrobert    is zero).  */
2360*404b540aSrobert 
2361*404b540aSrobert static void
mark_life(int regno,enum machine_mode mode,int life)2362*404b540aSrobert mark_life (int regno, enum machine_mode mode, int life)
2363*404b540aSrobert {
2364*404b540aSrobert   int j = hard_regno_nregs[regno][mode];
2365*404b540aSrobert   if (life)
2366*404b540aSrobert     while (--j >= 0)
2367*404b540aSrobert       SET_HARD_REG_BIT (regs_live, regno + j);
2368*404b540aSrobert   else
2369*404b540aSrobert     while (--j >= 0)
2370*404b540aSrobert       CLEAR_HARD_REG_BIT (regs_live, regno + j);
2371*404b540aSrobert }
2372*404b540aSrobert 
2373*404b540aSrobert /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2374*404b540aSrobert    is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2375*404b540aSrobert    to insn number DEATH (exclusive).  */
2376*404b540aSrobert 
2377*404b540aSrobert static void
post_mark_life(int regno,enum machine_mode mode,int life,int birth,int death)2378*404b540aSrobert post_mark_life (int regno, enum machine_mode mode, int life, int birth,
2379*404b540aSrobert 		int death)
2380*404b540aSrobert {
2381*404b540aSrobert   int j = hard_regno_nregs[regno][mode];
2382*404b540aSrobert   HARD_REG_SET this_reg;
2383*404b540aSrobert 
2384*404b540aSrobert   CLEAR_HARD_REG_SET (this_reg);
2385*404b540aSrobert   while (--j >= 0)
2386*404b540aSrobert     SET_HARD_REG_BIT (this_reg, regno + j);
2387*404b540aSrobert 
2388*404b540aSrobert   if (life)
2389*404b540aSrobert     while (birth < death)
2390*404b540aSrobert       {
2391*404b540aSrobert 	IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2392*404b540aSrobert 	birth++;
2393*404b540aSrobert       }
2394*404b540aSrobert   else
2395*404b540aSrobert     while (birth < death)
2396*404b540aSrobert       {
2397*404b540aSrobert 	AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2398*404b540aSrobert 	birth++;
2399*404b540aSrobert       }
2400*404b540aSrobert }
2401*404b540aSrobert 
/* INSN is the CLOBBER insn that starts a REG_NO_NOCONFLICT block, R0
   is the register being clobbered, and R1 is a register being used in
   the equivalent expression.

   If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
   in which it is used, return 1.

   Otherwise, return 0.  */

static int
no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
{
  int ok = 0;
  /* The REG_LIBCALL note on INSN points at the last insn of the block;
     without it there is no delimited block to scan.  */
  rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
  rtx p, last;

  /* If R1 is a hard register, return 0 since we handle this case
     when we scan the insns that actually use it.  */

  if (note == 0
      || (REG_P (r1) && REGNO (r1) < FIRST_PSEUDO_REGISTER)
      || (GET_CODE (r1) == SUBREG && REG_P (SUBREG_REG (r1))
	  && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
    return 0;

  last = XEXP (note, 0);

  /* Walk the insns strictly between INSN and the block's last insn.  */
  for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	/* R1 must die somewhere inside the block.  */
	if (find_reg_note (p, REG_DEAD, r1))
	  ok = 1;

	/* There must be a REG_NO_CONFLICT note on every insn, otherwise
	   some earlier optimization pass has inserted instructions into
	   the sequence, and it is not safe to perform this optimization.
	   Note that emit_no_conflict_block always ensures that this is
	   true when these sequences are created.  */
	if (! find_reg_note (p, REG_NO_CONFLICT, r1))
	  return 0;
      }

  return ok;
}
2446*404b540aSrobert 
2447*404b540aSrobert /* Return the number of alternatives for which the constraint string P
2448*404b540aSrobert    indicates that the operand must be equal to operand 0 and that no register
2449*404b540aSrobert    is acceptable.  */
2450*404b540aSrobert 
2451*404b540aSrobert static int
requires_inout(const char * p)2452*404b540aSrobert requires_inout (const char *p)
2453*404b540aSrobert {
2454*404b540aSrobert   char c;
2455*404b540aSrobert   int found_zero = 0;
2456*404b540aSrobert   int reg_allowed = 0;
2457*404b540aSrobert   int num_matching_alts = 0;
2458*404b540aSrobert   int len;
2459*404b540aSrobert 
2460*404b540aSrobert   for ( ; (c = *p); p += len)
2461*404b540aSrobert     {
2462*404b540aSrobert       len = CONSTRAINT_LEN (c, p);
2463*404b540aSrobert       switch (c)
2464*404b540aSrobert 	{
2465*404b540aSrobert 	case '=':  case '+':  case '?':
2466*404b540aSrobert 	case '#':  case '&':  case '!':
2467*404b540aSrobert 	case '*':  case '%':
2468*404b540aSrobert 	case 'm':  case '<':  case '>':  case 'V':  case 'o':
2469*404b540aSrobert 	case 'E':  case 'F':  case 'G':  case 'H':
2470*404b540aSrobert 	case 's':  case 'i':  case 'n':
2471*404b540aSrobert 	case 'I':  case 'J':  case 'K':  case 'L':
2472*404b540aSrobert 	case 'M':  case 'N':  case 'O':  case 'P':
2473*404b540aSrobert 	case 'X':
2474*404b540aSrobert 	  /* These don't say anything we care about.  */
2475*404b540aSrobert 	  break;
2476*404b540aSrobert 
2477*404b540aSrobert 	case ',':
2478*404b540aSrobert 	  if (found_zero && ! reg_allowed)
2479*404b540aSrobert 	    num_matching_alts++;
2480*404b540aSrobert 
2481*404b540aSrobert 	  found_zero = reg_allowed = 0;
2482*404b540aSrobert 	  break;
2483*404b540aSrobert 
2484*404b540aSrobert 	case '0':
2485*404b540aSrobert 	  found_zero = 1;
2486*404b540aSrobert 	  break;
2487*404b540aSrobert 
2488*404b540aSrobert 	case '1':  case '2':  case '3':  case '4': case '5':
2489*404b540aSrobert 	case '6':  case '7':  case '8':  case '9':
2490*404b540aSrobert 	  /* Skip the balance of the matching constraint.  */
2491*404b540aSrobert 	  do
2492*404b540aSrobert 	    p++;
2493*404b540aSrobert 	  while (ISDIGIT (*p));
2494*404b540aSrobert 	  len = 0;
2495*404b540aSrobert 	  break;
2496*404b540aSrobert 
2497*404b540aSrobert 	default:
2498*404b540aSrobert 	  if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2499*404b540aSrobert 	      && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2500*404b540aSrobert 	    break;
2501*404b540aSrobert 	  /* Fall through.  */
2502*404b540aSrobert 	case 'p':
2503*404b540aSrobert 	case 'g': case 'r':
2504*404b540aSrobert 	  reg_allowed = 1;
2505*404b540aSrobert 	  break;
2506*404b540aSrobert 	}
2507*404b540aSrobert     }
2508*404b540aSrobert 
2509*404b540aSrobert   if (found_zero && ! reg_allowed)
2510*404b540aSrobert     num_matching_alts++;
2511*404b540aSrobert 
2512*404b540aSrobert   return num_matching_alts;
2513*404b540aSrobert }
2514*404b540aSrobert 
2515*404b540aSrobert void
dump_local_alloc(FILE * file)2516*404b540aSrobert dump_local_alloc (FILE *file)
2517*404b540aSrobert {
2518*404b540aSrobert   int i;
2519*404b540aSrobert   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2520*404b540aSrobert     if (reg_renumber[i] != -1)
2521*404b540aSrobert       fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2522*404b540aSrobert }
2523*404b540aSrobert 
/* Run old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return.
   NOTE(review): as written this always returns 0; the TRUE case in the
   comment above appears stale — confirm against callers of the pass.  */
static unsigned int
rest_of_handle_local_alloc (void)
{
  int rebuild_notes;

  /* Determine if the current function is a leaf before running reload
     since this can impact optimizations done by the prologue and
     epilogue thus changing register elimination offsets.  */
  current_function_is_leaf = leaf_function_p ();

  /* Allocate the reg_renumber array.  */
  allocate_reg_info (max_regno, FALSE, TRUE);

  /* And the reg_equiv_memory_loc array, zero-filled; the bare pointer
     aliases the vector's storage for the rest of the pass.  */
  VEC_safe_grow (rtx, gc, reg_equiv_memory_loc_vec, max_regno);
  memset (VEC_address (rtx, reg_equiv_memory_loc_vec), 0,
	  sizeof (rtx) * max_regno);
  reg_equiv_memory_loc = VEC_address (rtx, reg_equiv_memory_loc_vec);

  allocate_initial_values (reg_equiv_memory_loc);

  /* Compute preferred register classes, then do the block-local
     allocation itself.  */
  regclass (get_insns (), max_reg_num ());
  rebuild_notes = local_alloc ();

  /* Local allocation may have turned an indirect jump into a direct
     jump.  If so, we must rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_notes)
    {
      timevar_push (TV_JUMP);

      rebuild_jump_labels (get_insns ());
      purge_all_dead_edges ();
      delete_unreachable_blocks ();

      timevar_pop (TV_JUMP);
    }

  /* With detailed dumping requested, emit the CFG and the per-pseudo
     assignment table.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      timevar_push (TV_DUMP);
      dump_flow_info (dump_file, dump_flags);
      dump_local_alloc (dump_file);
      timevar_pop (TV_DUMP);
    }
  return 0;
}
2573*404b540aSrobert 
/* Pass descriptor for the local register allocator; the positional
   initializers follow the tree_opt_pass field order noted on each line.  */
struct tree_opt_pass pass_local_alloc =
{
  "lreg",                               /* name */
  NULL,                                 /* gate */
  rest_of_handle_local_alloc,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LOCAL_ALLOC,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'l'                                   /* letter */
};
2591*404b540aSrobert 
2592