1 /* Allocate registers within a basic block, for GNU compiler.
2    Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /* Allocation of hard register numbers to pseudo registers is done in
23    two passes.  In this pass we consider only regs that are born and
24    die once within one basic block.  We do this one basic block at a
25    time.  Then the next pass allocates the registers that remain.
26    Two passes are used because this pass uses methods that work only
27    on linear code, but that do a better job than the general methods
28    used in global_alloc, and more quickly too.
29 
30    The assignments made are recorded in the vector reg_renumber
31    whose space is allocated here.  The rtl code itself is not altered.
32 
33    We assign each instruction in the basic block a number
34    which is its order from the beginning of the block.
35    Then we can represent the lifetime of a pseudo register with
36    a pair of numbers, and check for conflicts easily.
37    We can record the availability of hard registers with a
38    HARD_REG_SET for each instruction.  The HARD_REG_SET
39    contains 0 or 1 for each hard reg.
40 
41    To avoid register shuffling, we tie registers together when one
42    dies by being copied into another, or dies in an instruction that
43    does arithmetic to produce another.  The tied registers are
44    allocated as one.  Registers with different reg class preferences
45    can never be tied unless the class preferred by one is a subclass
46    of the one preferred by the other.
47 
48    Tying is represented with "quantity numbers".
49    A non-tied register is given a new quantity number.
50    Tied registers have the same quantity number.
51 
52    There is provision to exempt registers that, even though they are contained
53    within the block, can be tied to others that are not contained in it.
54    This is so that global_alloc could process them both and tie them then.
55    But this is currently disabled since tying in global_alloc is not
56    yet implemented.  */
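
/* As an illustration of the scheme above (a minimal sketch only -- it is not
   used by this pass, and the helper name is hypothetical): once each insn in
   the block has a sequence number, a quantity's life is just a pair
   (birth, death) of such indexes, and two quantities conflict exactly when
   those ranges overlap.  */
#if 0
static int
qty_lifetimes_overlap_p (int birth1, int death1, int birth2, int death2)
{
  /* Treat each life span as the half-open range [birth, death);
     the spans overlap unless one ends before the other begins.  */
  return birth1 < death2 && birth2 < death1;
}
#endif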
57 
58 /* Pseudos allocated here can be reallocated by global.c if the hard register
59    is used as a spill register.  Currently we don't allocate such pseudos
60    here if their preferred class is likely to be used by spills.  */
61 
62 #include "config.h"
63 #include "system.h"
64 #include "coretypes.h"
65 #include "tm.h"
66 #include "hard-reg-set.h"
67 #include "rtl.h"
68 #include "tm_p.h"
69 #include "flags.h"
70 #include "basic-block.h"
71 #include "regs.h"
72 #include "function.h"
73 #include "insn-config.h"
74 #include "insn-attr.h"
75 #include "recog.h"
76 #include "output.h"
77 #include "toplev.h"
78 #include "except.h"
79 #include "integrate.h"
80 
81 /* Next quantity number available for allocation.  */
82 
83 static int next_qty;
84 
85 /* Information we maintain about each quantity.  */
86 struct qty
87 {
88   /* The number of refs to quantity Q.  */
89 
90   int n_refs;
91 
92   /* The frequency of uses of quantity Q.  */
93 
94   int freq;
95 
96   /* Insn number (counting from head of basic block)
97      where quantity Q was born.  -1 if birth has not been recorded.  */
98 
99   int birth;
100 
101   /* Insn number (counting from head of basic block)
102      where given quantity died.  Due to the way tying is done,
103      and the fact that we consider in this pass only regs that die but once,
104      a quantity can die only once.  Each quantity's life span
105      is a set of consecutive insns.  -1 if death has not been recorded.  */
106 
107   int death;
108 
109   /* Number of words needed to hold the data in given quantity.
110      This depends on its machine mode.  It is used for these purposes:
111      1. It is used in computing the relative importance of qtys,
112 	which determines the order in which we look for regs for them.
113      2. It is used in rules that prevent tying several registers of
114 	different sizes in a way that is geometrically impossible
115 	(see combine_regs).  */
116 
117   int size;
118 
119   /* Number of times a reg tied to given qty lives across a CALL_INSN.  */
120 
121   int n_calls_crossed;
122 
123   /* The register number of one pseudo register whose reg_qty value is Q.
124      This register should be the head of the chain
125      maintained in reg_next_in_qty.  */
126 
127   int first_reg;
128 
129   /* Reg class contained in (smaller than) the preferred classes of all
130      the pseudo regs that are tied in given quantity.
131      This is the preferred class for allocating that quantity.  */
132 
133   enum reg_class min_class;
134 
135   /* Register class within which we allocate given qty if we can't get
136      its preferred class.  */
137 
138   enum reg_class alternate_class;
139 
140   /* This holds the mode of the registers that are tied to given qty,
141      or VOIDmode if registers with differing modes are tied together.  */
142 
143   enum machine_mode mode;
144 
145   /* The hard reg number chosen for given quantity,
146      or -1 if none was found.  */
147 
148   short phys_reg;
149 };
150 
151 static struct qty *qty;
152 
153 /* These fields are kept separately to speed up their clearing.  */
154 
155 /* We maintain two hard register sets that indicate suggested hard registers
156    for each quantity.  The first, phys_copy_sugg, contains hard registers
157    that are tied to the quantity by a simple copy.  The second contains all
158    hard registers that are tied to the quantity via an arithmetic operation.
159 
160    The former register set is given priority for allocation.  This tends to
161    eliminate copy insns.  */
162 
163 /* Element Q is a set of hard registers that are suggested for quantity Q by
164    copy insns.  */
165 
166 static HARD_REG_SET *qty_phys_copy_sugg;
167 
168 /* Element Q is a set of hard registers that are suggested for quantity Q by
169    arithmetic insns.  */
170 
171 static HARD_REG_SET *qty_phys_sugg;
172 
173 /* Element Q is the number of suggested registers in qty_phys_copy_sugg.  */
174 
175 static short *qty_phys_num_copy_sugg;
176 
177 /* Element Q is the number of suggested registers in qty_phys_sugg.  */
178 
179 static short *qty_phys_num_sugg;
180 
181 /* If (REG N) has been assigned a quantity number, element N is the register
182    number of another register assigned the same quantity number, or -1 for
183    the end of the chain.  qty[Q].first_reg points to the head of this chain.  */
184 
185 static int *reg_next_in_qty;
186 
187 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
188    if it is >= 0,
189    or -1 if this register cannot be allocated by local-alloc,
190    or -2 if not known yet.
191 
192    Note that if we see a use or death of pseudo register N with
193    reg_qty[N] == -2, register N must be local to the current block.  If
194    it were used in more than one block, we would have reg_qty[N] == -1.
195    This relies on the fact that if reg_basic_block[N] is >= 0, register N
196    will not appear in any other block.  We save a considerable number of
197    tests by exploiting this.
198 
199    If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
200    be referenced.  */
201 
202 static int *reg_qty;
203 
204 /* The offset (in words) of register N within its quantity.
205    This can be nonzero if register N is SImode, and has been tied
206    to a subreg of a DImode register.  */
207 
208 static char *reg_offset;
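
/* For example (illustrative only; the pseudo numbers and the 32-bit word
   size are made up): if SImode pseudo 101 has been tied to
   (subreg:SI (reg:DI 100) 4) on a target with 32-bit words, then
   reg_offset[101] is 1, and at the end of block_alloc pseudo 101 is
   renumbered to the hard register one word past the first hard register
   of its quantity (reg_renumber[i] = qty[q].phys_reg + reg_offset[i]).  */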
209 
210 /* Vector of substitutions of register numbers,
211    used to map pseudo regs into hardware regs.
212    This is set up as a result of register allocation.
213    Element N is the hard reg assigned to pseudo reg N,
214    or is -1 if no hard reg was assigned.
215    If N is a hard reg number, element N is N.  */
216 
217 short *reg_renumber;
218 
219 /* Set of hard registers live at the current point in the scan
220    of the instructions in a basic block.  */
221 
222 static HARD_REG_SET regs_live;
223 
224 /* Each set of hard registers indicates registers live at a particular
225    point in the basic block.  For N even, regs_live_at[N] says which
226    hard registers are needed *after* insn N/2 (i.e., they may not
227    conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
228 
229    If an object is to conflict with the inputs of insn J but not the
230    outputs of insn J + 1, we say it is born at index J*2 - 1.  Similarly,
231    if it is to conflict with the outputs of insn J but not the inputs of
232    insn J + 1, it is said to die at index J*2 + 1.  */
233 
234 static HARD_REG_SET *regs_live_at;
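
/* A minimal sketch (not compiled; the helper names are hypothetical) of the
   two-indexes-per-insn convention used for regs_live_at and for the birth
   and death fields of struct qty.  */
#if 0
/* Index at which the hard regs live after insn N are recorded.  */
static int
live_after_insn_index (int insn_number)
{
  return 2 * insn_number;
}

/* Index at which an object is considered born when it should conflict
   with the inputs of insn J, per the convention described above.  */
static int
birth_before_insn_index (int insn_number)
{
  return 2 * insn_number - 1;
}
#endif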
235 
236 /* Communicate local vars `insn_number' and `insn'
237    from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'.  */
238 static int this_insn_number;
239 static rtx this_insn;
240 
241 struct equivalence
242 {
243   /* Set when an attempt should be made to replace a register
244      with the associated src_p entry.  */
245 
246   char replace;
247 
248   /* Set when a REG_EQUIV note is found or created.  Used to
249      keep track of what memory accesses might be created later,
250      e.g. by reload.  */
251 
252   rtx replacement;
253 
254   rtx *src_p;
255 
256   /* Loop depth is used to recognize equivalences which appear
257      to be present within the same loop (or in an inner loop).  */
258 
259   int loop_depth;
260 
261   /* The list of each instruction which initializes this register.  */
262 
263   rtx init_insns;
264 };
265 
266 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
267    structure for that register.  */
268 
269 static struct equivalence *reg_equiv;
270 
271 /* Nonzero if we recorded an equivalence for a LABEL_REF.  */
272 static int recorded_label_ref;
273 
274 static void alloc_qty (int, enum machine_mode, int, int);
275 static void validate_equiv_mem_from_store (rtx, rtx, void *);
276 static int validate_equiv_mem (rtx, rtx, rtx);
277 static int equiv_init_varies_p (rtx);
278 static int equiv_init_movable_p (rtx, int);
279 static int contains_replace_regs (rtx);
280 static int memref_referenced_p (rtx, rtx);
281 static int memref_used_between_p (rtx, rtx, rtx);
282 static void update_equiv_regs (void);
283 static void no_equiv (rtx, rtx, void *);
284 static void block_alloc (int);
285 static int qty_sugg_compare (int, int);
286 static int qty_sugg_compare_1 (const void *, const void *);
287 static int qty_compare (int, int);
288 static int qty_compare_1 (const void *, const void *);
289 static int combine_regs (rtx, rtx, int, int, rtx, int);
290 static int reg_meets_class_p (int, enum reg_class);
291 static void update_qty_class (int, int);
292 static void reg_is_set (rtx, rtx, void *);
293 static void reg_is_born (rtx, int);
294 static void wipe_dead_reg (rtx, int);
295 static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
296 			  int, int);
297 static void mark_life (int, enum machine_mode, int);
298 static void post_mark_life (int, enum machine_mode, int, int, int);
299 static int no_conflict_p (rtx, rtx, rtx);
300 static int requires_inout (const char *);
301 
302 /* Allocate a new quantity (new within current basic block)
303    for register number REGNO which is born at index BIRTH
304    within the block.  MODE and SIZE are info on reg REGNO.  */
305 
306 static void
307 alloc_qty (int regno, enum machine_mode mode, int size, int birth)
308 {
309   int qtyno = next_qty++;
310 
311   reg_qty[regno] = qtyno;
312   reg_offset[regno] = 0;
313   reg_next_in_qty[regno] = -1;
314 
315   qty[qtyno].first_reg = regno;
316   qty[qtyno].size = size;
317   qty[qtyno].mode = mode;
318   qty[qtyno].birth = birth;
319   qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
320   qty[qtyno].min_class = reg_preferred_class (regno);
321   qty[qtyno].alternate_class = reg_alternate_class (regno);
322   qty[qtyno].n_refs = REG_N_REFS (regno);
323   qty[qtyno].freq = REG_FREQ (regno);
324 }
325 
326 /* Main entry point of this file.  */
327 
328 int
329 local_alloc (void)
330 {
331 /* ZPU: changing the "#if 1" below to "#if 0" disables register allocation.  */
332 #if 1
333   int i;
334   int max_qty;
335   basic_block b;
336 
337   /* We need to keep track of whether or not we recorded a LABEL_REF so
338      that we know if the jump optimizer needs to be rerun.  */
339   recorded_label_ref = 0;
340 
341   /* Leaf functions and non-leaf functions have different needs.
342      If defined, let the machine say what kind of ordering we
343      should use.  */
344 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
345   ORDER_REGS_FOR_LOCAL_ALLOC;
346 #endif
347 
348   /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
349      registers.  */
350   if (optimize)
351     update_equiv_regs ();
352 
353   /* This sets the maximum number of quantities we can have.  Quantity
354      numbers start at zero and we can have one for each pseudo.  */
355   max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
356 
357   /* Allocate vectors of temporary data.
358      See the declarations of these variables, above,
359      for what they mean.  */
360 
361   qty = xmalloc (max_qty * sizeof (struct qty));
362   qty_phys_copy_sugg = xmalloc (max_qty * sizeof (HARD_REG_SET));
363   qty_phys_num_copy_sugg = xmalloc (max_qty * sizeof (short));
364   qty_phys_sugg = xmalloc (max_qty * sizeof (HARD_REG_SET));
365   qty_phys_num_sugg = xmalloc (max_qty * sizeof (short));
366 
367   reg_qty = xmalloc (max_regno * sizeof (int));
368   reg_offset = xmalloc (max_regno * sizeof (char));
369   reg_next_in_qty = xmalloc (max_regno * sizeof (int));
370 
371   /* Determine which pseudo-registers can be allocated by local-alloc.
372      In general, these are the registers used only in a single block and
373      which only die once.
374 
375      We need not be concerned with which block actually uses the register
376      since we will never see it outside that block.  */
377 
378   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
379     {
380       if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
381 	reg_qty[i] = -2;
382       else
383 	reg_qty[i] = -1;
384     }
385 
386   /* Force loop below to initialize entire quantity array.  */
387   next_qty = max_qty;
388 
389   /* Allocate each block's local registers, block by block.  */
390 
391   FOR_EACH_BB (b)
392     {
393       /* NEXT_QTY indicates which elements of the `qty_...'
394 	 vectors might need to be initialized because they were used
395 	 for the previous block; it is set to the entire array before
396 	 block 0.  Initialize those, with an explicit loop if there are few,
397 	 else with memset.  Do not initialize vectors that are
398 	 explicitly set by `alloc_qty'.  */
399 
400       if (next_qty < 6)
401 	{
402 	  for (i = 0; i < next_qty; i++)
403 	    {
404 	      CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
405 	      qty_phys_num_copy_sugg[i] = 0;
406 	      CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
407 	      qty_phys_num_sugg[i] = 0;
408 	    }
409 	}
410       else
411 	{
412 #define CLEAR(vector)  \
413 	  memset ((vector), 0, (sizeof (*(vector))) * next_qty);
414 
415 	  CLEAR (qty_phys_copy_sugg);
416 	  CLEAR (qty_phys_num_copy_sugg);
417 	  CLEAR (qty_phys_sugg);
418 	  CLEAR (qty_phys_num_sugg);
419 	}
420 
421       next_qty = 0;
422 
423       block_alloc (b->index);
424     }
425 
426   free (qty);
427   free (qty_phys_copy_sugg);
428   free (qty_phys_num_copy_sugg);
429   free (qty_phys_sugg);
430   free (qty_phys_num_sugg);
431 
432   free (reg_qty);
433   free (reg_offset);
434   free (reg_next_in_qty);
435 
436   return recorded_label_ref;
437 #else
438   return 0;
439 #endif
440 }
441 
442 /* Used for communication between the following two functions: contains
443    a MEM that we wish to ensure remains unchanged.  */
444 static rtx equiv_mem;
445 
446 /* Set nonzero if EQUIV_MEM is modified.  */
447 static int equiv_mem_modified;
448 
449 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
450    Called via note_stores.  */
451 
452 static void
453 validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
454 			       void *data ATTRIBUTE_UNUSED)
455 {
456   if ((GET_CODE (dest) == REG
457        && reg_overlap_mentioned_p (dest, equiv_mem))
458       || (GET_CODE (dest) == MEM
459 	  && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
460     equiv_mem_modified = 1;
461 }
462 
463 /* Verify that no store between START and the death of REG invalidates
464    MEMREF.  MEMREF is invalidated by modifying a register used in MEMREF,
465    by storing into an overlapping memory location, or with a non-const
466    CALL_INSN.
467 
468    Return 1 if MEMREF remains valid.  */
469 
470 static int
471 validate_equiv_mem (rtx start, rtx reg, rtx memref)
472 {
473   rtx insn;
474   rtx note;
475 
476   equiv_mem = memref;
477   equiv_mem_modified = 0;
478 
479   /* If the memory reference has side effects or is volatile, it isn't a
480      valid equivalence.  */
481   if (side_effects_p (memref))
482     return 0;
483 
484   for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
485     {
486       if (! INSN_P (insn))
487 	continue;
488 
489       if (find_reg_note (insn, REG_DEAD, reg))
490 	return 1;
491 
492       if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
493 	  && ! CONST_OR_PURE_CALL_P (insn))
494 	return 0;
495 
496       note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
497 
498       /* If a register mentioned in MEMREF is modified via an
499 	 auto-increment, we lose the equivalence.  Do the same if one
500 	 dies; although we could extend the life, it doesn't seem worth
501 	 the trouble.  */
502 
503       for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
504 	if ((REG_NOTE_KIND (note) == REG_INC
505 	     || REG_NOTE_KIND (note) == REG_DEAD)
506 	    && GET_CODE (XEXP (note, 0)) == REG
507 	    && reg_overlap_mentioned_p (XEXP (note, 0), memref))
508 	  return 0;
509     }
510 
511   return 0;
512 }
513 
514 /* Returns zero if X is known to be invariant, nonzero if it may vary.  */
515 
516 static int
517 equiv_init_varies_p (rtx x)
518 {
519   RTX_CODE code = GET_CODE (x);
520   int i;
521   const char *fmt;
522 
523   switch (code)
524     {
525     case MEM:
526       return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
527 
528     case QUEUED:
529       return 1;
530 
531     case CONST:
532     case CONST_INT:
533     case CONST_DOUBLE:
534     case CONST_VECTOR:
535     case SYMBOL_REF:
536     case LABEL_REF:
537       return 0;
538 
539     case REG:
540       return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
541 
542     case ASM_OPERANDS:
543       if (MEM_VOLATILE_P (x))
544 	return 1;
545 
546       /* Fall through.  */
547 
548     default:
549       break;
550     }
551 
552   fmt = GET_RTX_FORMAT (code);
553   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
554     if (fmt[i] == 'e')
555       {
556 	if (equiv_init_varies_p (XEXP (x, i)))
557 	  return 1;
558       }
559     else if (fmt[i] == 'E')
560       {
561 	int j;
562 	for (j = 0; j < XVECLEN (x, i); j++)
563 	  if (equiv_init_varies_p (XVECEXP (x, i, j)))
564 	    return 1;
565       }
566 
567   return 0;
568 }
569 
570 /* Returns nonzero if X (used to initialize register REGNO) is movable.
571    X is only movable if the registers it uses have equivalent initializations
572    which appear to be within the same loop (or in an inner loop) and movable
573    or if they are not candidates for local_alloc and don't vary.  */
574 
575 static int
576 equiv_init_movable_p (rtx x, int regno)
577 {
578   int i, j;
579   const char *fmt;
580   enum rtx_code code = GET_CODE (x);
581 
582   switch (code)
583     {
584     case SET:
585       return equiv_init_movable_p (SET_SRC (x), regno);
586 
587     case CC0:
588     case CLOBBER:
589       return 0;
590 
591     case PRE_INC:
592     case PRE_DEC:
593     case POST_INC:
594     case POST_DEC:
595     case PRE_MODIFY:
596     case POST_MODIFY:
597       return 0;
598 
599     case REG:
600       return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
601 	      && reg_equiv[REGNO (x)].replace)
602 	     || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
603 
604     case UNSPEC_VOLATILE:
605       return 0;
606 
607     case ASM_OPERANDS:
608       if (MEM_VOLATILE_P (x))
609 	return 0;
610 
611       /* Fall through.  */
612 
613     default:
614       break;
615     }
616 
617   fmt = GET_RTX_FORMAT (code);
618   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
619     switch (fmt[i])
620       {
621       case 'e':
622 	if (! equiv_init_movable_p (XEXP (x, i), regno))
623 	  return 0;
624 	break;
625       case 'E':
626 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
627 	  if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
628 	    return 0;
629 	break;
630       }
631 
632   return 1;
633 }
634 
635 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true.  */
636 
637 static int
638 contains_replace_regs (rtx x)
639 {
640   int i, j;
641   const char *fmt;
642   enum rtx_code code = GET_CODE (x);
643 
644   switch (code)
645     {
646     case CONST_INT:
647     case CONST:
648     case LABEL_REF:
649     case SYMBOL_REF:
650     case CONST_DOUBLE:
651     case CONST_VECTOR:
652     case PC:
653     case CC0:
654     case HIGH:
655       return 0;
656 
657     case REG:
658       return reg_equiv[REGNO (x)].replace;
659 
660     default:
661       break;
662     }
663 
664   fmt = GET_RTX_FORMAT (code);
665   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
666     switch (fmt[i])
667       {
668       case 'e':
669 	if (contains_replace_regs (XEXP (x, i)))
670 	  return 1;
671 	break;
672       case 'E':
673 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
674 	  if (contains_replace_regs (XVECEXP (x, i, j)))
675 	    return 1;
676 	break;
677       }
678 
679   return 0;
680 }
681 
682 /* TRUE if X references a memory location that would be affected by a store
683    to MEMREF.  */
684 
685 static int
686 memref_referenced_p (rtx memref, rtx x)
687 {
688   int i, j;
689   const char *fmt;
690   enum rtx_code code = GET_CODE (x);
691 
692   switch (code)
693     {
694     case CONST_INT:
695     case CONST:
696     case LABEL_REF:
697     case SYMBOL_REF:
698     case CONST_DOUBLE:
699     case CONST_VECTOR:
700     case PC:
701     case CC0:
702     case HIGH:
703     case LO_SUM:
704       return 0;
705 
706     case REG:
707       return (reg_equiv[REGNO (x)].replacement
708 	      && memref_referenced_p (memref,
709 				      reg_equiv[REGNO (x)].replacement));
710 
711     case MEM:
712       if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
713 	return 1;
714       break;
715 
716     case SET:
717       /* If we are setting a MEM, it doesn't count (its address does), but any
718 	 other SET_DEST that has a MEM in it is referencing the MEM.  */
719       if (GET_CODE (SET_DEST (x)) == MEM)
720 	{
721 	  if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
722 	    return 1;
723 	}
724       else if (memref_referenced_p (memref, SET_DEST (x)))
725 	return 1;
726 
727       return memref_referenced_p (memref, SET_SRC (x));
728 
729     default:
730       break;
731     }
732 
733   fmt = GET_RTX_FORMAT (code);
734   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
735     switch (fmt[i])
736       {
737       case 'e':
738 	if (memref_referenced_p (memref, XEXP (x, i)))
739 	  return 1;
740 	break;
741       case 'E':
742 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
743 	  if (memref_referenced_p (memref, XVECEXP (x, i, j)))
744 	    return 1;
745 	break;
746       }
747 
748   return 0;
749 }
750 
751 /* TRUE if some insn in the range (START, END] references a memory location
752    that would be affected by a store to MEMREF.  */
753 
754 static int
755 memref_used_between_p (rtx memref, rtx start, rtx end)
756 {
757   rtx insn;
758 
759   for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
760        insn = NEXT_INSN (insn))
761     if (INSN_P (insn) && memref_referenced_p (memref, PATTERN (insn)))
762       return 1;
763 
764   return 0;
765 }
766 
767 /* Return nonzero if the rtx X is invariant over the current function.  */
768 /* ??? Actually, the places this is used in reload expect exactly what
769    is tested here, and not everything that is function invariant.  In
770    particular, the frame pointer and arg pointer are special cased;
771    pic_offset_table_rtx is not, and this will cause aborts when we
772    go to spill these things to memory.  */
773 
774 int
775 function_invariant_p (rtx x)
776 {
777   if (CONSTANT_P (x))
778     return 1;
779   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
780     return 1;
781   if (GET_CODE (x) == PLUS
782       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
783       && CONSTANT_P (XEXP (x, 1)))
784     return 1;
785   return 0;
786 }
787 
788 /* Find registers that are equivalent to a single value throughout the
789    compilation (either because they can be referenced in memory or are set once
790    from a single constant).  Lower their priority for a register.
791 
792    If such a register is only referenced once, try substituting its value
793    into the using insn.  If it succeeds, we can eliminate the register
794    completely.  */
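
/* For example (an illustrative sketch only; the pseudo numbers and the
   symbol are made up):

	(set (reg:SI 100) (mem:SI (symbol_ref "x")))	; gets a REG_EQUIV note
	...
	(set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 4)))

   If pseudo 100 is set once and used only in the second insn, the MEM stays
   unchanged up to that use, and validate_replace_rtx accepts the rewritten
   insn, then the use is rewritten as

	(set (reg:SI 101) (plus:SI (mem:SI (symbol_ref "x")) (const_int 4)))

   and the initializing insn is deleted, so pseudo 100 goes away.  */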
795 
796 static void
797 update_equiv_regs (void)
798 {
799   rtx insn;
800   basic_block bb;
801   int loop_depth;
802   regset_head cleared_regs;
803   int clear_regnos = 0;
804 
805   reg_equiv = xcalloc (max_regno, sizeof *reg_equiv);
806   INIT_REG_SET (&cleared_regs);
807 
808   init_alias_analysis ();
809 
810   /* Scan the insns and find which registers have equivalences.  Do this
811      in a separate scan of the insns because (due to -fcse-follow-jumps)
812      a register can be set below its use.  */
813   FOR_EACH_BB (bb)
814     {
815       loop_depth = bb->loop_depth;
816 
817       for (insn = BB_HEAD (bb);
818 	   insn != NEXT_INSN (BB_END (bb));
819 	   insn = NEXT_INSN (insn))
820 	{
821 	  rtx note;
822 	  rtx set;
823 	  rtx dest, src;
824 	  int regno;
825 
826 	  if (! INSN_P (insn))
827 	    continue;
828 
829 	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
830 	    if (REG_NOTE_KIND (note) == REG_INC)
831 	      no_equiv (XEXP (note, 0), note, NULL);
832 
833 	  set = single_set (insn);
834 
835 	  /* If this insn contains more (or fewer) than a single SET,
836 	     just mark all its destinations as having no known equivalence.  */
837 	  if (set == 0)
838 	    {
839 	      note_stores (PATTERN (insn), no_equiv, NULL);
840 	      continue;
841 	    }
842 	  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
843 	    {
844 	      int i;
845 
846 	      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
847 		{
848 		  rtx part = XVECEXP (PATTERN (insn), 0, i);
849 		  if (part != set)
850 		    note_stores (part, no_equiv, NULL);
851 		}
852 	    }
853 
854 	  dest = SET_DEST (set);
855 	  src = SET_SRC (set);
856 
857 	  /* If this sets a MEM to the contents of a REG that is only used
858 	     in a single basic block, see if the register is always equivalent
859 	     to that memory location and if moving the store from INSN to the
860 	     insn that set REG is safe.  If so, put a REG_EQUIV note on the
861 	     initializing insn.
862 
863 	     Don't add a REG_EQUIV note if the insn already has one.  The existing
864 	     REG_EQUIV is likely more useful than the one we are adding.
865 
866 	     If one of the regs in the address has reg_equiv[REGNO].replace set,
867 	     then we can't add this REG_EQUIV note.  The reg_equiv[REGNO].replace
868 	     optimization may move the set of this register immediately before
869 	     insn, which puts it after reg_equiv[REGNO].init_insns, and hence
870 	     the mention in the REG_EQUIV note would be to an uninitialized
871 	     pseudo.  */
872 	  /* ????? This test isn't good enough; we might see a MEM with a use of
873 	     a pseudo register before we see its setting insn that will cause
874 	     reg_equiv[].replace for that pseudo to be set.
875 	     Equivalences to MEMs should be made in another pass, after the
876 	     reg_equiv[].replace information has been gathered.  */
877 
878 	  if (GET_CODE (dest) == MEM && GET_CODE (src) == REG
879 	      && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
880 	      && REG_BASIC_BLOCK (regno) >= 0
881 	      && REG_N_SETS (regno) == 1
882 	      && reg_equiv[regno].init_insns != 0
883 	      && reg_equiv[regno].init_insns != const0_rtx
884 	      && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
885 				  REG_EQUIV, NULL_RTX)
886 	      && ! contains_replace_regs (XEXP (dest, 0)))
887 	    {
888 	      rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
889 	      if (validate_equiv_mem (init_insn, src, dest)
890 		  && ! memref_used_between_p (dest, init_insn, insn))
891 		REG_NOTES (init_insn)
892 		  = gen_rtx_EXPR_LIST (REG_EQUIV, dest, REG_NOTES (init_insn));
893 	    }
894 
895 	  /* We only handle the case of a pseudo register being set
896 	     once, or always to the same value.  */
897 	  /* ??? The mn10200 port breaks if we add equivalences for
898 	     values that need an ADDRESS_REGS register and set them equivalent
899 	     to a MEM of a pseudo.  The actual problem is in the over-conservative
900 	     handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
901 	     calculate_needs, but we traditionally work around this problem
902 	     here by rejecting equivalences when the destination is in a register
903 	     that's likely spilled.  This is fragile, of course, since the
904 	     preferred class of a pseudo depends on all instructions that set
905 	     or use it.  */
906 
907 	  if (GET_CODE (dest) != REG
908 	      || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
909 	      || reg_equiv[regno].init_insns == const0_rtx
910 	      || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
911 		  && GET_CODE (src) == MEM))
912 	    {
913 	      /* This might be setting a SUBREG of a pseudo, a pseudo that is
914 		 also set somewhere else to a constant.  */
915 	      note_stores (set, no_equiv, NULL);
916 	      continue;
917 	    }
918 
919 	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
920 
921 	  /* cse sometimes generates function invariants, but doesn't put a
922 	     REG_EQUAL note on the insn.  Since this note would be redundant,
923 	     there's no point creating it earlier than here.  */
924 	  if (! note && ! rtx_varies_p (src, 0))
925 	    note = set_unique_reg_note (insn, REG_EQUAL, src);
926 
927 	  /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
928 	     since it represents a function call.  */
929 	  if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
930 	    note = NULL_RTX;
931 
932 	  if (REG_N_SETS (regno) != 1
933 	      && (! note
934 		  || rtx_varies_p (XEXP (note, 0), 0)
935 		  || (reg_equiv[regno].replacement
936 		      && ! rtx_equal_p (XEXP (note, 0),
937 					reg_equiv[regno].replacement))))
938 	    {
939 	      no_equiv (dest, set, NULL);
940 	      continue;
941 	    }
942 	  /* Record this insn as initializing this register.  */
943 	  reg_equiv[regno].init_insns
944 	    = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
945 
946 	  /* If this register is known to be equal to a constant, record that
947 	     it is always equivalent to the constant.  */
948 	  if (note && ! rtx_varies_p (XEXP (note, 0), 0))
949 	    PUT_MODE (note, (enum machine_mode) REG_EQUIV);
950 
951 	  /* If this insn introduces a "constant" register, decrease the priority
952 	     of that register.  Record this insn if the register is only used once
953 	     more and the equivalence value is the same as our source.
954 
955 	     The latter condition is checked for two reasons:  First, it is an
956 	     indication that it may be more efficient to actually emit the insn
957 	     as written (if no registers are available, reload will substitute
958 	     the equivalence).  Secondly, it avoids problems with any registers
959 	     dying in this insn whose death notes would be missed.
960 
961 	     If we don't have a REG_EQUIV note, see if this insn is loading
962 	     a register used only in one basic block from a MEM.  If so, and the
963 	     MEM remains unchanged for the life of the register, add a REG_EQUIV
964 	     note.  */
965 
966 	  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
967 
968 	  if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
969 	      && GET_CODE (SET_SRC (set)) == MEM
970 	      && validate_equiv_mem (insn, dest, SET_SRC (set)))
971 	    REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
972 							 REG_NOTES (insn));
973 
974 	  if (note)
975 	    {
976 	      int regno = REGNO (dest);
977 
978 	      /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
979 		 We might end up substituting the LABEL_REF for uses of the
980 		 pseudo here or later.  That kind of transformation may turn an
981 		 indirect jump into a direct jump, in which case we must rerun the
982 		 jump optimizer to ensure that the JUMP_LABEL fields are valid.  */
983 	      if (GET_CODE (XEXP (note, 0)) == LABEL_REF
984 		  || (GET_CODE (XEXP (note, 0)) == CONST
985 		      && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
986 		      && (GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0))
987 			  == LABEL_REF)))
988 		recorded_label_ref = 1;
989 
990 	      reg_equiv[regno].replacement = XEXP (note, 0);
991 	      reg_equiv[regno].src_p = &SET_SRC (set);
992 	      reg_equiv[regno].loop_depth = loop_depth;
993 
994 	      /* Don't mess with things live during setjmp.  */
995 	      if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
996 		{
997 		  /* Note that the statement below does not affect the priority
998 		     in local-alloc!  */
999 		  REG_LIVE_LENGTH (regno) *= 2;
1000 
1001 
1002 		  /* If the register is referenced exactly twice, meaning it is
1003 		     set once and used once, indicate that the reference may be
1004 		     replaced by the equivalence we computed above.  Do this
1005 		     even if the register is only used in one block so that
1006 		     dependencies can be handled where the last register is
1007 		     used in a different block (i.e. HIGH / LO_SUM sequences)
1008 		     and to reduce the number of registers alive across
1009 		     calls.  */
1010 
1011 		    if (REG_N_REFS (regno) == 2
1012 			&& (rtx_equal_p (XEXP (note, 0), src)
1013 			    || ! equiv_init_varies_p (src))
1014 			&& GET_CODE (insn) == INSN
1015 			&& equiv_init_movable_p (PATTERN (insn), regno))
1016 		      reg_equiv[regno].replace = 1;
1017 		}
1018 	    }
1019 	}
1020     }
1021 
1022   /* Now scan all regs killed in an insn to see if any of them are
1023      registers only used that once.  If so, see if we can replace the
1024      reference with the equivalent form.  If we can, delete the
1025      initializing reference and this register will go away.  If we
1026      can't replace the reference, and the initializing reference is
1027      within the same loop (or in an inner loop), then move the register
1028      initialization just before the use, so that they are in the same
1029      basic block.  */
1030   FOR_EACH_BB_REVERSE (bb)
1031     {
1032       loop_depth = bb->loop_depth;
1033       for (insn = BB_END (bb);
1034 	   insn != PREV_INSN (BB_HEAD (bb));
1035 	   insn = PREV_INSN (insn))
1036 	{
1037 	  rtx link;
1038 
1039 	  if (! INSN_P (insn))
1040 	    continue;
1041 
1042 	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1043 	    {
1044 	      if (REG_NOTE_KIND (link) == REG_DEAD
1045 		  /* Make sure this insn still refers to the register.  */
1046 		  && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1047 		{
1048 		  int regno = REGNO (XEXP (link, 0));
1049 		  rtx equiv_insn;
1050 
1051 		  if (! reg_equiv[regno].replace
1052 		      || reg_equiv[regno].loop_depth < loop_depth)
1053 		    continue;
1054 
1055 		  /* reg_equiv[REGNO].replace gets set only when
1056 		     REG_N_REFS[REGNO] is 2, i.e. the register is set
1057 		     once and used once.  (If it were only set, but not used,
1058 		     flow would have deleted the setting insns.)  Hence
1059 		     there can only be one insn in reg_equiv[REGNO].init_insns.  */
1060 		  if (reg_equiv[regno].init_insns == NULL_RTX
1061 		      || XEXP (reg_equiv[regno].init_insns, 1) != NULL_RTX)
1062 		    abort ();
1063 		  equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1064 
1065 		  /* We may not move instructions that can throw, since
1066 		     that changes basic block boundaries and we are not
1067 		     prepared to adjust the CFG to match.  */
1068 		  if (can_throw_internal (equiv_insn))
1069 		    continue;
1070 
1071 		  if (asm_noperands (PATTERN (equiv_insn)) < 0
1072 		      && validate_replace_rtx (regno_reg_rtx[regno],
1073 					       *(reg_equiv[regno].src_p), insn))
1074 		    {
1075 		      rtx equiv_link;
1076 		      rtx last_link;
1077 		      rtx note;
1078 
1079 		      /* Find the last note.  */
1080 		      for (last_link = link; XEXP (last_link, 1);
1081 			   last_link = XEXP (last_link, 1))
1082 			;
1083 
1084 		      /* Append the REG_DEAD notes from equiv_insn.  */
1085 		      equiv_link = REG_NOTES (equiv_insn);
1086 		      while (equiv_link)
1087 			{
1088 			  note = equiv_link;
1089 			  equiv_link = XEXP (equiv_link, 1);
1090 			  if (REG_NOTE_KIND (note) == REG_DEAD)
1091 			    {
1092 			      remove_note (equiv_insn, note);
1093 			      XEXP (last_link, 1) = note;
1094 			      XEXP (note, 1) = NULL_RTX;
1095 			      last_link = note;
1096 			    }
1097 			}
1098 
1099 		      remove_death (regno, insn);
1100 		      REG_N_REFS (regno) = 0;
1101 		      REG_FREQ (regno) = 0;
1102 		      delete_insn (equiv_insn);
1103 
1104 		      reg_equiv[regno].init_insns
1105 			= XEXP (reg_equiv[regno].init_insns, 1);
1106 		    }
1107 		  /* Move the initialization of the register to just before
1108 		     INSN.  Update the flow information.  */
1109 		  else if (PREV_INSN (insn) != equiv_insn)
1110 		    {
1111 		      rtx new_insn;
1112 
1113 		      new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1114 		      REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1115 		      REG_NOTES (equiv_insn) = 0;
1116 
1117 		      /* Make sure this insn is recognized before reload begins,
1118 			 otherwise eliminate_regs_in_insn will abort.  */
1119 		      INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1120 
1121 		      delete_insn (equiv_insn);
1122 
1123 		      XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1124 
1125 		      REG_BASIC_BLOCK (regno) = bb->index;
1126 		      REG_N_CALLS_CROSSED (regno) = 0;
1127 		      REG_LIVE_LENGTH (regno) = 2;
1128 
1129 		      if (insn == BB_HEAD (bb))
1130 			BB_HEAD (bb) = PREV_INSN (insn);
1131 
1132 		      /* Remember to clear REGNO from all basic blocks' live
1133 			 info.  */
1134 		      SET_REGNO_REG_SET (&cleared_regs, regno);
1135 		      clear_regnos++;
1136 		    }
1137 		}
1138 	    }
1139 	}
1140     }
1141 
1142   /* Clear all dead REGNOs from all basic blocks' live info.  */
1143   if (clear_regnos)
1144     {
1145       int j;
1146       if (clear_regnos > 8)
1147 	{
1148 	  FOR_EACH_BB (bb)
1149 	    {
1150 	      AND_COMPL_REG_SET (bb->global_live_at_start, &cleared_regs);
1151 	      AND_COMPL_REG_SET (bb->global_live_at_end, &cleared_regs);
1152 	    }
1153 	}
1154       else
1155 	EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j,
1156 	  {
1157 	    FOR_EACH_BB (bb)
1158 	      {
1159 	        CLEAR_REGNO_REG_SET (bb->global_live_at_start, j);
1160 	        CLEAR_REGNO_REG_SET (bb->global_live_at_end, j);
1161 	      }
1162 	  });
1163     }
1164 
1165   /* Clean up.  */
1166   end_alias_analysis ();
1167   CLEAR_REG_SET (&cleared_regs);
1168   free (reg_equiv);
1169 }
1170 
1171 /* Mark REG as having no known equivalence.
1172    Some instructions might have been processed before and furnished
1173    with REG_EQUIV notes for this register; these notes will have to be
1174    removed.
1175    STORE is the piece of RTL that does the non-constant / conflicting
1176    assignment - a SET, CLOBBER or REG_INC note.  It is currently not used,
1177    but needs to be there because this function is called from note_stores.  */
1178 static void
1179 no_equiv (rtx reg, rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1180 {
1181   int regno;
1182   rtx list;
1183 
1184   if (GET_CODE (reg) != REG)
1185     return;
1186   regno = REGNO (reg);
1187   list = reg_equiv[regno].init_insns;
1188   if (list == const0_rtx)
1189     return;
1190   for (; list; list =  XEXP (list, 1))
1191     {
1192       rtx insn = XEXP (list, 0);
1193       remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1194     }
1195   reg_equiv[regno].init_insns = const0_rtx;
1196   reg_equiv[regno].replacement = NULL_RTX;
1197 }
1198 
1199 /* Allocate hard regs to the pseudo regs used only within block number B.
1200    Only the pseudos that die but once can be handled.  */
1201 
1202 static void
1203 block_alloc (int b)
1204 {
1205   int i, q;
1206   rtx insn;
1207   rtx note, hard_reg;
1208   int insn_number = 0;
1209   int insn_count = 0;
1210   int max_uid = get_max_uid ();
1211   int *qty_order;
1212   int no_conflict_combined_regno = -1;
1213 
1214   /* Count the instructions in the basic block.  */
1215 
1216   insn = BB_END (BASIC_BLOCK (b));
1217   while (1)
1218     {
1219       if (GET_CODE (insn) != NOTE)
1220 	if (++insn_count > max_uid)
1221 	  abort ();
1222       if (insn == BB_HEAD (BASIC_BLOCK (b)))
1223 	break;
1224       insn = PREV_INSN (insn);
1225     }
1226 
1227   /* +2 to leave room for a post_mark_life at the last insn and for
1228      the birth of a CLOBBER in the first insn.  */
1229   regs_live_at = xcalloc ((2 * insn_count + 2), sizeof (HARD_REG_SET));
1230 
1231   /* Initialize table of hardware registers currently live.  */
1232 
1233   REG_SET_TO_HARD_REG_SET (regs_live, BASIC_BLOCK (b)->global_live_at_start);
1234 
1235   /* This loop scans the instructions of the basic block
1236      and assigns quantities to registers.
1237      It computes which registers to tie.  */
1238 
1239   insn = BB_HEAD (BASIC_BLOCK (b));
1240   while (1)
1241     {
1242       if (GET_CODE (insn) != NOTE)
1243 	insn_number++;
1244 
1245       if (INSN_P (insn))
1246 	{
1247 	  rtx link, set;
1248 	  int win = 0;
1249 	  rtx r0, r1 = NULL_RTX;
1250 	  int combined_regno = -1;
1251 	  int i;
1252 
1253 	  this_insn_number = insn_number;
1254 	  this_insn = insn;
1255 
1256 	  extract_insn (insn);
1257 	  which_alternative = -1;
1258 
1259 	  /* Is this insn suitable for tying two registers?
1260 	     If so, try doing that.
1261 	     Suitable insns are those with at least two operands and where
1262 	     operand 0 is an output that is a register that is not
1263 	     earlyclobber.
1264 
1265 	     We can tie operand 0 with some operand that dies in this insn.
1266 	     First look for operands that are required to be in the same
1267 	     register as operand 0.  If we find such, only try tying that
1268 	     operand or one that can be put into that operand if the
1269 	     operation is commutative.  If we don't find an operand
1270 	     that is required to be in the same register as operand 0,
1271 	     we can tie with any operand.
1272 
1273 	     Subregs in place of regs are also ok.
1274 
1275 	     If tying is done, WIN is set nonzero.  */
1276 
1277 	  if (optimize
1278 	      && recog_data.n_operands > 1
1279 	      && recog_data.constraints[0][0] == '='
1280 	      && recog_data.constraints[0][1] != '&')
1281 	    {
1282 	      /* If non-negative, is an operand that must match operand 0.  */
1283 	      int must_match_0 = -1;
1284 	      /* Counts number of alternatives that require a match with
1285 		 operand 0.  */
1286 	      int n_matching_alts = 0;
1287 
1288 	      for (i = 1; i < recog_data.n_operands; i++)
1289 		{
1290 		  const char *p = recog_data.constraints[i];
1291 		  int this_match = requires_inout (p);
1292 
1293 		  n_matching_alts += this_match;
1294 		  if (this_match == recog_data.n_alternatives)
1295 		    must_match_0 = i;
1296 		}
1297 
1298 	      r0 = recog_data.operand[0];
1299 	      for (i = 1; i < recog_data.n_operands; i++)
1300 		{
1301 		  /* Skip this operand if we found an operand that
1302 		     must match operand 0 and this operand isn't it
1303 		     and can't be made to be it by commutativity.  */
1304 
1305 		  if (must_match_0 >= 0 && i != must_match_0
1306 		      && ! (i == must_match_0 + 1
1307 			    && recog_data.constraints[i-1][0] == '%')
1308 		      && ! (i == must_match_0 - 1
1309 			    && recog_data.constraints[i][0] == '%'))
1310 		    continue;
1311 
1312 		  /* Likewise if each alternative has some operand that
1313 		     must match operand zero.  In that case, skip any
1314 		     operand that doesn't list operand 0 since we know that
1315 		     the operand always conflicts with operand 0.  We
1316 		     ignore commutativity in this case to keep things simple.  */
1317 		  if (n_matching_alts == recog_data.n_alternatives
1318 		      && 0 == requires_inout (recog_data.constraints[i]))
1319 		    continue;
1320 
1321 		  r1 = recog_data.operand[i];
1322 
1323 		  /* If the operand is an address, find a register in it.
1324 		     There may be more than one register, but we only try one
1325 		     of them.  */
1326 		  if (recog_data.constraints[i][0] == 'p'
1327 		      || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
1328 						   recog_data.constraints[i]))
1329 		    while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1330 		      r1 = XEXP (r1, 0);
1331 
1332 		  /* Avoid making a call-saved register unnecessarily
1333                      clobbered.  */
1334 		  hard_reg = get_hard_reg_initial_reg (cfun, r1);
1335 		  if (hard_reg != NULL_RTX)
1336 		    {
1337 		      if (GET_CODE (hard_reg) == REG
1338 			  && IN_RANGE (REGNO (hard_reg),
1339 				       0, FIRST_PSEUDO_REGISTER - 1)
1340 			  && ! call_used_regs[REGNO (hard_reg)])
1341 			continue;
1342 		    }
1343 
1344 		  if (GET_CODE (r0) == REG || GET_CODE (r0) == SUBREG)
1345 		    {
1346 		      /* We have two priorities for hard register preferences.
1347 			 If we have a move insn or an insn whose first input
1348 			 can only be in the same register as the output, give
1349 			 priority to an equivalence found from that insn.  */
1350 		      int may_save_copy
1351 			= (r1 == recog_data.operand[i] && must_match_0 >= 0);
1352 
1353 		      if (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1354 			win = combine_regs (r1, r0, may_save_copy,
1355 					    insn_number, insn, 0);
1356 		    }
1357 		  if (win)
1358 		    break;
1359 		}
1360 	    }
1361 
1362 	  /* Recognize an insn sequence with an ultimate result
1363 	     which can safely overlap one of the inputs.
1364 	     The sequence begins with a CLOBBER of its result,
1365 	     and ends with an insn that copies the result to itself
1366 	     and has a REG_EQUAL note for an equivalent formula.
1367 	     That note indicates what the inputs are.
1368 	     The result and the input can overlap if each insn in
1369 	     the sequence either doesn't mention the input
1370 	     or has a REG_NO_CONFLICT note to inhibit the conflict.
1371 
1372 	     We do the combining test at the CLOBBER so that the
1373 	     destination register won't have had a quantity number
1374 	     assigned, since that would prevent combining.  */
1375 
1376 	  if (optimize
1377 	      && GET_CODE (PATTERN (insn)) == CLOBBER
1378 	      && (r0 = XEXP (PATTERN (insn), 0),
1379 		  GET_CODE (r0) == REG)
1380 	      && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1381 	      && XEXP (link, 0) != 0
1382 	      && GET_CODE (XEXP (link, 0)) == INSN
1383 	      && (set = single_set (XEXP (link, 0))) != 0
1384 	      && SET_DEST (set) == r0 && SET_SRC (set) == r0
1385 	      && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1386 					NULL_RTX)) != 0)
1387 	    {
1388 	      if (r1 = XEXP (note, 0), GET_CODE (r1) == REG
1389 		  /* Check that we have such a sequence.  */
1390 		  && no_conflict_p (insn, r0, r1))
1391 		win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1392 	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1393 		       && (r1 = XEXP (XEXP (note, 0), 0),
1394 			   GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1395 		       && no_conflict_p (insn, r0, r1))
1396 		win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1397 
1398 	      /* Here we care if the operation to be computed is
1399 		 commutative.  */
1400 	      else if ((GET_CODE (XEXP (note, 0)) == EQ
1401 			|| GET_CODE (XEXP (note, 0)) == NE
1402 			|| GET_RTX_CLASS (GET_CODE (XEXP (note, 0))) == 'c')
1403 		       && (r1 = XEXP (XEXP (note, 0), 1),
1404 			   (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG))
1405 		       && no_conflict_p (insn, r0, r1))
1406 		win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1407 
1408 	      /* If we did combine something, show the register number
1409 		 in question so that we know to ignore its death.  */
1410 	      if (win)
1411 		no_conflict_combined_regno = REGNO (r1);
1412 	    }
1413 
1414 	  /* If registers were just tied, set COMBINED_REGNO
1415 	     to the number of the register used in this insn
1416 	     that was tied to the register set in this insn.
1417 	     This register's qty should not be "killed".  */
1418 
1419 	  if (win)
1420 	    {
1421 	      while (GET_CODE (r1) == SUBREG)
1422 		r1 = SUBREG_REG (r1);
1423 	      combined_regno = REGNO (r1);
1424 	    }
1425 
1426 	  /* Mark the death of everything that dies in this instruction,
1427 	     except for anything that was just combined.  */
1428 
1429 	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1430 	    if (REG_NOTE_KIND (link) == REG_DEAD
1431 		&& GET_CODE (XEXP (link, 0)) == REG
1432 		&& combined_regno != (int) REGNO (XEXP (link, 0))
1433 		&& (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1434 		    || ! find_reg_note (insn, REG_NO_CONFLICT,
1435 					XEXP (link, 0))))
1436 	      wipe_dead_reg (XEXP (link, 0), 0);
1437 
1438 	  /* Allocate qty numbers for all registers local to this block
1439 	     that are born (set) in this instruction.
1440 	     A pseudo that already has a qty is not changed.  */
1441 
1442 	  note_stores (PATTERN (insn), reg_is_set, NULL);
1443 
1444 	  /* If anything is set in this insn and then unused, mark it as dying
1445 	     after this insn, so it will conflict with our outputs.  This
1446 	     can't match with something that combined, and it doesn't matter
1447 	     if it did.  Do this after the calls to reg_is_set since these
1448 	     die after, not during, the current insn.  */
1449 
1450 	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1451 	    if (REG_NOTE_KIND (link) == REG_UNUSED
1452 		&& GET_CODE (XEXP (link, 0)) == REG)
1453 	      wipe_dead_reg (XEXP (link, 0), 1);
1454 
1455 	  /* If this is an insn that has a REG_RETVAL note pointing at a
1456 	     CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1457 	     block, so clear any register number that combined within it.  */
1458 	  if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1459 	      && GET_CODE (XEXP (note, 0)) == INSN
1460 	      && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1461 	    no_conflict_combined_regno = -1;
1462 	}
1463 
1464       /* Set the registers live after INSN_NUMBER.  Note that we never
1465 	 record the registers live before the block's first insn, since no
1466 	 pseudos we care about are live before that insn.  */
1467 
1468       IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1469       IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1470 
1471       if (insn == BB_END (BASIC_BLOCK (b)))
1472 	break;
1473 
1474       insn = NEXT_INSN (insn);
1475     }
1476 
1477   /* Now every register that is local to this basic block
1478      should have been given a quantity, or else -1 meaning ignore it.
1479      Every quantity should have a known birth and death.
1480 
1481      Order the qtys so we assign them registers in order of the
1482      number of suggested registers they need so we allocate those with
1483      the most restrictive needs first.  */
1484 
1485   qty_order = xmalloc (next_qty * sizeof (int));
1486   for (i = 0; i < next_qty; i++)
1487     qty_order[i] = i;
1488 
1489 #define EXCHANGE(I1, I2)  \
1490   { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1491 
1492   switch (next_qty)
1493     {
1494     case 3:
1495       /* Make qty_order[2] be the one to allocate last.  */
1496       if (qty_sugg_compare (0, 1) > 0)
1497 	EXCHANGE (0, 1);
1498       if (qty_sugg_compare (1, 2) > 0)
1499 	EXCHANGE (2, 1);
1500 
1501       /* ... Fall through ...  */
1502     case 2:
1503       /* Put the best one to allocate in qty_order[0].  */
1504       if (qty_sugg_compare (0, 1) > 0)
1505 	EXCHANGE (0, 1);
1506 
1507       /* ... Fall through ...  */
1508 
1509     case 1:
1510     case 0:
1511       /* Nothing to do here.  */
1512       break;
1513 
1514     default:
1515       qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1516     }
1517 
1518   /* Try to put each quantity in a suggested physical register, if it has one.
1519      This may cause registers to be allocated that otherwise wouldn't be, but
1520      this seems acceptable in local allocation (unlike global allocation).  */
1521   for (i = 0; i < next_qty; i++)
1522     {
1523       q = qty_order[i];
1524       if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1525 	qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1526 					 0, 1, qty[q].birth, qty[q].death);
1527       else
1528 	qty[q].phys_reg = -1;
1529     }
1530 
1531   /* Order the qtys so we assign them registers in order of
1532      decreasing length of life.  Normally call qsort, but if we
1533      have only a very small number of quantities, sort them ourselves.  */
1534 
1535   for (i = 0; i < next_qty; i++)
1536     qty_order[i] = i;
1537 
1538 #define EXCHANGE(I1, I2)  \
1539   { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1540 
1541   switch (next_qty)
1542     {
1543     case 3:
1544       /* Make qty_order[2] be the one to allocate last.  */
1545       if (qty_compare (0, 1) > 0)
1546 	EXCHANGE (0, 1);
1547       if (qty_compare (1, 2) > 0)
1548 	EXCHANGE (2, 1);
1549 
1550       /* ... Fall through ...  */
1551     case 2:
1552       /* Put the best one to allocate in qty_order[0].  */
1553       if (qty_compare (0, 1) > 0)
1554 	EXCHANGE (0, 1);
1555 
1556       /* ... Fall through ...  */
1557 
1558     case 1:
1559     case 0:
1560       /* Nothing to do here.  */
1561       break;
1562 
1563     default:
1564       qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1565     }
1566 
1567   /* Now for each qty that is not a hardware register,
1568      look for a hardware register to put it in.
1569      First try the register class that is cheapest for this qty,
1570      if there is more than one class.  */
1571 
1572   for (i = 0; i < next_qty; i++)
1573     {
1574       q = qty_order[i];
1575       if (qty[q].phys_reg < 0)
1576 	{
1577 #ifdef INSN_SCHEDULING
1578 	  /* These values represent the adjusted lifetime of a qty so
1579 	     that it conflicts with qtys which appear near the start/end
1580 	     of this qty's lifetime.
1581 
1582 	     The purpose behind extending the lifetime of this qty is to
1583 	     discourage the register allocator from creating false
1584 	     dependencies.
1585 
1586 	     The adjustment value is chosen to indicate that this qty
1587 	     conflicts with all the qtys in the instructions immediately
1588 	     before and after the lifetime of this qty.
1589 
1590 	     Experiments have shown that higher values tend to hurt
1591 	     overall code performance.
1592 
1593 	     If allocation using the extended lifetime fails we will try
1594 	     again with the qty's unadjusted lifetime.  */
1595 	  int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1596 	  int fake_death = MIN (insn_number * 2 + 1,
1597 				qty[q].death + 2 - qty[q].death % 2);
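
	  /* Illustration (not from the original comments): life indices are
	     2 * N at insn N and 2 * N + 1 just after it, so a qty born at
	     insn 5 (index 10) and dying at insn 8 (index 16) is given the
	     fake range [8, 18), which also overlaps qtys set in insn 4 or
	     used in insn 9.  */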
1598 #endif
1599 
1600 	  if (N_REG_CLASSES > 1)
1601 	    {
1602 #ifdef INSN_SCHEDULING
1603 	      /* We try to avoid using hard registers allocated to qtys which
1604 		 are born immediately after this qty or die immediately before
1605 		 this qty.
1606 
1607 		 This optimization is only appropriate when we will run
1608 		 a scheduling pass after reload and we are not optimizing
1609 		 for code size.  */
1610 	      if (flag_schedule_insns_after_reload
1611 		  && !optimize_size
1612 		  && !SMALL_REGISTER_CLASSES)
1613 		{
1614 		  qty[q].phys_reg = find_free_reg (qty[q].min_class,
1615 						   qty[q].mode, q, 0, 0,
1616 						   fake_birth, fake_death);
1617 		  if (qty[q].phys_reg >= 0)
1618 		    continue;
1619 		}
1620 #endif
1621 	      qty[q].phys_reg = find_free_reg (qty[q].min_class,
1622 					       qty[q].mode, q, 0, 0,
1623 					       qty[q].birth, qty[q].death);
1624 	      if (qty[q].phys_reg >= 0)
1625 		continue;
1626 	    }
1627 
1628 #ifdef INSN_SCHEDULING
1629 	  /* Similarly, avoid false dependencies.  */
1630 	  if (flag_schedule_insns_after_reload
1631 	      && !optimize_size
1632 	      && !SMALL_REGISTER_CLASSES
1633 	      && qty[q].alternate_class != NO_REGS)
1634 	    qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1635 					     qty[q].mode, q, 0, 0,
1636 					     fake_birth, fake_death);
1637 #endif
1638 	  if (qty[q].alternate_class != NO_REGS)
1639 	    qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1640 					     qty[q].mode, q, 0, 0,
1641 					     qty[q].birth, qty[q].death);
1642 	}
1643     }
1644 
1645   /* Now propagate the register assignments
1646      to the pseudo regs belonging to the qtys.  */
1647 
1648   for (q = 0; q < next_qty; q++)
1649     if (qty[q].phys_reg >= 0)
1650       {
1651 	for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1652 	  reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1653       }
1654 
1655   /* Clean up.  */
1656   free (regs_live_at);
1657   free (qty_order);
1658 }
1659 
1660 /* Compare two quantities' priority for getting real registers.
1661    We give shorter-lived quantities higher priority.
1662    Quantities with more references are also preferred, as are quantities that
1663    require multiple registers.  This prioritization is identical to the
1664    one done by global-alloc.
1665 
1666    We used to give preference to registers with *longer* lives, but using
1667    the same algorithm in both local- and global-alloc can speed up execution
1668    of some programs by as much as a factor of three!  */
1669 
1670 /* Note that the quotient will never be bigger than
1671    the value of floor_log2 times the maximum number of
1672    times a register can occur in one insn (surely less than 100)
1673    weighted by frequency (max REG_FREQ_MAX).
1674    Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1675    QTY_CMP_PRI is also used by qty_sugg_compare.  */
1676 
1677 #define QTY_CMP_PRI(q)		\
1678   ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1679 	  / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
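
/* A rough worked example (illustrative, not from the original sources):
   a qty with n_refs == 4 (floor_log2 == 2), freq == REG_FREQ_MAX, size 1
   and a lifetime of 10 life indices gets priority
   (2 * REG_FREQ_MAX / 10) * (10000 / REG_FREQ_MAX) == 2000, assuming
   REG_FREQ_MAX divides 10000 evenly; halving the lifetime doubles the
   priority, which is how shorter-lived quantities win.  */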
1680 
1681 static int
1682 qty_compare (int q1, int q2)
1683 {
1684   return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1685 }
1686 
1687 static int
1688 qty_compare_1 (const void *q1p, const void *q2p)
1689 {
1690   int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1691   int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1692 
1693   if (tem != 0)
1694     return tem;
1695 
1696   /* If qtys are equally good, sort by qty number,
1697      so that the results of qsort leave nothing to chance.  */
1698   return q1 - q2;
1699 }
1700 
1701 /* Compare two quantities' priority for getting real registers.  This version
1702    is called for quantities that have suggested hard registers.  First priority
1703    goes to quantities that have copy preferences, then to those that have
1704    normal preferences.  Within those groups, quantities with the lower
1705    number of preferences have the highest priority.  Of those, we use the same
1706    algorithm as above.  */
1707 
1708 #define QTY_CMP_SUGG(q)		\
1709   (qty_phys_num_copy_sugg[q]		\
1710     ? qty_phys_num_copy_sugg[q]	\
1711     : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
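
/* A qty can have at most FIRST_PSEUDO_REGISTER distinct copy suggestions,
   so scaling the ordinary suggestion count by FIRST_PSEUDO_REGISTER
   ensures that a qty with a copy suggestion never gets a larger sort key
   than one that has only ordinary suggestions.  */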
1712 
1713 static int
1714 qty_sugg_compare (int q1, int q2)
1715 {
1716   int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1717 
1718   if (tem != 0)
1719     return tem;
1720 
1721   return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1722 }
1723 
1724 static int
1725 qty_sugg_compare_1 (const void *q1p, const void *q2p)
1726 {
1727   int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1728   int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1729 
1730   if (tem != 0)
1731     return tem;
1732 
1733   tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1734   if (tem != 0)
1735     return tem;
1736 
1737   /* If qtys are equally good, sort by qty number,
1738      so that the results of qsort leave nothing to chance.  */
1739   return q1 - q2;
1740 }
1741 
1742 #undef QTY_CMP_SUGG
1743 #undef QTY_CMP_PRI
1744 
1745 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1746    Returns 1 if we have done so, or 0 if we cannot.
1747 
1748    Combining registers means marking them as having the same quantity
1749    and adjusting the offsets within the quantity if either of
1750    them is a SUBREG.
1751 
1752    We don't actually combine a hard reg with a pseudo; instead
1753    we just record the hard reg as the suggestion for the pseudo's quantity.
1754    If we really combined them, we could lose if the pseudo lives
1755    across an insn that clobbers the hard reg (e.g., movstr).
1756 
1757    ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1758    there is no REG_DEAD note on INSN.  This occurs during the processing
1759    of REG_NO_CONFLICT blocks.
1760 
1761    MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1762    SETREG or if the input and output must share a register.
1763    In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1764 
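   As an illustration (not from the original comments): given an insn
   (set (reg 131) (plus (reg 128) (const_int 4))) in which (reg 128) dies,
   the two pseudos end up sharing a quantity, so the addition can be done
   in place in whatever hard register the quantity finally receives.
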
1765    There are elaborate checks for the validity of combining.  */
1766 
1767 static int
1768 combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
1769 	      rtx insn, int already_dead)
1770 {
1771   int ureg, sreg;
1772   int offset = 0;
1773   int usize, ssize;
1774   int sqty;
1775 
1776   /* Determine the numbers and sizes of registers being used.  If a subreg
1777      is present that does not change the entire register, don't consider
1778      this a copy insn.  */
1779 
1780   while (GET_CODE (usedreg) == SUBREG)
1781     {
1782       rtx subreg = SUBREG_REG (usedreg);
1783 
1784       if (GET_CODE (subreg) == REG)
1785 	{
1786 	  if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1787 	    may_save_copy = 0;
1788 
1789 	  if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1790 	    offset += subreg_regno_offset (REGNO (subreg),
1791 					   GET_MODE (subreg),
1792 					   SUBREG_BYTE (usedreg),
1793 					   GET_MODE (usedreg));
1794 	  else
1795 	    offset += (SUBREG_BYTE (usedreg)
1796 		      / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1797 	}
1798 
1799       usedreg = subreg;
1800     }
1801 
1802   if (GET_CODE (usedreg) != REG)
1803     return 0;
1804 
1805   ureg = REGNO (usedreg);
1806   if (ureg < FIRST_PSEUDO_REGISTER)
1807     usize = HARD_REGNO_NREGS (ureg, GET_MODE (usedreg));
1808   else
1809     usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1810 	      + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1811 	     / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1812 
1813   while (GET_CODE (setreg) == SUBREG)
1814     {
1815       rtx subreg = SUBREG_REG (setreg);
1816 
1817       if (GET_CODE (subreg) == REG)
1818 	{
1819 	  if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1820 	    may_save_copy = 0;
1821 
1822 	  if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1823 	    offset -= subreg_regno_offset (REGNO (subreg),
1824 					   GET_MODE (subreg),
1825 					   SUBREG_BYTE (setreg),
1826 					   GET_MODE (setreg));
1827 	  else
1828 	    offset -= (SUBREG_BYTE (setreg)
1829 		      / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1830 	}
1831 
1832       setreg = subreg;
1833     }
1834 
1835   if (GET_CODE (setreg) != REG)
1836     return 0;
1837 
1838   sreg = REGNO (setreg);
1839   if (sreg < FIRST_PSEUDO_REGISTER)
1840     ssize = HARD_REGNO_NREGS (sreg, GET_MODE (setreg));
1841   else
1842     ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1843 	      + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1844 	     / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1845 
1846   /* If UREG is a pseudo-register that hasn't already been assigned a
1847      quantity number, it means that it is not local to this block or dies
1848      more than once.  In either event, we can't do anything with it.  */
1849   if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1850       /* Do not combine registers unless one fits within the other.  */
1851       || (offset > 0 && usize + offset > ssize)
1852       || (offset < 0 && usize + offset < ssize)
1853       /* Do not combine with a smaller already-assigned object
1854 	 if that smaller object is already combined with something bigger.  */
1855       || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1856 	  && usize < qty[reg_qty[ureg]].size)
1857       /* Can't combine if SREG is not a register we can allocate.  */
1858       || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1859       /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1860 	 These have already been taken care of.  This probably wouldn't
1861 	 combine anyway, but don't take any chances.  */
1862       || (ureg >= FIRST_PSEUDO_REGISTER
1863 	  && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1864       /* Don't tie something to itself.  In most cases it would make no
1865 	 difference, but it would screw up if the reg being tied to itself
1866 	 also dies in this insn.  */
1867       || ureg == sreg
1868       /* Don't try to connect two different hardware registers.  */
1869       || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1870       /* Don't connect two different machine modes if they have different
1871 	 implications as to which registers may be used.  */
1872       || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1873     return 0;
1874 
1875   /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1876      qty_phys_sugg for the pseudo instead of tying them.
1877 
1878      Return "failure" so that the lifespan of UREG is terminated here;
1879      that way the two lifespans will be disjoint and nothing will prevent
1880      the pseudo reg from being given this hard reg.  */
1881 
1882   if (ureg < FIRST_PSEUDO_REGISTER)
1883     {
1884       /* Allocate a quantity number so we have a place to put our
1885 	 suggestions.  */
1886       if (reg_qty[sreg] == -2)
1887 	reg_is_born (setreg, 2 * insn_number);
1888 
1889       if (reg_qty[sreg] >= 0)
1890 	{
1891 	  if (may_save_copy
1892 	      && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1893 	    {
1894 	      SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1895 	      qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1896 	    }
1897 	  else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1898 	    {
1899 	      SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1900 	      qty_phys_num_sugg[reg_qty[sreg]]++;
1901 	    }
1902 	}
1903       return 0;
1904     }
1905 
1906   /* Similarly for SREG a hard register and UREG a pseudo register.  */
1907 
1908   if (sreg < FIRST_PSEUDO_REGISTER)
1909     {
1910       if (may_save_copy
1911 	  && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1912 	{
1913 	  SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1914 	  qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1915 	}
1916       else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1917 	{
1918 	  SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1919 	  qty_phys_num_sugg[reg_qty[ureg]]++;
1920 	}
1921       return 0;
1922     }
1923 
1924   /* At this point we know that SREG and UREG are both pseudos.
1925      Do nothing if SREG already has a quantity or is a register that we
1926      don't allocate.  */
1927   if (reg_qty[sreg] >= -1
1928       /* If we are not going to let any regs live across calls,
1929 	 don't tie a call-crossing reg to a non-call-crossing reg.  */
1930       || (current_function_has_nonlocal_label
1931 	  && ((REG_N_CALLS_CROSSED (ureg) > 0)
1932 	      != (REG_N_CALLS_CROSSED (sreg) > 0))))
1933     return 0;
1934 
1935   /* We don't already know about SREG, so tie it to UREG
1936      if this is the last use of UREG, provided the classes they want
1937      are compatible.  */
1938 
1939   if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
1940       && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
1941     {
1942       /* Add SREG to UREG's quantity.  */
1943       sqty = reg_qty[ureg];
1944       reg_qty[sreg] = sqty;
1945       reg_offset[sreg] = reg_offset[ureg] + offset;
1946       reg_next_in_qty[sreg] = qty[sqty].first_reg;
1947       qty[sqty].first_reg = sreg;
1948 
1949       /* If SREG's reg class is smaller, set qty[SQTY].min_class.  */
1950       update_qty_class (sqty, sreg);
1951 
1952       /* Update info about quantity SQTY.  */
1953       qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
1954       qty[sqty].n_refs += REG_N_REFS (sreg);
1955       qty[sqty].freq += REG_FREQ (sreg);
1956       if (usize < ssize)
1957 	{
1958 	  int i;
1959 
1960 	  for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
1961 	    reg_offset[i] -= offset;
1962 
1963 	  qty[sqty].size = ssize;
1964 	  qty[sqty].mode = GET_MODE (setreg);
1965 	}
1966     }
1967   else
1968     return 0;
1969 
1970   return 1;
1971 }
1972 
1973 /* Return 1 if the preferred class of REG allows it to be tied
1974    to a quantity or register whose class is CLASS.
1975    True if REG's reg class either contains or is contained in CLASS.  */
1976 
1977 static int
1978 reg_meets_class_p (int reg, enum reg_class class)
1979 {
1980   enum reg_class rclass = reg_preferred_class (reg);
1981   return (reg_class_subset_p (rclass, class)
1982 	  || reg_class_subset_p (class, rclass));
1983 }
1984 
1985 /* Update the class of QTYNO assuming that REG is being tied to it.  */
1986 
1987 static void
1988 update_qty_class (int qtyno, int reg)
1989 {
1990   enum reg_class rclass = reg_preferred_class (reg);
1991   if (reg_class_subset_p (rclass, qty[qtyno].min_class))
1992     qty[qtyno].min_class = rclass;
1993 
1994   rclass = reg_alternate_class (reg);
1995   if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
1996     qty[qtyno].alternate_class = rclass;
1997 }
1998 
1999 /* Handle something which alters the value of an rtx REG.
2000 
2001    REG is whatever is set or clobbered.  SETTER is the rtx that
2002    is modifying the register.
2003 
2004    If it is not really a register, we do nothing.
2005    The file-global variables `this_insn' and `this_insn_number'
2006    carry info from `block_alloc'.  */
2007 
2008 static void
2009 reg_is_set (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
2010 {
2011   /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2012      a hard register.  These may actually not exist any more.  */
2013 
2014   if (GET_CODE (reg) != SUBREG
2015       && GET_CODE (reg) != REG)
2016     return;
2017 
2018   /* Mark this register as being born.  If it is used in a CLOBBER, mark
2019      it as being born halfway between the previous insn and this insn so that
2020      it conflicts with our inputs but not the outputs of the previous insn.  */
2021 
2022   reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2023 }
2024 
2025 /* Handle beginning of the life of register REG.
2026    BIRTH is the index at which this is happening.  */
2027 
2028 static void
2029 reg_is_born (rtx reg, int birth)
2030 {
2031   int regno;
2032 
2033   if (GET_CODE (reg) == SUBREG)
2034     {
2035       regno = REGNO (SUBREG_REG (reg));
2036       if (regno < FIRST_PSEUDO_REGISTER)
2037 	regno = subreg_hard_regno (reg, 1);
2038     }
2039   else
2040     regno = REGNO (reg);
2041 
2042   if (regno < FIRST_PSEUDO_REGISTER)
2043     {
2044       mark_life (regno, GET_MODE (reg), 1);
2045 
2046       /* If the register was to have been born earlier than the present
2047 	 insn, mark it as live where it is actually born.  */
2048       if (birth < 2 * this_insn_number)
2049 	post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2050     }
2051   else
2052     {
2053       if (reg_qty[regno] == -2)
2054 	alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2055 
2056       /* If this register has a quantity number, show that it isn't dead.  */
2057       if (reg_qty[regno] >= 0)
2058 	qty[reg_qty[regno]].death = -1;
2059     }
2060 }
2061 
2062 /* Record the death of REG in the current insn.  If OUTPUT_P is nonzero,
2063    REG is an output that is dying (i.e., it is never used), otherwise it
2064    is an input (the normal case).
2065    If OUTPUT_P is 1, then we extend the life past the end of this insn.  */
2066 
2067 static void
2068 wipe_dead_reg (rtx reg, int output_p)
2069 {
2070   int regno = REGNO (reg);
2071 
2072   /* If this insn has multiple results,
2073      and the dead reg is used in one of the results,
2074      extend its life to after this insn,
2075      so it won't get allocated together with any other result of this insn.
2076 
2077      It is unsafe to use !single_set here since it will ignore an unused
2078      output.  Just because an output is unused does not mean the compiler
2079      can assume the side effect will not occur.   Consider if REG appears
2080      in the address of an output and we reload the output.  If we allocate
2081      REG to the same hard register as an unused output we could set the hard
2082      register before the output reload insn.  */
2083   if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2084       && multiple_sets (this_insn))
2085     {
2086       int i;
2087       for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2088 	{
2089 	  rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2090 	  if (GET_CODE (set) == SET
2091 	      && GET_CODE (SET_DEST (set)) != REG
2092 	      && !rtx_equal_p (reg, SET_DEST (set))
2093 	      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2094 	    output_p = 1;
2095 	}
2096     }
2097 
2098   /* If this register is used in an auto-increment address, then extend its
2099      life to after this insn, so that it won't get allocated together with
2100      the result of this insn.  */
2101   if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2102     output_p = 1;
2103 
2104   if (regno < FIRST_PSEUDO_REGISTER)
2105     {
2106       mark_life (regno, GET_MODE (reg), 0);
2107 
2108       /* If a hard register is dying as an output, mark it as in use at
2109 	 the beginning of this insn (the above statement would cause this
2110 	 not to happen).  */
2111       if (output_p)
2112 	post_mark_life (regno, GET_MODE (reg), 1,
2113 			2 * this_insn_number, 2 * this_insn_number + 1);
2114     }
2115 
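  /* For a pseudo with a quantity, record the death index: 2 * N for insn
     number N if it dies as an input, and one index later if OUTPUT_P so
     that its life extends past this insn and it cannot share a register
     with this insn's other results.  */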
2116   else if (reg_qty[regno] >= 0)
2117     qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2118 }
2119 
2120 /* Find a block of SIZE words of hard regs in reg_class CLASS
2121    that can hold something of machine-mode MODE
2122      (but actually we test only the first of the block for holding MODE)
2123    and still free between insn BORN_INDEX and insn DEAD_INDEX,
2124    and return the number of the first of them.
2125    Return -1 if such a block cannot be found.
2126    If QTYNO crosses calls, insist on a register preserved by calls,
2127    unless ACCEPT_CALL_CLOBBERED is nonzero.
2128 
2129    If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2130    register is available.  If not, return -1.  */
2131 
2132 static int
2133 find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
2134 	       int accept_call_clobbered, int just_try_suggested,
2135 	       int born_index, int dead_index)
2136 {
2137   int i, ins;
2138   HARD_REG_SET first_used, used;
2139 #ifdef ELIMINABLE_REGS
2140   static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2141 #endif
2142 
2143   /* Validate our parameters.  */
2144   if (born_index < 0 || born_index > dead_index)
2145     abort ();
2146 
2147   /* Don't let a pseudo live in a reg across a function call
2148      if we might get a nonlocal goto.  */
2149   if (current_function_has_nonlocal_label
2150       && qty[qtyno].n_calls_crossed > 0)
2151     return -1;
2152 
2153   if (accept_call_clobbered)
2154     COPY_HARD_REG_SET (used, call_fixed_reg_set);
2155   else if (qty[qtyno].n_calls_crossed == 0)
2156     COPY_HARD_REG_SET (used, fixed_reg_set);
2157   else
2158     COPY_HARD_REG_SET (used, call_used_reg_set);
2159 
2160   if (accept_call_clobbered)
2161     IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2162 
2163   for (ins = born_index; ins < dead_index; ins++)
2164     IOR_HARD_REG_SET (used, regs_live_at[ins]);
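
  /* USED now contains every hard register that is either excluded above or
     live at some life index in [BORN_INDEX, DEAD_INDEX); adding the
     complement of CLASS below also rules out everything outside the
     requested class.  */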
2165 
2166   IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2167 
2168   /* Don't use the frame pointer reg in local-alloc even if
2169      we may omit the frame pointer, because if we do that and then we
2170      need a frame pointer, reload won't know how to move the pseudo
2171      to another hard reg.  It can move only regs made by global-alloc.
2172 
2173      This is true of any register that can be eliminated.  */
2174 #ifdef ELIMINABLE_REGS
2175   for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2176     SET_HARD_REG_BIT (used, eliminables[i].from);
2177 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2178   /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2179      that it might be eliminated into.  */
2180   SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2181 #endif
2182 #else
2183   SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2184 #endif
2185 
2186 #ifdef CANNOT_CHANGE_MODE_CLASS
2187   cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2188 #endif
2189 
2190   /* Normally, the registers that can be used for the first register in
2191      a multi-register quantity are the same as those that can be used for
2192      subsequent registers.  However, if just trying suggested registers,
2193      restrict our consideration to them.  If there are copy-suggested
2194 		 registers, try them.  Otherwise, try the arithmetic-suggested
2195      registers.  */
2196   COPY_HARD_REG_SET (first_used, used);
2197 
2198   if (just_try_suggested)
2199     {
2200       if (qty_phys_num_copy_sugg[qtyno] != 0)
2201 	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2202       else
2203 	IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2204     }
2205 
2206   /* If all registers are excluded, we can't do anything.  */
2207   GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);
2208 
2209   /* If at least one would be suitable, test each hard reg.  */
2210 
2211   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2212     {
2213 #ifdef REG_ALLOC_ORDER
2214       int regno = reg_alloc_order[i];
2215 #else
2216       int regno = i;
2217 #endif
2218       if (! TEST_HARD_REG_BIT (first_used, regno)
2219 	  && HARD_REGNO_MODE_OK (regno, mode)
2220 	  && (qty[qtyno].n_calls_crossed == 0
2221 	      || accept_call_clobbered
2222 	      || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2223 	{
2224 	  int j;
2225 	  int size1 = HARD_REGNO_NREGS (regno, mode);
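	  /* Check that the SIZE1 - 1 following hard regs are free as well;
	     J reaches SIZE1 exactly when the whole block REGNO .. REGNO +
	     SIZE1 - 1 is available.  */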
2226 	  for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
2227 	  if (j == size1)
2228 	    {
2229 	      /* Mark that this register is in use between its birth and death
2230 		 insns.  */
2231 	      post_mark_life (regno, mode, 1, born_index, dead_index);
2232 	      return regno;
2233 	    }
2234 #ifndef REG_ALLOC_ORDER
2235 	  /* Skip starting points we know will lose.  */
2236 	  i += j;
2237 #endif
2238 	}
2239     }
2240 
2241  fail:
2242   /* If we were only trying the suggested registers and have just tried
2243      the copy-suggested ones, and there are also arithmetic-suggested
2244      registers, try those now.  */
2245 
2246   /* If it would be profitable to allocate a call-clobbered register
2247      and save and restore it around calls, do that.  */
2248   if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2249       && qty_phys_num_sugg[qtyno] != 0)
2250     {
2251       /* Don't try the copy-suggested regs again.  */
2252       qty_phys_num_copy_sugg[qtyno] = 0;
2253       return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2254 			    born_index, dead_index);
2255     }
2256 
2257   /* We need not check to see if the current function has nonlocal
2258      labels because we don't put any pseudos that are live over calls in
2259      registers in that case.  */
2260 
2261   if (! accept_call_clobbered
2262       && flag_caller_saves
2263       && ! just_try_suggested
2264       && qty[qtyno].n_calls_crossed != 0
2265       && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2266 				 qty[qtyno].n_calls_crossed))
2267     {
2268       i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2269       if (i >= 0)
2270 	caller_save_needed = 1;
2271       return i;
2272     }
2273   return -1;
2274 }
2275 
2276 /* Mark that REGNO with machine-mode MODE is live starting from the current
2277    insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2278    is zero).  */
2279 
2280 static void
2281 mark_life (int regno, enum machine_mode mode, int life)
2282 {
2283   int j = HARD_REGNO_NREGS (regno, mode);
2284   if (life)
2285     while (--j >= 0)
2286       SET_HARD_REG_BIT (regs_live, regno + j);
2287   else
2288     while (--j >= 0)
2289       CLEAR_HARD_REG_BIT (regs_live, regno + j);
2290 }
2291 
2292 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2293    is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2294    to insn number DEATH (exclusive).  */
2295 
2296 static void
2297 post_mark_life (int regno, enum machine_mode mode, int life, int birth,
2298 		int death)
2299 {
2300   int j = HARD_REGNO_NREGS (regno, mode);
2301   HARD_REG_SET this_reg;
2302 
2303   CLEAR_HARD_REG_SET (this_reg);
2304   while (--j >= 0)
2305     SET_HARD_REG_BIT (this_reg, regno + j);
2306 
2307   if (life)
2308     while (birth < death)
2309       {
2310 	IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2311 	birth++;
2312       }
2313   else
2314     while (birth < death)
2315       {
2316 	AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2317 	birth++;
2318       }
2319 }
2320 
2321 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2322    is the register being clobbered, and R1 is a register being used in
2323    the equivalent expression.
2324 
2325    If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2326    in which it is used, return 1.
2327 
2328    Otherwise, return 0.  */
2329 
2330 static int
2331 no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
2332 {
2333   int ok = 0;
2334   rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2335   rtx p, last;
2336 
2337   /* If R1 is a hard register, return 0 since we handle this case
2338      when we scan the insns that actually use it.  */
2339 
2340   if (note == 0
2341       || (GET_CODE (r1) == REG && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2342       || (GET_CODE (r1) == SUBREG && GET_CODE (SUBREG_REG (r1)) == REG
2343 	  && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2344     return 0;
2345 
2346   last = XEXP (note, 0);
2347 
2348   for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2349     if (INSN_P (p))
2350       {
2351 	if (find_reg_note (p, REG_DEAD, r1))
2352 	  ok = 1;
2353 
2354 	/* There must be a REG_NO_CONFLICT note on every insn, otherwise
2355 	   some earlier optimization pass has inserted instructions into
2356 	   the sequence, and it is not safe to perform this optimization.
2357 	   Note that emit_no_conflict_block always ensures that this is
2358 	   true when these sequences are created.  */
2359 	if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2360 	  return 0;
2361       }
2362 
2363   return ok;
2364 }
2365 
2366 /* Return the number of alternatives for which the constraint string P
2367    indicates that the operand must be equal to operand 0 and that no register
2368    is acceptable.  */
2369 
2370 static int
2371 requires_inout (const char *p)
2372 {
2373   char c;
2374   int found_zero = 0;
2375   int reg_allowed = 0;
2376   int num_matching_alts = 0;
2377   int len;
2378 
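  /* For illustration (not from the original comments): given the
     constraint string "0,r", the first alternative requires a match with
     operand 0 and allows no register, while the second allows any
     register, so the function returns 1.  */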
2379   for ( ; (c = *p); p += len)
2380     {
2381       len = CONSTRAINT_LEN (c, p);
2382       switch (c)
2383 	{
2384 	case '=':  case '+':  case '?':
2385 	case '#':  case '&':  case '!':
2386 	case '*':  case '%':
2387 	case 'm':  case '<':  case '>':  case 'V':  case 'o':
2388 	case 'E':  case 'F':  case 'G':  case 'H':
2389 	case 's':  case 'i':  case 'n':
2390 	case 'I':  case 'J':  case 'K':  case 'L':
2391 	case 'M':  case 'N':  case 'O':  case 'P':
2392 	case 'X':
2393 	  /* These don't say anything we care about.  */
2394 	  break;
2395 
2396 	case ',':
2397 	  if (found_zero && ! reg_allowed)
2398 	    num_matching_alts++;
2399 
2400 	  found_zero = reg_allowed = 0;
2401 	  break;
2402 
2403 	case '0':
2404 	  found_zero = 1;
2405 	  break;
2406 
2407 	case '1':  case '2':  case '3':  case '4': case '5':
2408 	case '6':  case '7':  case '8':  case '9':
2409 	  /* Skip the balance of the matching constraint.  */
2410 	  do
2411 	    p++;
2412 	  while (ISDIGIT (*p));
2413 	  len = 0;
2414 	  break;
2415 
2416 	default:
2417 	  if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2418 	      && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2419 	    break;
2420 	  /* Fall through.  */
2421 	case 'p':
2422 	case 'g': case 'r':
2423 	  reg_allowed = 1;
2424 	  break;
2425 	}
2426     }
2427 
2428   if (found_zero && ! reg_allowed)
2429     num_matching_alts++;
2430 
2431   return num_matching_alts;
2432 }
2433 
2434 void
2435 dump_local_alloc (FILE *file)
2436 {
2437   int i;
2438   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2439     if (reg_renumber[i] != -1)
2440       fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2441 }
2442