/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "resource.h"
#include "insn-attr.h"
#include "params.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
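
/* For illustration, the lookup that goes with this table (a sketch of a
   hypothetical helper; mark_target_live_regs below open-codes the same
   walk): the bucket is chosen by INSN_UID modulo TARGET_HASH_PRIME and
   collisions are chained through the `next' field.

     static struct target_info *
     lookup_target_info (int uid)
     {
       struct target_info *tinfo
	 = target_hash_table[uid % TARGET_HASH_PRIME];
       while (tinfo && tinfo->uid != uid)
	 tinfo = tinfo->next;
       return tinfo;
     }  */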

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;
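
/* A note on how the tick interacts with the cache (assuming the usual
   invalidation hook incr_ticks_for_insn declared in resource.h): when the
   delay slot filler moves an insn away from a jump target, it bumps the
   tick of the containing block, so a cached target_info whose bb_tick no
   longer matches bb_ticks[block] is recomputed instead of reused.  */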

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
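
/* A minimal sketch of how the callback above is wired up, mirroring the
   call made in mark_target_live_regs below: note_stores walks every SET
   and CLOBBER in a pattern and hands each destination to the callback.

     note_stores (PATTERN (real_insn), update_live_status, NULL);  */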

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
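
/* Typical use, as in mark_target_live_regs below (a sketch, assuming
   MAX_DELAY_SLOT_LIVE_SEARCH comes from the --param
   max-delay-slot-live-search knob via params.h):

     b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);  */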

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}
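
/* For reference, the shape being skipped (a sketch of the rtl): a filled,
   annulled branch is a single insn whose pattern is a SEQUENCE,

     (insn (sequence [(jump_insn ...)    ; the annulling branch
                      (insn ...)         ; delay slot, INSN_FROM_TARGET_P
                      (insn ...)]))      ; delay slot, from the fall-through

   and slots marked INSN_FROM_TARGET_P execute only if the branch is taken,
   so a linear walk that assumes fall-through must step over them.  */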

/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input
	 operands.  We cannot just fall through here, since we would then
	 be confused by the ASM_INPUT rtx inside the ASM_OPERANDS, which
	 does not indicate a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we have a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
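
/* Typical use (a sketch; CLEAR_RESOURCE comes from resource.h):

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);
     if (TEST_HARD_REG_BIT (needed.regs, REGNO (reg)))
       ...  INSN (or its delayed effects) reads REG.  */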

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
			  (this_jump_insn->jump_target (),
			   &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
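
/* A worked note on the jump budget above: every jump costs 1 via the
   jump_count++ test, and a conditional jump adds 4 more, 5 in total.
   With the limit at 10, a path can cross up to ten unconditional jumps,
   but a second conditional branch on the same path reaches 10 and stops
   the search, giving the "at most 1 conditional branch" behavior the
   comment in the function describes.  */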

/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;
	  HARD_REG_SET regs;

	  res->cc = res->memory = 1;

	  get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
	  IOR_HARD_REG_SET (res->regs, regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
	 and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx_sequence *seq = as_a <rtx_sequence *> (x);
        rtx control = seq->element (0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input
	 operands.  We cannot just fall through here, since we would then
	 be confused by the ASM_INPUT rtx inside the ASM_OPERANDS, which
	 does not indicate a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
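
/* To contrast the two markers, consider (a sketch):

     (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))

   mark_referenced_resources marks regs 1 and 2 (the inputs are read),
   while mark_set_resources with MARK_SRC_DEST marks reg 0 (the output
   is written).  */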

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}
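
/* The SEQUENCE case above catches a return whose delay slot was filled,
   whose rtl has the shape (a sketch):

     (insn (sequence [(jump_insn (return))
                      (insn ...)]))      ; the filled delay slot  */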

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump).  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
		       struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use
		 because they may be used by a complementarily (or more
		 strictly) predicated instruction, or if the CALL is
		 NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call;
		  get_call_reg_set_usage (real_insn,
					  &regs_invalidated_by_this_call,
					  regs_invalidated_by_call);
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_this_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
}
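
/* Typical use from the delay slot filler in reorg.c, the main client of
   this file (a sketch): compute what is live at a branch target before
   deciding whether a candidate insn may be moved into a delay slot.

     struct resources live;
     mark_target_live_regs (get_insns (), target_label, &live);
     if (!TEST_HARD_REG_BIT (live.regs, REGNO (dest)))
       ...  DEST is dead at the target, so setting it there is safe.  */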
1156*38fd1498Szrj 
1157*38fd1498Szrj /* Initialize the resources required by mark_target_live_regs ().
1158*38fd1498Szrj    This should be invoked before the first call to mark_target_live_regs.  */
1159*38fd1498Szrj 
1160*38fd1498Szrj void
init_resource_info(rtx_insn * epilogue_insn)1161*38fd1498Szrj init_resource_info (rtx_insn *epilogue_insn)
1162*38fd1498Szrj {
1163*38fd1498Szrj   int i;
1164*38fd1498Szrj   basic_block bb;
1165*38fd1498Szrj 
1166*38fd1498Szrj   /* Indicate what resources are required to be valid at the end of the current
1167*38fd1498Szrj      function.  The condition code never is and memory always is.
1168*38fd1498Szrj      The stack pointer is needed unless EXIT_IGNORE_STACK is true
1169*38fd1498Szrj      and there is an epilogue that restores the original stack pointer
1170*38fd1498Szrj      from the frame pointer.  Registers used to return the function value
1171*38fd1498Szrj      are needed.  Registers holding global variables are needed.  */
1172*38fd1498Szrj 
1173*38fd1498Szrj   end_of_function_needs.cc = 0;
1174*38fd1498Szrj   end_of_function_needs.memory = 1;
1175*38fd1498Szrj   CLEAR_HARD_REG_SET (end_of_function_needs.regs);
1176*38fd1498Szrj 
1177*38fd1498Szrj   if (frame_pointer_needed)
1178*38fd1498Szrj     {
1179*38fd1498Szrj       SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
1180*38fd1498Szrj       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
1181*38fd1498Szrj 	SET_HARD_REG_BIT (end_of_function_needs.regs,
1182*38fd1498Szrj 			  HARD_FRAME_POINTER_REGNUM);
1183*38fd1498Szrj     }
1184*38fd1498Szrj   if (!(frame_pointer_needed
1185*38fd1498Szrj 	&& EXIT_IGNORE_STACK
1186*38fd1498Szrj 	&& epilogue_insn
1187*38fd1498Szrj 	&& !crtl->sp_is_unchanging))
1188*38fd1498Szrj     SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1189*38fd1498Szrj 
1190*38fd1498Szrj   if (crtl->return_rtx != 0)
1191*38fd1498Szrj     mark_referenced_resources (crtl->return_rtx,
1192*38fd1498Szrj 			       &end_of_function_needs, true);
1193*38fd1498Szrj 
1194*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1195*38fd1498Szrj     if (global_regs[i] || EPILOGUE_USES (i))
1196*38fd1498Szrj       SET_HARD_REG_BIT (end_of_function_needs.regs, i);
1197*38fd1498Szrj 
1198*38fd1498Szrj   /* The registers required to be live at the end of the function are
1199*38fd1498Szrj      represented in the flow information as being dead just prior to
1200*38fd1498Szrj      reaching the end of the function.  For example, the return of a value
1201*38fd1498Szrj      might be represented by a USE of the return register immediately
1202*38fd1498Szrj      followed by an unconditional jump to the return label where the
1203*38fd1498Szrj      return label is the end of the RTL chain.  The end of the RTL chain
1204*38fd1498Szrj      is then taken to mean that the return register is live.
1205*38fd1498Szrj 
1206*38fd1498Szrj      This sequence is no longer maintained when epilogue instructions are
1207*38fd1498Szrj      added to the RTL chain.  To reconstruct the original meaning, the
1208*38fd1498Szrj      start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
1209*38fd1498Szrj      point where these registers become live (start_of_epilogue_needs).
1210*38fd1498Szrj      If epilogue instructions are present, the registers set by those
1211*38fd1498Szrj      instructions won't have been processed by flow.  Thus, those
1212*38fd1498Szrj      registers are additionally required at the end of the RTL chain
1213*38fd1498Szrj      (end_of_function_needs).  */
1214*38fd1498Szrj 
1215*38fd1498Szrj   start_of_epilogue_needs = end_of_function_needs;
1216*38fd1498Szrj 
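  /* Scan the epilogue itself, up to and including its return insn:
     everything the epilogue sets (including call clobbers, via
     MARK_SRC_DEST_CALL) is additionally required at the end of the
     RTL chain.  */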
1217*38fd1498Szrj   while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
1218*38fd1498Szrj     {
1219*38fd1498Szrj       mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
1220*38fd1498Szrj 			  MARK_SRC_DEST_CALL);
1221*38fd1498Szrj       if (return_insn_p (epilogue_insn))
1222*38fd1498Szrj 	break;
1223*38fd1498Szrj     }
1224*38fd1498Szrj 
1225*38fd1498Szrj   /* Allocate and initialize the tables used by mark_target_live_regs.  */
1226*38fd1498Szrj   target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
1227*38fd1498Szrj   bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));
1228*38fd1498Szrj 
1229*38fd1498Szrj   /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
1230*38fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
1231*38fd1498Szrj     if (LABEL_P (BB_HEAD (bb)))
1232*38fd1498Szrj       BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
1233*38fd1498Szrj }
1234*38fd1498Szrj 
1235*38fd1498Szrj /* Free up the resources allocated for mark_target_live_regs ().  This
1236*38fd1498Szrj    should be invoked after the last call to mark_target_live_regs ().  */
1237*38fd1498Szrj 
1238*38fd1498Szrj void
1239*38fd1498Szrj free_resource_info (void)
1240*38fd1498Szrj {
1241*38fd1498Szrj   basic_block bb;
1242*38fd1498Szrj 
1243*38fd1498Szrj   if (target_hash_table != NULL)
1244*38fd1498Szrj     {
1245*38fd1498Szrj       int i;
1246*38fd1498Szrj 
1247*38fd1498Szrj       for (i = 0; i < TARGET_HASH_PRIME; ++i)
1248*38fd1498Szrj 	{
1249*38fd1498Szrj 	  struct target_info *ti = target_hash_table[i];
1250*38fd1498Szrj 
1251*38fd1498Szrj 	  while (ti)
1252*38fd1498Szrj 	    {
1253*38fd1498Szrj 	      struct target_info *next = ti->next;
1254*38fd1498Szrj 	      free (ti);
1255*38fd1498Szrj 	      ti = next;
1256*38fd1498Szrj 	    }
1257*38fd1498Szrj 	}
1258*38fd1498Szrj 
1259*38fd1498Szrj       free (target_hash_table);
1260*38fd1498Szrj       target_hash_table = NULL;
1261*38fd1498Szrj     }
1262*38fd1498Szrj 
1263*38fd1498Szrj   if (bb_ticks != NULL)
1264*38fd1498Szrj     {
1265*38fd1498Szrj       free (bb_ticks);
1266*38fd1498Szrj       bb_ticks = NULL;
1267*38fd1498Szrj     }
1268*38fd1498Szrj 
1269*38fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
1270*38fd1498Szrj     if (LABEL_P (BB_HEAD (bb)))
1271*38fd1498Szrj       BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
1272*38fd1498Szrj }
1273*38fd1498Szrj 
1274*38fd1498Szrj /* Clear any hashed information that we have stored for INSN.  */
1275*38fd1498Szrj 
1276*38fd1498Szrj void
1277*38fd1498Szrj clear_hashed_info_for_insn (rtx_insn *insn)
1278*38fd1498Szrj {
1279*38fd1498Szrj   struct target_info *tinfo;
1280*38fd1498Szrj 
1281*38fd1498Szrj   if (target_hash_table != NULL)
1282*38fd1498Szrj     {
1283*38fd1498Szrj       for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1284*38fd1498Szrj 	   tinfo; tinfo = tinfo->next)
1285*38fd1498Szrj 	if (tinfo->uid == INSN_UID (insn))
1286*38fd1498Szrj 	  break;
1287*38fd1498Szrj 
1288*38fd1498Szrj       if (tinfo)
1289*38fd1498Szrj 	tinfo->block = -1;
1290*38fd1498Szrj     }
1291*38fd1498Szrj }
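
/* A note on the scheme above (a sketch of the lookup, mirroring the
   code): the table uses modular bucketing with chaining,

     tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
     while (tinfo && tinfo->uid != INSN_UID (insn))
       tinfo = tinfo->next;

   and a hit is invalidated by setting its block to -1 rather than by
   unlinking it; entries are only reclaimed in free_resource_info.  */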
1292*38fd1498Szrj 
1293*38fd1498Szrj /* Increment the tick count for the basic block that contains INSN.  */
1294*38fd1498Szrj 
1295*38fd1498Szrj void
1296*38fd1498Szrj incr_ticks_for_insn (rtx_insn *insn)
1297*38fd1498Szrj {
1298*38fd1498Szrj   int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
1299*38fd1498Szrj 
1300*38fd1498Szrj   if (b != -1)
1301*38fd1498Szrj     bb_ticks[b]++;
1302*38fd1498Szrj }
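
/* Bumping bb_ticks[B] makes any target_info cached for block B (whose
   bb_tick was recorded by mark_target_live_regs) compare unequal on the
   next lookup, so the stale liveness data is presumably recomputed
   rather than reused.  */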
1303*38fd1498Szrj 
1304*38fd1498Szrj /* Record the resources referenced by TRIAL as used at the end of the
1305*38fd1498Szrj    current function.  */
1306*38fd1498Szrj void
1307*38fd1498Szrj mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
1308*38fd1498Szrj {
1309*38fd1498Szrj   mark_referenced_resources (trial, &end_of_function_needs,
1310*38fd1498Szrj 			     include_delayed_effects);
1311*38fd1498Szrj }
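
/* Illustrative use (a sketch; trial_insn is hypothetical):

     mark_end_of_function_resources (trial_insn, true);

   after which end_of_function_needs also covers everything trial_insn
   references, including delayed effects (e.g. of insns in delay slots)
   when INCLUDE_DELAYED_EFFECTS is true.  */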
1312