/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"
#include "df.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

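/* Number of buckets in the hash table of cached liveness information;
   targets are chained into buckets by INSN_UID modulo this prime.  */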
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
				       rtx*, int, struct resources,
				       struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_HARD_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
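      /* An annulled branch with filled slots is the first element of a
	 SEQUENCE, and the elements of a SEQUENCE are not linked directly
	 into the insn chain, so NEXT_INSN (PREV_INSN (insn)) yields the
	 SEQUENCE insn rather than INSN itself in that case.  */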
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx next = NEXT_INSN (insn);
	  enum rtx_code code = GET_CODE (next);

	  while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	      code = GET_CODE (next);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
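      /* Resource tracking runs after reload, so only hard registers
	 should appear here.  */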
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (MEM_READONLY_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
	 traditional asms, unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USEs for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (JUMP_P (this_jump_insn))
		    break;
		}
	    }

	default:
	  break;
	}

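      /* Follow jumps only within a small budget: an unconditional jump
	 costs 1 and a conditional jump costs 4 more (see below), so we
	 give up quickly on convoluted control flow.  */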
      if (JUMP_P (this_jump_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  next = JUMP_LABEL (this_jump_insn);
		  if (ANY_RETURN_P (next))
		    next = NULL_RTX;
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
		    find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
						&target_res, 0, jump_count,
						target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn. If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx link;

	  res->cc = res->memory = 1;

	  IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx control = XVECEXP (x, 0, 0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

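        /* A slot insn taken from the branch target executes only when an
	   annulling branch is taken, so its effects must be skipped here;
	   every other slot insn executes regardless of the branch.  */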
        mark_set_resources (control, res, 0, mark_type);
        for (i = XVECLEN (x, 0) - 1; i >= 1; --i)
	  {
	    rtx elt = XVECEXP (x, 0, i);
	    if (!annul_p || !INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= MEM_READONLY_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
	 traditional asms, unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0 || ANY_RETURN_P (target))
    {
      *res = end_of_function_needs;
      return;
    }

  /* Handle return insn.  */
  else if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK (b));
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK (b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

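      /* If TARGET carries a SEQUENCE pattern, make STOP_INSN the first
	 element of that SEQUENCE instead: next_insn_no_annul descends
	 into SEQUENCE bodies, so that element, not the container, is
	 what the scan below actually reaches.  */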
      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (CALL_P (real_insn))
	    {
	      /* CALL clobbers all call-used regs that aren't fixed except
		 sp, ap, and fp.  Do this before setting the result of the
		 call live.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs,
				      regs_invalidated_by_call);

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (jump_target);
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block);

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB (bb)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB (bb)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

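      /* Resetting the block forces mark_target_live_regs to recompute
	 the liveness information the next time INSN is used as a
	 target.  */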
      if (tinfo)
	tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

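  /* Bumping the tick invalidates any cached liveness information for
     targets in this block; mark_target_live_regs compares the count
     before trusting a hash table entry.  */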
  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}