/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"
#include "df.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
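
/* The table is chained per bucket and indexed by INSN_UID modulo the
   prime above; a lookup is just (as done in mark_target_live_regs below):

     for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
	 break;

   A prime bucket count keeps consecutive INSN_UIDs spread evenly.  */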

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;
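
/* A cached target_info entry is trusted only while its bb_tick matches the
   current bb_ticks[] entry for its block; incr_ticks_for_insn below bumps
   the counter to invalidate stale entries.  */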

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
				       rtx*, int, struct resources,
				       struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_HARD_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */
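
/* Schematically (the exact RTL varies by target), a filled annulled
   branch sits in the linear chain as

     (jump_insn ...)	with INSN_ANNULLED_BRANCH_P set
     (insn ...)		delay-slot insn, INSN_FROM_TARGET_P if it was
			copied from the branch target

   and it is the INSN_FROM_TARGET_P insns that are skipped here, since
   they execute only when the branch is taken.  */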

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */
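
/* A typical caller zeroes the accumulator first, e.g.

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);

   after which needed.regs, needed.memory, needed.volatil and needed.cc
   describe everything INSN may read (a usage sketch; see the call sites
   in reorg.c).  */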

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we have a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (JUMP_P (this_jump_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (JUMP_P (this_jump_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  next = JUMP_LABEL (this_jump_insn);
		  if (ANY_RETURN_P (next))
		    next = NULL_RTX;
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
		    find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
						&target_res, 0, jump_count,
						target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */
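
/* For example, to collect everything INSN may modify, including what a
   contained call could clobber:

     struct resources set;
     CLEAR_RESOURCE (&set);
     mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

   Passing MARK_SRC_DEST instead leaves out the effects of called routines
   (a usage sketch; see the call sites in reorg.c).  */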

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx link;

	  res->cc = res->memory = 1;

	  IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
	 and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx control = XVECEXP (x, 0, 0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = XVECLEN (x, 0) - 1; i >= 1; --i)
	  {
	    rtx elt = XVECEXP (x, 0, i);
	    if (!annul_p || !INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump).  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */
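
/* So a typical query from the delay-slot filler looks like

     struct resources needed;
     mark_target_live_regs (get_insns (), target_label, &needed);

   leaving needed.regs holding the registers live at TARGET_LABEL
   (a usage sketch; see reorg.c for the real call sites).  */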

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0 || ANY_RETURN_P (target))
    {
      *res = end_of_function_needs;
      return;
    }

  /* Handle return insn.  */
  else if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK_FOR_FN (cfun,
							    tinfo->block))))
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use
		 because they may be used by a complementarily (or more
		 strictly) predicated instruction, or if the CALL is
		 NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				      GET_MODE (XEXP (link, 0)),
				      REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					   GET_MODE (XEXP (link, 0)),
					   REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (jump_target);
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */
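
/* The intended lifecycle, as driven by the delay-slot scheduler, is
   roughly:

     init_resource_info (epilogue_insn);
     ... calls to mark_target_live_regs and friends ...
     free_resource_info ();

   (a sketch; dbr_schedule in reorg.c is the canonical caller).  */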

void
init_resource_info (rtx epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}