/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "resource.h"
#include "insn-attr.h"
#include "function-abi.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;
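
/* Illustrative sketch (not part of the pass itself): a lookup hashes the
   target insn's UID into one of TARGET_HASH_PRIME buckets and walks the
   chain, e.g.

     struct target_info *tinfo
       = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
     while (tinfo && tinfo->uid != INSN_UID (target))
       tinfo = tinfo->next;

   mark_target_live_regs below performs exactly this search inline.  */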

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   It too is used only by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
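
/* For example (illustrative): given the insn pattern

     (parallel [(set (reg:SI 0) (reg:SI 1))
		(clobber (reg:SI 2))])

   note_stores calls update_live_status twice: once for the SET, livening
   register 0, and once for the CLOBBER, deadening register 2.  Register 1
   is only a use and is never passed to the callback.  */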

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}
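
/* Illustration: when an annulled branch and its delay slot insns form a
   SEQUENCE, the slot insns are chained after the jump via NEXT_INSN.
   Starting from such a jump, the loop above skips every slot insn whose
   INSN_FROM_TARGET_P bit is set, since those execute only when the
   branch is taken, and then steps past them to the following insn.  */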

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we have a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
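
/* Example (illustrative): for the insn pattern

     (set (reg:SI 1) (mem:SI (reg:SI 2)))

   mark_referenced_resources sets res->memory, because the MEM is read,
   and adds register 2, the address, to res->regs; register 1 is set
   rather than referenced, so it is left out.  */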

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  pending_dead_regs &= ~needed.regs;
	  res->regs &= ~pending_dead_regs;
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */
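		  /* Concretely (illustrative arithmetic): reaching this
		     point costs 1 via jump_count++ above plus 4 more just
		     below, so the first conditional branch brings the
		     count to 5, and a second one reaches the cutoff of 10
		     and is abandoned.  */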

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  scratch = target_set.regs & ~needed.regs;
		  target_res.regs &= ~scratch;

		  fallthrough_res = *res;
		  scratch = set.regs & ~needed.regs;
		  fallthrough_res.regs &= ~scratch;

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
			  (this_jump_insn->jump_target (),
			   &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  fallthrough_res.regs |= target_res.regs;
		  res->regs &= fallthrough_res.regs;
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      scratch = set.regs & ~needed.regs;
      res->regs &= ~scratch;
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;

	  res->cc = res->memory = 1;

	  res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx_sequence *seq = as_a <rtx_sequence *> (x);
        rtx control = seq->element (0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
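	    /* Slots filled from the target of an annulled branch are not
	       executed when the branch falls through, so their sets must
	       not be counted.  */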
	    if (!annul_p || !INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
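
/* Example (illustrative): for the insn pattern

     (set (mem:SI (pre_inc:SI (reg:SI 2))) (reg:SI 1))

   mark_set_resources sets res->memory, because the MEM is written, and
   adds register 2 to res->regs, because the pre-increment modifies it;
   register 1 is only a source and is not marked.  */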

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}
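
/* For instance (illustrative), a return whose delay slot has been filled
   is a (sequence [(jump_insn (return)) ...]); the recursive call above
   looks at element 0 and still recognizes it as a return.  */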

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
		       struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, param_max_delay_slot_live_search);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      res->regs = tinfo->live_regs;
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;
      df_ref def;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
      FOR_EACH_ARTIFICIAL_DEF (def, b)
	if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	  SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call
		    = insn_callee_abi (real_insn).full_reg_clobbers ();
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  current_live_regs &= ~regs_invalidated_by_this_call;
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				      GET_MODE (XEXP (link, 0)),
				      REGNO (XEXP (link, 0)));

	      note_stores (real_insn, update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					   GET_MODE (XEXP (link, 0)),
					   REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      current_live_regs &= ~pending_dead_regs;
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  current_live_regs |= extra_live;
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    current_live_regs |= start_of_epilogue_needs.regs;
	}

      res->regs = current_live_regs;
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  scratch = needed.regs & ~set.regs;
	  new_resources.regs |= scratch;

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      res->regs |= new_resources.regs;
    }

  if (tinfo != NULL)
    tinfo->live_regs = res->regs;
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || df_epilogue_uses_p (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Filter out the flags register from those additionally required
     registers.  */
  if (targetm.flags_regnum != INVALID_REGNUM)
    CLEAR_HARD_REG_BIT (end_of_function_needs.regs, targetm.flags_regnum);

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
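
/* Typical call sequence (a sketch; the delay slot scheduler in reorg.cc
   is the client of this file):

     init_resource_info (epilogue_insn);
     ...
     struct resources live;
     mark_target_live_regs (get_insns (), target, &live);
     ...
     free_resource_info ();  */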

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}

/* Clear any hashed information that we have stored for instructions
   between INSN and the next BARRIER that follow a JUMP or a LABEL.  */

void
clear_hashed_info_until_next_barrier (rtx_insn *insn)
{
  while (insn && !BARRIER_P (insn))
    {
      if (JUMP_P (insn) || LABEL_P (insn))
	{
	  rtx_insn *next = next_active_insn (insn);
	  if (next)
	    clear_hashed_info_for_insn (next);
	}

      insn = next_nonnote_insn (insn);
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, param_max_delay_slot_live_search);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}