/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "resource.h"
#include "insn-attr.h"
#include "params.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
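
/* Targets are hashed by INSN_UID (target) % TARGET_HASH_PRIME; a prime
   bucket count helps spread the roughly sequential UIDs across the
   table.  The lookup pattern used by mark_target_live_regs and
   clear_hashed_info_for_insn below is simply:

     for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
	 break;  */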
49 
50 /* Indicates what resources are required at the beginning of the epilogue.  */
51 static struct resources start_of_epilogue_needs;
52 
53 /* Indicates what resources are required at function end.  */
54 static struct resources end_of_function_needs;
55 
56 /* Define the hash table itself.  */
57 static struct target_info **target_hash_table = NULL;
58 
59 /* For each basic block, we maintain a generation number of its basic
60    block info, which is updated each time we move an insn from the
61    target of a jump.  This is the generation number indexed by block
62    number.  */
63 
64 static int *bb_ticks;
65 
66 /* Marks registers possibly live at the current place being scanned by
67    mark_target_live_regs.  Also used by update_live_status.  */
68 
69 static HARD_REG_SET current_live_regs;
70 
71 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
72    Also only used by the next two functions.  */
73 
74 static HARD_REG_SET pending_dead_regs;
75 
76 static void update_live_status (rtx, const_rtx, void *);
77 static int find_basic_block (rtx_insn *, int);
78 static rtx_insn *next_insn_no_annul (rtx_insn *);
79 static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
80 					     rtx *, int, struct resources,
81 					     struct resources);
82 
83 /* Utility function called from mark_target_live_regs via note_stores.
84    It deadens any CLOBBERed registers and livens any SET registers.  */
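/* For example, given a pattern (parallel [(set (reg:SI 1) ...)
   (clobber (reg:SI 2))]), note_stores invokes this callback twice:
   once with DEST = (reg:SI 1) and X the SET, making register 1 live,
   and once with DEST = (reg:SI 2) and X the CLOBBER, making register 2
   dead.  (Illustrative register numbers only.)  */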

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else if (GET_CODE (x) == CLOBBER_HIGH)
    /* No current target supports both branch delay slots and CLOBBER_HIGH.
       We'd need more elaborate liveness tracking to handle that
       combination.  */
    gcc_unreachable ();
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */
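/* For example, given an insn stream of the shape

     ... (barrier) (code_label 23) (insn ...) ... INSN ...

   we walk backwards from INSN to the BARRIER, then forwards to
   code_label 23 and return the index of its basic block, if known.
   (Schematic RTL; the label number is illustrative.)  */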

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */
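/* A filled delay slot is represented as a single insn whose pattern is a
   SEQUENCE: element 0 is the branch and the remaining elements are the
   slot insns, roughly

     (insn (sequence [(jump_insn ...) (insn ...) ...]))

   with INSN_FROM_TARGET_P set on slot insns copied from the branch
   target.  (Schematic RTL, abbreviated.)  */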

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */
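/* For example, for a pattern (set (reg:SI 1) (plus:SI (reg:SI 2)
   (mem:SI (reg:SI 3)))), registers 2 and 3 are marked as referenced,
   as is memory (assuming the MEM is not read-only), but register 1 is
   not: the SET case below skips the destination unless it is a memory
   reference, ZERO_EXTRACT or STRICT_LOW_PART.  (Illustrative register
   numbers only.)  */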

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
    case CLOBBER_HIGH:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we have a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */
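/* The per-insn update in the loop below amounts to

     res->regs &= ~(set.regs & ~needed.regs);

   i.e. a register that is set on this path before any use is removed
   from the live set.  SET and NEEDED accumulate as the scan advances.  */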

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */
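		  /* Concretely, the code below computes live sets for the
		     taken path (TARGET_RES) and the fallthrough path
		     (FALLTHROUGH_RES) and then does

		       fallthrough_res.regs |= target_res.regs;
		       res->regs &= fallthrough_res.regs;

		     so a register survives in *RES only if it is live down
		     at least one of the two paths.  */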

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
			  (this_jump_insn->jump_target (),
			   &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */
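/* For example, for (set (mem:SI (reg:SI 1)) (reg:SI 2)) only memory is
   marked as set: neither register is written, so neither lands in
   RES->regs.  For (set (reg:SI 1) (reg:SI 2)), register 1 is marked.
   Uses are the business of mark_referenced_resources, not this function.
   (Illustrative register numbers only.)  */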

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;
	  HARD_REG_SET regs;

	  res->cc = res->memory = 1;

	  get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
	  IOR_HARD_REG_SET (res->regs, regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    {
	      /* We could support CLOBBER_HIGH and treat it in the same way as
		 HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that
		 yet.  */
	      gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
	      if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				    MARK_SRC_DEST);
	    }

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, the store is actually done by
	 the called routine.  So only count the destination as set if we
	 are to include the effects of the called routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case CLOBBER_HIGH:
      /* No current target supports both branch delay slots and CLOBBER_HIGH.
	 We'd need more elaborate liveness tracking to handle that
	 combination.  */
      gcc_unreachable ();

    case SEQUENCE:
      {
        rtx_sequence *seq = as_a <rtx_sequence *> (x);
        rtx control = seq->element (0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
		       struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call;
		  get_call_reg_set_usage (real_insn,
					  &regs_invalidated_by_this_call,
					  regs_invalidated_by_call);
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_this_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				      GET_MODE (XEXP (link, 0)),
				      REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					   GET_MODE (XEXP (link, 0)),
					   REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}