/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992-2013 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
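
   For instance (an illustrative sketch, not taken from any target
   description), an independent insn from before the branch can be
   moved into the slot:

	before:				after:
	    addiu $v0,$v0,1		    beq   $a0,$0,L
	    beq   $a0,$0,L		    addiu $v0,$v0,1   # delay slot
	    nop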

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that, failing
   other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute if we knew the branch
   was taken.
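
   For instance (a sketch in SPARC-like assembly; the labels are
   illustrative), the first insn at the target can be copied into the
   slot of an annulling branch, which then jumps past the original:

	before:				after:
	    be    L			    be,a  L1
	    nop				    ld    [%o1],%o2   ! annulled if not taken
	...				...
	L:  ld    [%o1],%o2		L:  ld    [%o1],%o2
	    insn2			L1: insn2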

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns, other than
   jump insns, can fill the delay slots.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.
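
   For instance (an illustrative sketch), when a conditional branch and
   its fall-through path merge at a label, the insn at the merge point
   executes either way, so it can fill the slot provided it does not
   conflict with the insns it now precedes:

	before:				after:
	    beq   r1,L			    beq   r1,L
	    nop				    mov   r5,r6   ; from the merge point
	    insn1			    insn1
	L:  mov   r5,r6			L:  insn2
	    insn2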

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
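
   For instance (an illustrative sketch), if the branch is predicted
   taken but its target is also reachable from elsewhere, the stolen
   insn must be left in place at the target, expanding the code:

	before:				after:
	    bne   r1,L			    bne,a r1,L1   ; annul if not taken
	    nop				    add   r2,r2,1 ; copy of the insn at L
	...				...
	L:  add   r2,r2,1		L:  add   r2,r2,1 ; kept for other paths
	    insn2			L1: insn2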

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point each
   insn at the other with REG_CC_USER and REG_CC_SETTER notes.  Note that
   these restrictions affect very few machines because most RISC machines
   with delay slots will not use CC0 (the RT is the only known exception
   at this point).  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "target.h"
#include "tree-pass.h"
#include "emit-rtl.h"

#ifdef DELAY_SLOTS

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
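
/* A sketch of how the unfilled-slot list is typically walked (the
   processing call is illustrative only):

	rtx *slot;
	for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
	  if (*slot != 0)
	    process_unfilled_insn (*slot);

   Because both macros are re-evaluated on every use, the loop remains
   correct even if growing the obstack moves the block.  */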

/* Points to the label before the end of the function, or before a
   return insn.  */
static rtx function_return_label;
/* Likewise for a simple_return.  */
static rtx function_simple_return_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx find_end_label (rtx);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int mostly_true_jump (rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
					 struct resources *,
					 struct resources *,
					 struct resources *,
					 int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
					      struct resources *,
					      struct resources *,
					      struct resources *,
					      int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
				   int, int, int, int,
				   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
static void make_return_insns (rtx);

/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (insn);
}

/* Return true iff INSN is a simplejump, or any kind of return insn.  */

static bool
simplejump_or_return_p (rtx insn)
{
  return (JUMP_P (insn)
	  && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
}

/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore treat it like a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if
   either resource set contains a volatile memory reference.  Otherwise,
   return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

  return hard_reg_set_intersect_p (res1->regs, res2->regs);
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, also count
   the resources used by the routine called by INSN.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
			    bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
		      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
		      (include_delayed_effects
		       ? MARK_SRC_DEST_CALL
		       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}

/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   function_return_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   function_return_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once function_return_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.
   KIND is either simple_return_rtx or ret_rtx, indicating which type of
   return we're looking for.  */

static rtx
find_end_label (rtx kind)
{
  rtx insn;
  rtx *plabel;

  if (kind == ret_rtx)
    plabel = &function_return_label;
  else
    {
      gcc_assert (kind == simple_return_rtx);
      plabel = &function_simple_return_label;
    }

  /* If we found one previously, return it.  */
  if (*plabel)
    return *plabel;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
	 || (NONJUMP_INSN_P (insn)
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     function_return_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && PATTERN (PREV_INSN (insn)) == kind)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
	 insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (label, temp);
      *plabel = label;
    }

  else if (LABEL_P (insn))
    *plabel = insn;
  else
    {
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;
      /* If the basic block reorder pass moves the return insn to
	 some other place try to locate it again and put our
	 function_return_label there.  */
      while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind)))
	insn = PREV_INSN (insn);
      if (insn)
	{
	  insn = PREV_INSN (insn);

	  /* Put the label before any USE insns that may precede the
	     RETURN insn.  */
	  while (GET_CODE (insn) == USE)
	    insn = PREV_INSN (insn);

	  emit_label_after (label, insn);
	}
      else
	{
#ifdef HAVE_epilogue
	  if (HAVE_epilogue
#ifdef HAVE_return
	      && ! HAVE_return
#endif
	      )
	    /* The RETURN insn has its delay slot filled so we cannot
	       emit the label just before it.  Since we already have
	       an epilogue and cannot emit a new RETURN, we cannot
	       emit the label at all.  */
	    return NULL_RTX;
#endif /* HAVE_epilogue */

	  /* Otherwise, make a new label and emit a RETURN and BARRIER,
	     if needed.  */
	  emit_label (label);
#ifdef HAVE_return
	  if (HAVE_return)
	    {
	      /* The return we make may have delay slots too.  */
	      rtx insn = gen_return ();
	      insn = emit_jump_insn (insn);
	      set_return_jump_label (insn);
	      emit_barrier ();
	      if (num_delay_slots (insn) > 0)
		obstack_ptr_grow (&unfilled_slots_obstack, insn);
	    }
#endif
	}
      *plabel = label;
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (*plabel);

  return *plabel;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
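
/* For illustration (a sketch, not taken from an actual dump), a branch
   with two filled slots becomes a single insn whose pattern is

	(sequence [(jump_insn ...)	;; the insn needing the slots
		   (insn ...)		;; first delay slot
		   (insn ...)])		;; second delay slot

   so element 0 of the SEQUENCE is always the insn that owns the
   slots.  */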

static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  INSN_LOCATION (seq_insn) = INSN_LOCATION (delay_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug
         info is output into the delay slot.  */
      if (INSN_LOCATION (tem) && !INSN_LOCATION (seq_insn))
	INSN_LOCATION (seq_insn) = INSN_LOCATION (tem);
      INSN_LOCATION (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
	{
	  next = XEXP (note, 1);
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_DEAD:
	      /* Remove any REG_DEAD notes because we can't rely on them now
		 that the insn has been moved.  */
	      remove_note (tem, note);
	      break;

	    case REG_LABEL_OPERAND:
	    case REG_LABEL_TARGET:
	      /* Keep the label reference count up to date.  */
	      if (LABEL_P (XEXP (note, 0)))
		LABEL_NUSES (XEXP (note, 0)) ++;
	      break;

	    default:
	      break;
	    }
	}
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */
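
/* For example (illustrative), after adding A and then B the result is an
   INSN_LIST chain in which XEXP (list, 0) is A and XEXP (XEXP (list, 1), 0)
   is B, i.e. A occupies the first delay slot and B the second.  */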

static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}

/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (JUMP_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (NOTE_P (trial))
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, NULL_RTX))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_related_insns (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_related_insns (insn);
}

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform a similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if (next_trial == next_active_insn (JUMP_LABEL (insn))
      || (next_trial != 0
	  && simplejump_or_return_p (next_trial)
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn), 1))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && simplejump_or_return_p (next_trial))
	{
	  rtx target_label = JUMP_LABEL (next_trial);
	  if (ANY_RETURN_P (target_label))
	    target_label = find_end_label (target_label);

	  if (target_label)
	    {
	      /* Recompute the flags based on TARGET_LABEL since threading
		 the jump to TARGET_LABEL may change the direction of the
		 jump (which may change the circumstances in which the
		 delay slot is nullified).  */
	      flags = get_jump_flags (insn, target_label);
	      if (eligible_for_annul_true (insn, 0, trial, flags))
		reorg_redirect_jump (insn, target_label);
	    }
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots; these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  return flags;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0, and if the branch is highly unlikely to be taken, return -1.  */
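
/* As a worked example of the thresholds below (assuming the usual
   REG_BR_PROB_BASE of 10000): a REG_BR_PROB note of 9500 yields 2,
   6000 yields 1, 2000 yields 0, and 500 yields -1.  */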

static int
mostly_true_jump (rtx jump_insn)
{
  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
	return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
	return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
	return 0;
      else
	return -1;
    }

  /* If there is no note, assume branches are not taken.
     This should be rare.  */
  return 0;
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
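
/* For example (an illustrative pattern), given a conditional jump

	(set (pc) (if_then_else (eq (reg 1) (const_int 0))
				(label_ref L) (pc)))

   this returns (eq (reg 1) (const_int 0)) when TARGET is L.  If the
   arms are swapped so that the taken case falls through, the reversed
   comparison is returned instead, provided the reversal is known to
   be valid.  */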

static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat))
    return pat == target ? const_true_rtx : 0;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 2) == pc_rtx
	   && GET_CODE (XEXP (src, 1)) == LABEL_REF
	   && XEXP (XEXP (src, 1), 0) == target)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 1) == pc_rtx
	   && GET_CODE (XEXP (src, 2)) == LABEL_REF
	   && XEXP (XEXP (src, 2), 0) == target)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
	return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
			       XEXP (XEXP (src, 0), 0),
			       XEXP (XEXP (src, 0), 1));
    }

  return 0;
}

/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
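
/* For instance, (eq x 0) is more strict than (le x 0) in a signed
   comparison: whenever x == 0 holds, a branch on x <= 0 is certainly
   taken.  */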

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
			      rtx delay_list, struct resources *sets,
			      struct resources *needed,
			      struct resources *other_needed,
			      int slots_to_fill, int *pslots_filled,
			      int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;
  bool *redundant;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, false))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      redundant[i] = redundant_insn (trial, insn, new_delay_list);
      if (redundant[i])
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     && (must_annul = 1,
	         check_annul_list_true_false (0, delay_list)
	         && check_annul_list_true_false (0, new_delay_list)
	         && eligible_for_annul_false (insn, total_slots_filled,
					      trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_delay_slot_insn (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Record the effect of the instructions that were redundant and which
     we therefore decided not to copy.  */
  for (i = 1; i < XVECLEN (seq, 0); i++)
    if (redundant[i])
      update_block (XVECEXP (seq, 0, i), insn);

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}

/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
				   rtx delay_list, struct resources *sets,
				   struct resources *needed,
				   struct resources *other_needed,
				   int slots_to_fill, int *pslots_filled,
				   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_or_return_p (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  update_block (trial, insn);
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}

/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH,
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
				   true);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &needed, true)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_related_insns (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
           && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &needed, true)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new_rtx;

		  update_block (dtrial, thread);
		  new_rtx = delete_from_delay_slot (dtrial);
	          if (INSN_DELETED_P (thread))
		    thread = new_rtx;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (dtrial, &needed, true);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used
     at the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new_rtx;

	      update_block (XEXP (merged_insns, 0), thread);
	      new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new_rtx;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_related_insns (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
1486 
1487 /* See if INSN is redundant with an insn in front of TARGET.  Often this
1488    is called when INSN is a candidate for a delay slot of TARGET.
1489    DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1490    of INSN.  Often INSN will be redundant with an insn in a delay slot of
1491    some previous insn.  This happens when we have a series of branches to the
1492    same label; in that case the first insn at the target might want to go
1493    into each of the delay slots.
1494 
1495    If we are not careful, this routine can take up a significant fraction
1496    of the total compilation time (4%), but only wins rarely.  Hence we
1497    speed this routine up by making two passes.  The first pass goes back
1498    until it hits a label and sees if it finds an insn with an identical
1499    pattern.  Only in this (relatively rare) event does it check for
1500    data conflicts.
1501 
1502    We do not split insns we encounter.  This could cause us not to find a
1503    redundant insn, but the cost of splitting seems greater than the possible
1504    gain in rare cases.  */
1505 
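/* For illustration (hypothetical MIPS-like code): once the first of two
   branches to L1 has had its slot filled from the target,

	beq	$4,$0,L1
	 lw	$2,0($4)	; copied from L1
	...			; falls through to the next branch
	bne	$5,$0,L1
     L1:	lw	$2,0($4)

   the load at L1 is redundant when considered for the second branch's
   delay slot: it already executed in the first branch's slot on the path
   leading here, provided nothing in between disturbs $2 or 0($4).  */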
1506 static rtx
1507 redundant_insn (rtx insn, rtx target, rtx delay_list)
1508 {
1509   rtx target_main = target;
1510   rtx ipat = PATTERN (insn);
1511   rtx trial, pat;
1512   struct resources needed, set;
1513   int i;
1514   unsigned insns_to_search;
1515 
1516   /* If INSN has any REG_UNUSED notes, it can't match anything since we
1517      are allowed to not actually assign to such a register.  */
1518   if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1519     return 0;
1520 
1521   /* Scan backwards looking for a match.  */
1522   for (trial = PREV_INSN (target),
1523 	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1524        trial && insns_to_search > 0;
1525        trial = PREV_INSN (trial))
1526     {
1527       if (LABEL_P (trial))
1528 	return 0;
1529 
1530       if (!INSN_P (trial))
1531 	continue;
1532       --insns_to_search;
1533 
1534       pat = PATTERN (trial);
1535       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1536 	continue;
1537 
1538       if (GET_CODE (pat) == SEQUENCE)
1539 	{
1540 	  /* Stop for a CALL and its delay slots because it is difficult to
1541 	     track its resource needs correctly.  */
1542 	  if (CALL_P (XVECEXP (pat, 0, 0)))
1543 	    return 0;
1544 
1545 	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1546 	     slots because it is difficult to track its resource needs
1547 	     correctly.  */
1548 
1549 #ifdef INSN_SETS_ARE_DELAYED
1550 	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1551 	    return 0;
1552 #endif
1553 
1554 #ifdef INSN_REFERENCES_ARE_DELAYED
1555 	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1556 	    return 0;
1557 #endif
1558 
1559 	  /* See if any of the insns in the delay slot match, updating
1560 	     resource requirements as we go.  */
1561 	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1562 	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1563 		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1564 		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1565 	      break;
1566 
1567 	  /* If we found a match, exit this loop early.  */
1568 	  if (i > 0)
1569 	    break;
1570 	}
1571 
1572       else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1573 	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1574 	break;
1575     }
1576 
1577   /* If we didn't find an insn that matches, return 0.  */
1578   if (trial == 0)
1579     return 0;
1580 
1581   /* See what resources this insn sets and needs.  If they overlap, or
1582      if this insn references CC0, it can't be redundant.  */
1583 
1584   CLEAR_RESOURCE (&needed);
1585   CLEAR_RESOURCE (&set);
1586   mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1587   mark_referenced_resources (insn, &needed, true);
1588 
1589   /* If TARGET is a SEQUENCE, get the main insn.  */
1590   if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1591     target_main = XVECEXP (PATTERN (target), 0, 0);
1592 
1593   if (resource_conflicts_p (&needed, &set)
1594 #ifdef HAVE_cc0
1595       || reg_mentioned_p (cc0_rtx, ipat)
1596 #endif
1597       /* The insn requiring the delay may not set anything needed or set by
1598 	 INSN.  */
1599       || insn_sets_resource_p (target_main, &needed, true)
1600       || insn_sets_resource_p (target_main, &set, true))
1601     return 0;
1602 
1603   /* Insns we pass may not set either NEEDED or SET, so merge them for
1604      simpler tests.  */
1605   needed.memory |= set.memory;
1606   needed.unch_memory |= set.unch_memory;
1607   IOR_HARD_REG_SET (needed.regs, set.regs);
1608 
1609   /* This insn isn't redundant if it conflicts with an insn that either is
1610      or will be in a delay slot of TARGET.  */
1611 
1612   while (delay_list)
1613     {
1614       if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
1615 	return 0;
1616       delay_list = XEXP (delay_list, 1);
1617     }
1618 
1619   if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1620     for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1621       if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
1622 				true))
1623 	return 0;
1624 
1625   /* Scan backwards until we reach a label or an insn that uses something
1626      INSN sets or sets something INSN uses or sets.  */
1627 
1628   for (trial = PREV_INSN (target),
1629 	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1630        trial && !LABEL_P (trial) && insns_to_search > 0;
1631        trial = PREV_INSN (trial))
1632     {
1633       if (!INSN_P (trial))
1634 	continue;
1635       --insns_to_search;
1636 
1637       pat = PATTERN (trial);
1638       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1639 	continue;
1640 
1641       if (GET_CODE (pat) == SEQUENCE)
1642 	{
1643 	  bool annul_p = false;
1644           rtx control = XVECEXP (pat, 0, 0);
1645 
1646 	  /* If this is a CALL_INSN and its delay slots, it is hard to track
1647 	     the resource needs properly, so give up.  */
1648 	  if (CALL_P (control))
1649 	    return 0;
1650 
1651 	  /* If this is an INSN or JUMP_INSN with delayed effects, it
1652 	     is hard to track the resource needs properly, so give up.  */
1653 
1654 #ifdef INSN_SETS_ARE_DELAYED
1655 	  if (INSN_SETS_ARE_DELAYED (control))
1656 	    return 0;
1657 #endif
1658 
1659 #ifdef INSN_REFERENCES_ARE_DELAYED
1660 	  if (INSN_REFERENCES_ARE_DELAYED (control))
1661 	    return 0;
1662 #endif
1663 
1664 	  if (JUMP_P (control))
1665 	    annul_p = INSN_ANNULLED_BRANCH_P (control);
1666 
1667 	  /* See if any of the insns in the delay slot match, updating
1668 	     resource requirements as we go.  */
1669 	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1670 	    {
1671 	      rtx candidate = XVECEXP (pat, 0, i);
1672 
1673 	      /* If an insn will be annulled if the branch is false, it isn't
1674 		 considered as a possible duplicate insn.  */
1675 	      if (rtx_equal_p (PATTERN (candidate), ipat)
1676 		  && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
1677 		{
1678 		  /* Show that this insn will be used in the sequel.  */
1679 		  INSN_FROM_TARGET_P (candidate) = 0;
1680 		  return candidate;
1681 		}
1682 
1683 	      /* Unless this is an annulled insn from the target of a branch,
1684 		 we must stop if it sets anything needed or set by INSN.  */
1685 	      if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
1686 		  && insn_sets_resource_p (candidate, &needed, true))
1687 		return 0;
1688 	    }
1689 
1690 	  /* If the insn requiring the delay slot conflicts with INSN, we
1691 	     must stop.  */
1692 	  if (insn_sets_resource_p (control, &needed, true))
1693 	    return 0;
1694 	}
1695       else
1696 	{
1697 	  /* See if TRIAL is the same as INSN.  */
1698 	  pat = PATTERN (trial);
1699 	  if (rtx_equal_p (pat, ipat))
1700 	    return trial;
1701 
1702 	  /* Can't go any further if TRIAL conflicts with INSN.  */
1703 	  if (insn_sets_resource_p (trial, &needed, true))
1704 	    return 0;
1705 	}
1706     }
1707 
1708   return 0;
1709 }
1710 
1711 /* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
1712    it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
1713    is nonzero, we are allowed to fall into this thread; otherwise, we are
1714    not.
1715 
1716    If LABEL is used more than once or we pass a label other than LABEL
1717    before finding an active insn, we do not own this thread.  */
1718 
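/* For illustration (SPARC-style assembly; labels arbitrary): in

	b	L2		! the only reference to L2
	 nop
	...
     L2:	add	%o1,%o2,%o3

   the branch owns the thread at L2, so the add may be deleted outright
   once it has been moved into the delay slot.  If L2 had other users, the
   add could only be copied, never removed.  */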
1719 static int
1720 own_thread_p (rtx thread, rtx label, int allow_fallthrough)
1721 {
1722   rtx active_insn;
1723   rtx insn;
1724 
1725   /* We don't own the function end.  */
1726   if (thread == 0 || ANY_RETURN_P (thread))
1727     return 0;
1728 
1729   /* Get the first active insn, or THREAD, if it is an active insn.  */
1730   active_insn = next_active_insn (PREV_INSN (thread));
1731 
1732   for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1733     if (LABEL_P (insn)
1734 	&& (insn != label || LABEL_NUSES (insn) != 1))
1735       return 0;
1736 
1737   if (allow_fallthrough)
1738     return 1;
1739 
1740   /* Ensure that we reach a BARRIER before any insn or label.  */
1741   for (insn = prev_nonnote_insn (thread);
1742        insn == 0 || !BARRIER_P (insn);
1743        insn = prev_nonnote_insn (insn))
1744     if (insn == 0
1745 	|| LABEL_P (insn)
1746 	|| (NONJUMP_INSN_P (insn)
1747 	    && GET_CODE (PATTERN (insn)) != USE
1748 	    && GET_CODE (PATTERN (insn)) != CLOBBER))
1749       return 0;
1750 
1751   return 1;
1752 }
1753 
1754 /* Called when INSN is being moved from a location near the target of a jump.
1755    We leave a marker of the form (use (INSN)) immediately in front
1756    of WHERE for mark_target_live_regs.  These markers will be deleted when
1757    reorg finishes.
1758 
1759    We used to try to update the live status of registers if WHERE is at
1760    the start of a basic block, but that can't work since we may remove a
1761    BARRIER in relax_delay_slots.  */
1762 
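/* Concretely, the marker emitted below is

	(insn (use (insn ...)))

   i.e. a USE whose operand is the moved insn itself, placed immediately
   before WHERE so that mark_target_live_regs can still account for the
   resources that insn touches.  */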
1763 static void
1764 update_block (rtx insn, rtx where)
1765 {
1766   /* Ignore if this was in a delay slot and it came from the target of
1767      a branch.  */
1768   if (INSN_FROM_TARGET_P (insn))
1769     return;
1770 
1771   emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1772 
1773   /* INSN might be making a value live in a block where it didn't use to
1774      be.  So recompute liveness information for this block.  */
1775 
1776   incr_ticks_for_insn (insn);
1777 }
1778 
1779 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1780    the basic block containing the jump.  */
1781 
1782 static int
1783 reorg_redirect_jump (rtx jump, rtx nlabel)
1784 {
1785   incr_ticks_for_insn (jump);
1786   return redirect_jump (jump, nlabel, 1);
1787 }
1788 
1789 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1790    We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1791    that reference values used in INSN.  If we find one, then we move the
1792    REG_DEAD note to INSN.
1793 
1794    This is needed to handle the case where a later insn (after INSN) has a
1795    REG_DEAD note for a register used by INSN, and this later insn subsequently
1796    gets moved before a CODE_LABEL because it is a redundant insn.  In this
1797    case, mark_target_live_regs may be confused into thinking the register
1798    is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */
1799 
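/* For instance (register numbers arbitrary): suppose INSN uses (reg 5)
   and some insn P between INSN and DELAYED_INSN carries REG_DEAD (reg 5).
   Once INSN moves forward into the delay slot it executes after P, so the
   register now dies at INSN rather than at P; and if P were later placed
   just before a CODE_LABEL as a redundant insn, its stale note would make
   mark_target_live_regs think (reg 5) is dead at that label.  */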
1800 static void
1801 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1802 {
1803   rtx p, link, next;
1804 
1805   for (p = next_nonnote_insn (insn); p != delayed_insn;
1806        p = next_nonnote_insn (p))
1807     for (link = REG_NOTES (p); link; link = next)
1808       {
1809 	next = XEXP (link, 1);
1810 
1811 	if (REG_NOTE_KIND (link) != REG_DEAD
1812 	    || !REG_P (XEXP (link, 0)))
1813 	  continue;
1814 
1815 	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1816 	  {
1817 	    /* Move the REG_DEAD note from P to INSN.  */
1818 	    remove_note (p, link);
1819 	    XEXP (link, 1) = REG_NOTES (insn);
1820 	    REG_NOTES (insn) = link;
1821 	  }
1822       }
1823 }
1824 
1825 /* Called when an insn redundant with START_INSN is deleted.  If there
1826    is a REG_DEAD note for the target of START_INSN between START_INSN
1827    and STOP_INSN, then the REG_DEAD note needs to be deleted since the
1828    value no longer dies there.
1829 
1830    If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1831    confused into thinking the register is dead.  */
1832 
1833 static void
1834 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1835 {
1836   rtx p, link, next;
1837 
1838   for (p = next_nonnote_insn (start_insn); p != stop_insn;
1839        p = next_nonnote_insn (p))
1840     for (link = REG_NOTES (p); link; link = next)
1841       {
1842 	next = XEXP (link, 1);
1843 
1844 	if (REG_NOTE_KIND (link) != REG_DEAD
1845 	    || !REG_P (XEXP (link, 0)))
1846 	  continue;
1847 
1848 	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1849 	  {
1850 	    remove_note (p, link);
1851 	    return;
1852 	  }
1853       }
1854 }
1855 
1856 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1857 
1858    This handles the case of udivmodXi4 instructions which optimize their
1859    output depending on whether any REG_UNUSED notes are present.
1860    We must make sure that INSN calculates as many results as REDUNDANT_INSN
1861    does.  */
1862 
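/* For example: a udivmod insn whose remainder output carries REG_UNUSED
   may be optimized by the port into a divide-only form.  If REDUNDANT_INSN
   carries no such note, it computes both results, so the note must come
   off INSN as well for the two insns to remain interchangeable.  */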
1863 static void
1864 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1865 {
1866   rtx link, next;
1867 
1868   for (link = REG_NOTES (insn); link; link = next)
1869     {
1870       next = XEXP (link, 1);
1871 
1872       if (REG_NOTE_KIND (link) != REG_UNUSED
1873 	  || !REG_P (XEXP (link, 0)))
1874 	continue;
1875 
1876       if (! find_regno_note (redundant_insn, REG_UNUSED,
1877 			     REGNO (XEXP (link, 0))))
1878 	remove_note (insn, link);
1879     }
1880 }
1881 
1882 /* Return the label before INSN, or put a new label there.  */
1883 
1884 static rtx
1885 get_label_before (rtx insn)
1886 {
1887   rtx label;
1888 
1889   /* Find an existing label at this point
1890      or make a new one if there is none.  */
1891   label = prev_nonnote_insn (insn);
1892 
1893   if (label == 0 || !LABEL_P (label))
1894     {
1895       rtx prev = PREV_INSN (insn);
1896 
1897       label = gen_label_rtx ();
1898       emit_label_after (label, prev);
1899       LABEL_NUSES (label) = 0;
1900     }
1901   return label;
1902 }
1903 
1904 /* Scan a function looking for insns that need a delay slot and find insns to
1905    put into the delay slot.
1906 
1907    NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
1908    as calls).  We do these first since we don't want jump insns (that are
1909    easier to fill) to get the only insns that could be used for non-jump insns.
1910    When it is zero, only try to fill JUMP_INSNs.
1911 
1912    When slots are filled in this manner, the insns (including the
1913    delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
1914    it is possible to tell whether a delay slot has really been filled
1915    or not.  `final' knows how to deal with this, by communicating
1916    through FINAL_SEQUENCE.  */
1917 
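/* Concretely, a jump whose two slots were filled ends up wrapped as

	(insn (sequence [(jump_insn ...)
			 (insn ...)	; delay slot 1
			 (insn ...)]))	; delay slot 2

   with the insn needing the delay slots always at element 0.  */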
1918 static void
1919 fill_simple_delay_slots (int non_jumps_p)
1920 {
1921   rtx insn, pat, trial, next_trial;
1922   int i;
1923   int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
1924   struct resources needed, set;
1925   int slots_to_fill, slots_filled;
1926   rtx delay_list;
1927 
1928   for (i = 0; i < num_unfilled_slots; i++)
1929     {
1930       int flags;
1931       /* Get the next insn to fill.  If it has already had any slots assigned,
1932 	 we can't do anything with it.  Maybe we'll improve this later.  */
1933 
1934       insn = unfilled_slots_base[i];
1935       if (insn == 0
1936 	  || INSN_DELETED_P (insn)
1937 	  || (NONJUMP_INSN_P (insn)
1938 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1939 	  || (JUMP_P (insn) && non_jumps_p)
1940 	  || (!JUMP_P (insn) && ! non_jumps_p))
1941 	continue;
1942 
1943       /* It may have been that this insn used to need delay slots, but
1944 	 now doesn't; ignore in that case.  This can happen, for example,
1945 	 on the HP PA RISC, where the number of delay slots depends on
1946 	 what insns are nearby.  */
1947       slots_to_fill = num_delay_slots (insn);
1948 
1949       /* Some machine descriptions have defined instructions to have
1950 	 delay slots only in certain circumstances which may depend on
1951 	 nearby insns (which change due to reorg's actions).
1952 
1953 	 For example, the PA port normally has delay slots for unconditional
1954 	 jumps.
1955 
1956 	 However, the PA port claims such jumps do not have a delay slot
1957 	 if they are immediate successors of certain CALL_INSNs.  This
1958 	 allows the port to favor filling the delay slot of the call with
1959 	 the unconditional jump.  */
1960       if (slots_to_fill == 0)
1961 	continue;
1962 
1963       /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
1964 	 says how many.  After initialization, first try optimizing
1965 
1966 	 call _foo		call _foo
1967 	 nop			add %o7,.-L1,%o7
1968 	 b,a L1
1969 	 nop
1970 
1971 	 If this case applies, the delay slot of the call is filled with
1972 	 the unconditional jump.  This is done first to avoid having the
1973 	 delay slot of the call filled in the backward scan.  Also, since
1974 	 the unconditional jump is likely to also have a delay slot, that
1975 	 insn must exist when it is subsequently scanned.
1976 
1977 	 This is tried on each insn with delay slots as some machines
1978 	 have insns which perform calls, but are not represented as
1979 	 CALL_INSNs.  */
1980 
1981       slots_filled = 0;
1982       delay_list = 0;
1983 
1984       if (JUMP_P (insn))
1985 	flags = get_jump_flags (insn, JUMP_LABEL (insn));
1986       else
1987 	flags = get_jump_flags (insn, NULL_RTX);
1988 
1989       if ((trial = next_active_insn (insn))
1990 	  && JUMP_P (trial)
1991 	  && simplejump_p (trial)
1992 	  && eligible_for_delay (insn, slots_filled, trial, flags)
1993 	  && no_labels_between_p (insn, trial)
1994 	  && ! can_throw_internal (trial))
1995 	{
1996 	  rtx *tmp;
1997 	  slots_filled++;
1998 	  delay_list = add_to_delay_list (trial, delay_list);
1999 
2000 	  /* TRIAL may have had its delay slot filled, then unfilled.  When
2001 	     the delay slot is unfilled, TRIAL is placed back on the unfilled
2002 	     slots obstack.  Unfortunately, it is placed on the end of the
2003 	     obstack, not in its original location.  Therefore, we must search
2004 	     from entry i + 1 to the end of the unfilled slots obstack to
2005 	     try and find TRIAL.  */
2006 	  tmp = &unfilled_slots_base[i + 1];
2007 	  while (*tmp != trial && tmp != unfilled_slots_next)
2008 	    tmp++;
2009 
2010 	  /* Remove the unconditional jump from consideration for delay slot
2011 	     filling and unthread it.  */
2012 	  if (*tmp == trial)
2013 	    *tmp = 0;
2014 	  {
2015 	    rtx next = NEXT_INSN (trial);
2016 	    rtx prev = PREV_INSN (trial);
2017 	    if (prev)
2018 	      NEXT_INSN (prev) = next;
2019 	    if (next)
2020 	      PREV_INSN (next) = prev;
2021 	  }
2022 	}
2023 
2024       /* Now, scan backwards from the insn to search for a potential
2025 	 delay-slot candidate.  Stop searching when a label or jump is hit.
2026 
2027 	 For each candidate, if it is to go into the delay slot (moved
2028 	 forward in execution sequence), it must not need or set any resources
2029 	 that were set by later insns and must not set any resources that
2030 	 are needed for those insns.
2031 
2032 	 The delay slot insn itself sets resources unless it is a call
2033 	 (in which case the called routine, not the insn itself, is doing
2034 	 the setting).  */
2035 
2036       if (slots_filled < slots_to_fill)
2037 	{
2038 	  CLEAR_RESOURCE (&needed);
2039 	  CLEAR_RESOURCE (&set);
2040 	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2041 	  mark_referenced_resources (insn, &needed, false);
2042 
2043 	  for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2044 	       trial = next_trial)
2045 	    {
2046 	      next_trial = prev_nonnote_insn (trial);
2047 
2048 	      /* This must be an INSN or CALL_INSN.  */
2049 	      pat = PATTERN (trial);
2050 
2051 	      /* Stand-alone USE and CLOBBER are just for flow.  */
2052 	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2053 		continue;
2054 
2055 	      /* Check for resource conflict first, to avoid unnecessary
2056 		 splitting.  */
2057 	      if (! insn_references_resource_p (trial, &set, true)
2058 		  && ! insn_sets_resource_p (trial, &set, true)
2059 		  && ! insn_sets_resource_p (trial, &needed, true)
2060 #ifdef HAVE_cc0
2061 		  /* Can't separate set of cc0 from its use.  */
2062 		  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2063 #endif
2064 		  && ! can_throw_internal (trial))
2065 		{
2066 		  trial = try_split (pat, trial, 1);
2067 		  next_trial = prev_nonnote_insn (trial);
2068 		  if (eligible_for_delay (insn, slots_filled, trial, flags))
2069 		    {
2070 		      /* In this case, we are searching backward, so if we
2071 			 find insns to put on the delay list, we want
2072 			 to put them at the head, rather than the
2073 			 tail, of the list.  */
2074 
2075 		      update_reg_dead_notes (trial, insn);
2076 		      delay_list = gen_rtx_INSN_LIST (VOIDmode,
2077 						      trial, delay_list);
2078 		      update_block (trial, trial);
2079 		      delete_related_insns (trial);
2080 		      if (slots_to_fill == ++slots_filled)
2081 			break;
2082 		      continue;
2083 		    }
2084 		}
2085 
2086 	      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2087 	      mark_referenced_resources (trial, &needed, true);
2088 	    }
2089 	}
2090 
2091       /* If not all needed slots have been filled, we come here.  */
2092 
2093       /* Try to optimize case of jumping around a single insn.  */
2094 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2095       if (slots_filled != slots_to_fill
2096 	  && delay_list == 0
2097 	  && JUMP_P (insn)
2098 	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
2099 	  && !ANY_RETURN_P (JUMP_LABEL (insn)))
2100 	{
2101 	  delay_list = optimize_skip (insn);
2102 	  if (delay_list)
2103 	    slots_filled += 1;
2104 	}
2105 #endif
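      /* The case handled by optimize_skip is, schematically (for a target
	 with annulling branches):

		beq	...,L1
		insn1
	     L1:

	 where the jump around a single insn can become an annulling branch
	 with insn1 in its delay slot, so that insn1 takes effect only when
	 the branch is not taken.  */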
2106 
2107       /* Try to get insns from beyond the insn needing the delay slot.
2108 	 These insns can neither set nor reference resources set in insns being
2109 	 skipped, cannot set resources in the insn being skipped, and, if this
2110 	 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2111 	 call might not return).
2112 
2113 	 There used to be code which continued past the target label if
2114 	 we saw all uses of the target label.  This code did not work,
2115 	 because it failed to account for some instructions which were
2116 	 both annulled and marked as from the target.  This can happen as a
2117 	 result of optimize_skip.  Since this code was redundant with
2118 	 fill_eager_delay_slots anyways, it was just deleted.  */
2119 
2120       if (slots_filled != slots_to_fill
2121 	  /* If this instruction could throw an exception which is
2122 	     caught in the same function, then it's not safe to fill
2123 	     the delay slot with an instruction from beyond this
2124 	     point.  For example, consider:
2125 
2126                int i = 2;
2127 
2128 	       try {
2129                  f();
2130 	         i = 3;
2131                } catch (...) {}
2132 
2133                return i;
2134 
2135 	     Even though `i' is a local variable, we must be sure not
2136 	     to put `i = 3' in the delay slot if `f' might throw an
2137 	     exception.
2138 
2139 	     Presumably, we should also check to see if we could get
2140 	     back to this function via `setjmp'.  */
2141 	  && ! can_throw_internal (insn)
2142 	  && (!JUMP_P (insn)
2143 	      || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2144 		  && ! simplejump_p (insn)
2145 		  && !ANY_RETURN_P (JUMP_LABEL (insn)))))
2146 	{
2147 	  /* Invariant: TARGET is the insn's jump label if INSN is a
2148 	     JUMP_INSN; otherwise, zero.  */
2149 	  rtx target = 0;
2150 	  int maybe_never = 0;
2151 	  rtx pat, trial_delay;
2152 
2153 	  CLEAR_RESOURCE (&needed);
2154 	  CLEAR_RESOURCE (&set);
2155 
2156 	  if (CALL_P (insn))
2157 	    {
2158 	      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2159 	      mark_referenced_resources (insn, &needed, true);
2160 	      maybe_never = 1;
2161 	    }
2162 	  else
2163 	    {
2164 	      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2165 	      mark_referenced_resources (insn, &needed, true);
2166 	      if (JUMP_P (insn))
2167 		target = JUMP_LABEL (insn);
2168 	    }
2169 
2170 	  if (target == 0 || ANY_RETURN_P (target))
2171 	    for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
2172 		 trial = next_trial)
2173 	      {
2174 		next_trial = next_nonnote_insn (trial);
2175 
2176 		/* This must be an INSN or CALL_INSN.  */
2177 		pat = PATTERN (trial);
2178 
2179 		/* Stand-alone USE and CLOBBER are just for flow.  */
2180 		if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2181 		  continue;
2182 
2183 		/* If this already has filled delay slots, get the insn needing
2184 		   the delay slots.  */
2185 		if (GET_CODE (pat) == SEQUENCE)
2186 		  trial_delay = XVECEXP (pat, 0, 0);
2187 		else
2188 		  trial_delay = trial;
2189 
2190 		/* Stop our search when seeing a jump.  */
2191 		if (JUMP_P (trial_delay))
2192 		  break;
2193 
2194 		/* See if we have a resource problem before we try to
2195 		   split.  */
2196 		if (GET_CODE (pat) != SEQUENCE
2197 		    && ! insn_references_resource_p (trial, &set, true)
2198 		    && ! insn_sets_resource_p (trial, &set, true)
2199 		    && ! insn_sets_resource_p (trial, &needed, true)
2200 #ifdef HAVE_cc0
2201 		    && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2202 #endif
2203 		    && ! (maybe_never && may_trap_or_fault_p (pat))
2204 		    && (trial = try_split (pat, trial, 0))
2205 		    && eligible_for_delay (insn, slots_filled, trial, flags)
2206 		    && ! can_throw_internal (trial))
2207 		  {
2208 		    next_trial = next_nonnote_insn (trial);
2209 		    delay_list = add_to_delay_list (trial, delay_list);
2210 
2211 #ifdef HAVE_cc0
2212 		    if (reg_mentioned_p (cc0_rtx, pat))
2213 		      link_cc0_insns (trial);
2214 #endif
2215 
2216 		    delete_related_insns (trial);
2217 		    if (slots_to_fill == ++slots_filled)
2218 		      break;
2219 		    continue;
2220 		  }
2221 
2222 		mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2223 		mark_referenced_resources (trial, &needed, true);
2224 
2225 		/* Ensure we don't put insns between the setting of cc and the
2226 		   comparison by moving a setting of cc into an earlier delay
2227 		   slot since these insns could clobber the condition code.  */
2228 		set.cc = 1;
2229 
2230 		/* If this is a call or jump, we might not get here.  */
2231 		if (CALL_P (trial_delay)
2232 		    || JUMP_P (trial_delay))
2233 		  maybe_never = 1;
2234 	      }
2235 
2236 	  /* If there are slots left to fill and our search was stopped by an
2237 	     unconditional branch, try the insn at the branch target.  We can
2238 	     redirect the branch if it works.
2239 
2240 	     Don't do this if the insn at the branch target is a branch.  */
2241 	  if (slots_to_fill != slots_filled
2242 	      && trial
2243 	      && jump_to_label_p (trial)
2244 	      && simplejump_p (trial)
2245 	      && (target == 0 || JUMP_LABEL (trial) == target)
2246 	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2247 	      && ! (NONJUMP_INSN_P (next_trial)
2248 		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2249 	      && !JUMP_P (next_trial)
2250 	      && ! insn_references_resource_p (next_trial, &set, true)
2251 	      && ! insn_sets_resource_p (next_trial, &set, true)
2252 	      && ! insn_sets_resource_p (next_trial, &needed, true)
2253 #ifdef HAVE_cc0
2254 	      && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2255 #endif
2256 	      && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2257 	      && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2258 	      && eligible_for_delay (insn, slots_filled, next_trial, flags)
2259 	      && ! can_throw_internal (trial))
2260 	    {
2261 	      /* See comment in relax_delay_slots about necessity of using
2262 		 next_real_insn here.  */
2263 	      rtx new_label = next_real_insn (next_trial);
2264 
2265 	      if (new_label != 0)
2266 		new_label = get_label_before (new_label);
2267 	      else
2268 		new_label = find_end_label (simple_return_rtx);
2269 
2270 	      if (new_label)
2271 	        {
2272 		  delay_list
2273 		    = add_to_delay_list (copy_delay_slot_insn (next_trial),
2274 					 delay_list);
2275 		  slots_filled++;
2276 		  reorg_redirect_jump (trial, new_label);
2277 
2278 		  /* If we merged because we both jumped to the same place,
2279 		     redirect the original insn also.  */
2280 		  if (target)
2281 		    reorg_redirect_jump (insn, new_label);
2282 		}
2283 	    }
2284 	}
2285 
2286       /* If this is an unconditional jump, then try to get insns from the
2287 	 target of the jump.  */
2288       if (JUMP_P (insn)
2289 	  && simplejump_p (insn)
2290 	  && slots_filled != slots_to_fill)
2291 	delay_list
2292 	  = fill_slots_from_thread (insn, const_true_rtx,
2293 				    next_active_insn (JUMP_LABEL (insn)),
2294 				    NULL, 1, 1,
2295 				    own_thread_p (JUMP_LABEL (insn),
2296 						  JUMP_LABEL (insn), 0),
2297 				    slots_to_fill, &slots_filled,
2298 				    delay_list);
2299 
2300       if (delay_list)
2301 	unfilled_slots_base[i]
2302 	  = emit_delay_sequence (insn, delay_list, slots_filled);
2303 
2304       if (slots_to_fill == slots_filled)
2305 	unfilled_slots_base[i] = 0;
2306 
2307       note_delay_statistics (slots_filled, 0);
2308     }
2309 }
2310 
2311 /* Follow any unconditional jump at LABEL, for the purpose of redirecting JUMP;
2312    return the ultimate label reached by any such chain of jumps.
2313    Return a suitable return rtx if the chain ultimately leads to a
2314    return instruction.
2315    If LABEL is not followed by a jump, return LABEL.
2316    If the chain loops or we can't find the end, return LABEL,
2317    since that tells the caller to avoid changing the insn.
2318    If the returned label is obtained by following a REG_CROSSING_JUMP
2319    jump, set *CROSSING to true, otherwise set it to false.  */
2320 
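/* For example: given

	jmp	L1
	...
     L1:	jmp	L2
	...
     L2:	...

   follow_jumps (L1, ...) returns L2 (chains are followed at most 10 hops),
   so the caller can retarget the first jump straight to L2 and skip the
   intermediate branch.  */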
2321 static rtx
2322 follow_jumps (rtx label, rtx jump, bool *crossing)
2323 {
2324   rtx insn;
2325   rtx next;
2326   rtx value = label;
2327   int depth;
2328 
2329   *crossing = false;
2330   if (ANY_RETURN_P (label))
2331     return label;
2332   for (depth = 0;
2333        (depth < 10
2334 	&& (insn = next_active_insn (value)) != 0
2335 	&& JUMP_P (insn)
2336 	&& JUMP_LABEL (insn) != NULL_RTX
2337 	&& ((any_uncondjump_p (insn) && onlyjump_p (insn))
2338 	    || ANY_RETURN_P (PATTERN (insn)))
2339 	&& (next = NEXT_INSN (insn))
2340 	&& BARRIER_P (next));
2341        depth++)
2342     {
2343       rtx this_label = JUMP_LABEL (insn);
2344       rtx tem;
2345 
2346       /* If we have found a cycle, make the insn jump to itself.  */
2347       if (this_label == label)
2348 	return label;
2349       if (ANY_RETURN_P (this_label))
2350 	return this_label;
2351       tem = next_active_insn (this_label);
2352       if (tem
2353 	  && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2354 	      || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2355 	break;
2356 
2357       if (!targetm.can_follow_jump (jump, insn))
2358 	break;
2359       if (!*crossing)
2360 	*crossing
2361 	  = find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX) != NULL_RTX;
2362       value = this_label;
2363     }
2364   if (depth == 10)
2365     return label;
2366   return value;
2367 }
2368 
2369 /* Try to find insns to place in delay slots.
2370 
2371    INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
2372    or is an unconditional branch if CONDITION is const_true_rtx.
2373    *PSLOTS_FILLED is updated with the number of slots that we have filled.
2374 
2375    THREAD is a flow of control: either the insns to be executed if the branch
2376    is true or those executed if it is false; THREAD_IF_TRUE says which.
2377 
2378    OPPOSITE_THREAD is the thread in the opposite direction.  It is used
2379    to see if any potential delay slot insns set things needed there.
2380 
2381    LIKELY is nonzero if it is extremely likely that the branch will be
2382    taken and THREAD_IF_TRUE is set.  This is used for the branch at the
2383    end of a loop back up to the top.
2384 
2385    OWN_THREAD is true if we are the only user of the thread, i.e., it is
2386    the fallthrough code of our jump or the target of the jump when we are
2387    the only jump going there.
2388 
2389    If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
2390    case, we can only take insns from the head of the thread for our delay
2391    slot.  We then adjust the jump to point after the insns we have taken.  */
2392 
2393 static rtx
2394 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2395 			rtx opposite_thread, int likely, int thread_if_true,
2396 			int own_thread, int slots_to_fill,
2397 			int *pslots_filled, rtx delay_list)
2398 {
2399   rtx new_thread;
2400   struct resources opposite_needed, set, needed;
2401   rtx trial;
2402   int lose = 0;
2403   int must_annul = 0;
2404   int flags;
2405 
2406   /* Validate our arguments.  */
2407   gcc_assert (condition != const_true_rtx || thread_if_true);
2408   gcc_assert (own_thread || thread_if_true);
2409 
2410   flags = get_jump_flags (insn, JUMP_LABEL (insn));
2411 
2412   /* If our thread is the end of the subroutine, we can't get any delay
2413      insns from that.  */
2414   if (thread == NULL_RTX || ANY_RETURN_P (thread))
2415     return delay_list;
2416 
2417   /* If this is an unconditional branch, nothing is needed at the
2418      opposite thread.  Otherwise, compute what is needed there.  */
2419   if (condition == const_true_rtx)
2420     CLEAR_RESOURCE (&opposite_needed);
2421   else
2422     mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2423 
2424   /* If the insn at THREAD can be split, do it here to avoid having to
2425      update THREAD and NEW_THREAD if it is done in the loop below.  Also
2426      initialize NEW_THREAD.  */
2427 
2428   new_thread = thread = try_split (PATTERN (thread), thread, 0);
2429 
2430   /* Scan insns at THREAD.  We are looking for an insn that can be removed
2431      from THREAD (it neither sets nor references resources that were set
2432      ahead of it and it doesn't set anything needed by the insns ahead of
2433      it) and that either can be placed in an annulling insn or isn't
2434      needed at OPPOSITE_THREAD.  */
2435 
2436   CLEAR_RESOURCE (&needed);
2437   CLEAR_RESOURCE (&set);
2438 
2439   /* If we do not own this thread, we must stop as soon as we find
2440      something that we can't put in a delay slot, since all we can do
2441      is branch into THREAD at a later point.  Therefore, labels stop
2442      the search if this is not the `true' thread.  */
2443 
2444   for (trial = thread;
2445        ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2446        trial = next_nonnote_insn (trial))
2447     {
2448       rtx pat, old_trial;
2449 
2450       /* If we have passed a label, we no longer own this thread.  */
2451       if (LABEL_P (trial))
2452 	{
2453 	  own_thread = 0;
2454 	  continue;
2455 	}
2456 
2457       pat = PATTERN (trial);
2458       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2459 	continue;
2460 
2461       /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
2462 	 don't separate or copy insns that set and use CC0.  */
2463       if (! insn_references_resource_p (trial, &set, true)
2464 	  && ! insn_sets_resource_p (trial, &set, true)
2465 	  && ! insn_sets_resource_p (trial, &needed, true)
2466 #ifdef HAVE_cc0
2467 	  && ! (reg_mentioned_p (cc0_rtx, pat)
2468 		&& (! own_thread || ! sets_cc0_p (pat)))
2469 #endif
2470 	  && ! can_throw_internal (trial))
2471 	{
2472 	  rtx prior_insn;
2473 
2474 	  /* If TRIAL is redundant with some insn before INSN, we don't
2475 	     actually need to add it to the delay list; we can merely pretend
2476 	     we did.  */
2477 	  if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2478 	    {
2479 	      fix_reg_dead_note (prior_insn, insn);
2480 	      if (own_thread)
2481 		{
2482 		  update_block (trial, thread);
2483 		  if (trial == thread)
2484 		    {
2485 		      thread = next_active_insn (thread);
2486 		      if (new_thread == trial)
2487 			new_thread = thread;
2488 		    }
2489 
2490 		  delete_related_insns (trial);
2491 		}
2492 	      else
2493 		{
2494 		  update_reg_unused_notes (prior_insn, trial);
2495 		  new_thread = next_active_insn (trial);
2496 		}
2497 
2498 	      continue;
2499 	    }
2500 
2501 	  /* There are two ways we can win:  If TRIAL doesn't set anything
2502 	     needed at the opposite thread and can't trap, or if it can
2503 	     go into an annulled delay slot.  */
2504 	  if (!must_annul
2505 	      && (condition == const_true_rtx
2506 	          || (! insn_sets_resource_p (trial, &opposite_needed, true)
2507 		      && ! may_trap_or_fault_p (pat)
2508 		      && ! RTX_FRAME_RELATED_P (trial))))
2509 	    {
2510 	      old_trial = trial;
2511 	      trial = try_split (pat, trial, 0);
2512 	      if (new_thread == old_trial)
2513 		new_thread = trial;
2514 	      if (thread == old_trial)
2515 		thread = trial;
2516 	      pat = PATTERN (trial);
2517 	      if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2518 		goto winner;
2519 	    }
2520 	  else if (0
2521 #ifdef ANNUL_IFTRUE_SLOTS
2522 		   || ! thread_if_true
2523 #endif
2524 #ifdef ANNUL_IFFALSE_SLOTS
2525 		   || thread_if_true
2526 #endif
2527 		   )
2528 	    {
2529 	      old_trial = trial;
2530 	      trial = try_split (pat, trial, 0);
2531 	      if (new_thread == old_trial)
2532 		new_thread = trial;
2533 	      if (thread == old_trial)
2534 		thread = trial;
2535 	      pat = PATTERN (trial);
2536 	      if ((must_annul || delay_list == NULL) && (thread_if_true
2537 		   ? check_annul_list_true_false (0, delay_list)
2538 		     && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2539 		   : check_annul_list_true_false (1, delay_list)
2540 		     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2541 		{
2542 		  rtx temp;
2543 
2544 		  must_annul = 1;
2545 		winner:
2546 
2547 #ifdef HAVE_cc0
2548 		  if (reg_mentioned_p (cc0_rtx, pat))
2549 		    link_cc0_insns (trial);
2550 #endif
2551 
2552 		  /* If we own this thread, delete the insn.  If this is the
2553 		     destination of a branch, show that a basic block status
2554 		     may have been updated.  In any case, mark the new
2555 		     starting point of this thread.  */
2556 		  if (own_thread)
2557 		    {
2558 		      rtx note;
2559 
2560 		      update_block (trial, thread);
2561 		      if (trial == thread)
2562 			{
2563 			  thread = next_active_insn (thread);
2564 			  if (new_thread == trial)
2565 			    new_thread = thread;
2566 			}
2567 
2568 		      /* We are moving this insn, not deleting it.  We must
2569 			 temporarily increment the use count on any referenced
2570 			 label lest it be deleted by delete_related_insns.  */
2571 		      for (note = REG_NOTES (trial);
2572 			   note != NULL_RTX;
2573 			   note = XEXP (note, 1))
2574 			if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2575 			    || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2576 			  {
2577 			    /* REG_LABEL_OPERAND could be
2578 			       NOTE_INSN_DELETED_LABEL too.  */
2579 			    if (LABEL_P (XEXP (note, 0)))
2580 			      LABEL_NUSES (XEXP (note, 0))++;
2581 			    else
2582 			      gcc_assert (REG_NOTE_KIND (note)
2583 					  == REG_LABEL_OPERAND);
2584 			  }
2585 		      if (jump_to_label_p (trial))
2586 			LABEL_NUSES (JUMP_LABEL (trial))++;
2587 
2588 		      delete_related_insns (trial);
2589 
2590 		      for (note = REG_NOTES (trial);
2591 			   note != NULL_RTX;
2592 			   note = XEXP (note, 1))
2593 			if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2594 			    || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2595 			  {
2596 			    /* REG_LABEL_OPERAND could be
2597 			       NOTE_INSN_DELETED_LABEL too.  */
2598 			    if (LABEL_P (XEXP (note, 0)))
2599 			      LABEL_NUSES (XEXP (note, 0))--;
2600 			    else
2601 			      gcc_assert (REG_NOTE_KIND (note)
2602 					  == REG_LABEL_OPERAND);
2603 			  }
2604 		      if (jump_to_label_p (trial))
2605 			LABEL_NUSES (JUMP_LABEL (trial))--;
2606 		    }
2607 		  else
2608 		    new_thread = next_active_insn (trial);
2609 
2610 		  temp = own_thread ? trial : copy_delay_slot_insn (trial);
2611 		  if (thread_if_true)
2612 		    INSN_FROM_TARGET_P (temp) = 1;
2613 
2614 		  delay_list = add_to_delay_list (temp, delay_list);
2615 
2616 		  if (slots_to_fill == ++(*pslots_filled))
2617 		    {
2618 		      /* Even though we have filled all the slots, we
2619 			 may be branching to a location that has a
2620 			 redundant insn.  Skip any if so.  */
2621 		      while (new_thread && ! own_thread
2622 			     && ! insn_sets_resource_p (new_thread, &set, true)
2623 			     && ! insn_sets_resource_p (new_thread, &needed,
2624 							true)
2625 			     && ! insn_references_resource_p (new_thread,
2626 							      &set, true)
2627 			     && (prior_insn
2628 				 = redundant_insn (new_thread, insn,
2629 						   delay_list)))
2630 			{
2631 			  /* We know we do not own the thread, so no need
2632 			     to call update_block and delete_insn.  */
2633 			  fix_reg_dead_note (prior_insn, insn);
2634 			  update_reg_unused_notes (prior_insn, new_thread);
2635 			  new_thread = next_active_insn (new_thread);
2636 			}
2637 		      break;
2638 		    }
2639 
2640 		  continue;
2641 		}
2642 	    }
2643 	}
2644 
2645       /* This insn can't go into a delay slot.  */
2646       lose = 1;
2647       mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2648       mark_referenced_resources (trial, &needed, true);
2649 
2650       /* Ensure we don't put insns between the setting of cc and the comparison
2651 	 by moving a setting of cc into an earlier delay slot since these insns
2652 	 could clobber the condition code.  */
2653       set.cc = 1;
2654 
2655       /* If this insn is a register-register copy and the next insn has
2656 	 a use of our destination, change it to use our source.  That way,
2657 	 it will become a candidate for our delay slot the next time
2658 	 through this loop.  This case occurs commonly in loops that
2659 	 scan a list.
2660 
2661 	 We could check for more complex cases than those tested below,
2662 	 but it doesn't seem worth it.  It might also be a good idea to try
2663 	 to swap the two insns.  That might do better.
2664 
2665 	 We can't do this if the next insn modifies our destination, because
2666 	 that would make the replacement into the insn invalid.  We also can't
2667 	 do this if it modifies our source, because it might be an earlyclobber
2668 	 operand.  This latter test also prevents updating the contents of
2669 	 a PRE_INC.  We also can't do this if there's overlap of source and
2670 	 destination.  Overlap may happen for larger-than-register-size modes.  */
2671 
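      /* For instance (schematic RTL): given

		(set (reg 3) (reg 7))
		(set (reg 9) (plus (reg 3) (const_int 4)))

	 rewriting the second insn to use (reg 7) leaves the copy with no
	 immediate consumer, so it can become a delay-slot candidate on the
	 next iteration of this loop.  */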
2672       if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2673 	  && REG_P (SET_SRC (pat))
2674 	  && REG_P (SET_DEST (pat))
2675 	  && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2676 	{
2677 	  rtx next = next_nonnote_insn (trial);
2678 
2679 	  if (next && NONJUMP_INSN_P (next)
2680 	      && GET_CODE (PATTERN (next)) != USE
2681 	      && ! reg_set_p (SET_DEST (pat), next)
2682 	      && ! reg_set_p (SET_SRC (pat), next)
2683 	      && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2684 	      && ! modified_in_p (SET_DEST (pat), next))
2685 	    validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2686 	}
2687     }
2688 
2689   /* If we stopped on a branch insn that has delay slots, see if we can
2690      steal some of the insns in those slots.  */
2691   if (trial && NONJUMP_INSN_P (trial)
2692       && GET_CODE (PATTERN (trial)) == SEQUENCE
2693       && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2694     {
2695       /* If this is the `true' thread, we will want to follow the jump,
2696 	 so we can only do this if we have taken everything up to here.  */
2697       if (thread_if_true && trial == new_thread)
2698 	{
2699 	  delay_list
2700 	    = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2701 					    delay_list, &set, &needed,
2702 					    &opposite_needed, slots_to_fill,
2703 					    pslots_filled, &must_annul,
2704 					    &new_thread);
2705 	  /* If we owned the thread and are told that it branched
2706 	     elsewhere, make sure we own the thread at the new location.  */
2707 	  if (own_thread && trial != new_thread)
2708 	    own_thread = own_thread_p (new_thread, new_thread, 0);
2709 	}
2710       else if (! thread_if_true)
2711 	delay_list
2712 	  = steal_delay_list_from_fallthrough (insn, condition,
2713 					       PATTERN (trial),
2714 					       delay_list, &set, &needed,
2715 					       &opposite_needed, slots_to_fill,
2716 					       pslots_filled, &must_annul);
2717     }
2718 
2719   /* If we haven't found anything for this delay slot and it is very
2720      likely that the branch will be taken, see if the insn at our target
2721      increments or decrements a register with an increment that does not
2722      depend on the destination register.  If so, try to place the opposite
2723      arithmetic insn after the jump insn and put the arithmetic insn in the
2724      delay slot.  If we can't do this, return.  */
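  /* Schematically (hypothetical SPARC-like code): if the likely-taken
     branch targets

	L1:	add	%o2,4,%o2

     we can emit "sub %o2,4,%o2" on the fall-through path right after the
     branch and put the add in the delay slot; the slot executes on both
     paths, but the subtract cancels the add when the branch falls
     through.  */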
2725   if (delay_list == 0 && likely
2726       && new_thread && !ANY_RETURN_P (new_thread)
2727       && NONJUMP_INSN_P (new_thread)
2728       && !RTX_FRAME_RELATED_P (new_thread)
2729       && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2730       && asm_noperands (PATTERN (new_thread)) < 0)
2731     {
2732       rtx pat = PATTERN (new_thread);
2733       rtx dest;
2734       rtx src;
2735 
2736       trial = new_thread;
2737       pat = PATTERN (trial);
2738 
2739       if (!NONJUMP_INSN_P (trial)
2740 	  || GET_CODE (pat) != SET
2741 	  || ! eligible_for_delay (insn, 0, trial, flags)
2742 	  || can_throw_internal (trial))
2743 	return 0;
2744 
2745       dest = SET_DEST (pat), src = SET_SRC (pat);
2746       if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2747 	  && rtx_equal_p (XEXP (src, 0), dest)
2748 	  && (!FLOAT_MODE_P (GET_MODE (src))
2749 	      || flag_unsafe_math_optimizations)
2750 	  && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2751 	  && ! side_effects_p (pat))
2752 	{
2753 	  rtx other = XEXP (src, 1);
2754 	  rtx new_arith;
2755 	  rtx ninsn;
2756 
2757 	  /* If this is a constant adjustment, use the same code with
2758 	     the negated constant.  Otherwise, reverse the sense of the
2759 	     arithmetic.  */
2760 	  if (CONST_INT_P (other))
2761 	    new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2762 					negate_rtx (GET_MODE (src), other));
2763 	  else
2764 	    new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2765 					GET_MODE (src), dest, other);
2766 
2767 	  ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2768 				   insn);
2769 
2770 	  if (recog_memoized (ninsn) < 0
2771 	      || (extract_insn (ninsn), ! constrain_operands (1)))
2772 	    {
2773 	      delete_related_insns (ninsn);
2774 	      return 0;
2775 	    }
2776 
2777 	  if (own_thread)
2778 	    {
2779 	      update_block (trial, thread);
2780 	      if (trial == thread)
2781 		{
2782 		  thread = next_active_insn (thread);
2783 		  if (new_thread == trial)
2784 		    new_thread = thread;
2785 		}
2786 	      delete_related_insns (trial);
2787 	    }
2788 	  else
2789 	    new_thread = next_active_insn (trial);
2790 
2791 	  ninsn = own_thread ? trial : copy_delay_slot_insn (trial);
2792 	  if (thread_if_true)
2793 	    INSN_FROM_TARGET_P (ninsn) = 1;
2794 
2795 	  delay_list = add_to_delay_list (ninsn, NULL_RTX);
2796 	  (*pslots_filled)++;
2797 	}
2798     }
2799 
2800   if (delay_list && must_annul)
2801     INSN_ANNULLED_BRANCH_P (insn) = 1;
2802 
2803   /* If we are to branch into the middle of this thread, find an appropriate
2804      label or make a new one if none, and redirect INSN to it.  If we hit the
2805      end of the function, use the end-of-function label.  */
2806   if (new_thread != thread)
2807     {
2808       rtx label;
2809       bool crossing = false;
2810 
2811       gcc_assert (thread_if_true);
2812 
2813       if (new_thread && simplejump_or_return_p (new_thread)
2814 	  && redirect_with_delay_list_safe_p (insn,
2815 					      JUMP_LABEL (new_thread),
2816 					      delay_list))
2817 	new_thread = follow_jumps (JUMP_LABEL (new_thread), insn, &crossing);
2818 
2819       if (ANY_RETURN_P (new_thread))
2820 	label = find_end_label (new_thread);
2821       else if (LABEL_P (new_thread))
2822 	label = new_thread;
2823       else
2824 	label = get_label_before (new_thread);
2825 
2826       if (label)
2827 	{
2828 	  reorg_redirect_jump (insn, label);
2829 	  if (crossing)
2830 	    set_unique_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX);
2831 	}
2832     }
2833 
2834   return delay_list;
2835 }
2836 
2837 /* Make another attempt to find insns to place in delay slots.
2838 
2839    We previously looked for insns located in front of the delay insn
2840    and, for non-jump delay insns, located behind the delay insn.
2841 
2842    Here we only try to schedule jump insns and try to move insns from
2843    either the target or the following insns into the delay slot.  If
2844    annulling is supported, we are likely to be able to do this.  Otherwise,
2845    we can do this only if it is safe.  */
2846 
2847 static void
2848 fill_eager_delay_slots (void)
2849 {
2850   rtx insn;
2851   int i;
2852   int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2853 
2854   for (i = 0; i < num_unfilled_slots; i++)
2855     {
2856       rtx condition;
2857       rtx target_label, insn_at_target, fallthrough_insn;
2858       rtx delay_list = 0;
2859       int own_target;
2860       int own_fallthrough;
2861       int prediction, slots_to_fill, slots_filled;
2862 
2863       insn = unfilled_slots_base[i];
2864       if (insn == 0
2865 	  || INSN_DELETED_P (insn)
2866 	  || !JUMP_P (insn)
2867 	  || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2868 	continue;
2869 
2870       slots_to_fill = num_delay_slots (insn);
2871       /* Some machine descriptions have defined instructions to have
2872 	 delay slots only in certain circumstances which may depend on
2873 	 nearby insns (which change due to reorg's actions).
2874 
2875 	 For example, the PA port normally has delay slots for unconditional
2876 	 jumps.
2877 
2878 	 However, the PA port claims such jumps do not have a delay slot
2879 	 if they are immediate successors of certain CALL_INSNs.  This
2880 	 allows the port to favor filling the delay slot of the call with
2881 	 the unconditional jump.  */
2882       if (slots_to_fill == 0)
2883 	continue;
2884 
2885       slots_filled = 0;
2886       target_label = JUMP_LABEL (insn);
2887       condition = get_branch_condition (insn, target_label);
2888 
2889       if (condition == 0)
2890 	continue;
2891 
2892       /* Get the next active fallthrough and target insns and see if we own
2893 	 them.  Then see whether the branch is likely true.  We don't need
2894 	 to do a lot of this for unconditional branches.  */
2895 
2896       insn_at_target = first_active_target_insn (target_label);
2897       own_target = own_thread_p (target_label, target_label, 0);
2898 
2899       if (condition == const_true_rtx)
2900 	{
2901 	  own_fallthrough = 0;
2902 	  fallthrough_insn = 0;
2903 	  prediction = 2;
2904 	}
2905       else
2906 	{
2907 	  fallthrough_insn = next_active_insn (insn);
2908 	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2909 	  prediction = mostly_true_jump (insn);
2910 	}
2911 
2912       /* If this insn is expected to branch, first try to get insns from our
2913 	 target, then our fallthrough insns.  If it is not expected to branch,
2914 	 try the other order.  */
2915 
2916       if (prediction > 0)
2917 	{
2918 	  delay_list
2919 	    = fill_slots_from_thread (insn, condition, insn_at_target,
2920 				      fallthrough_insn, prediction == 2, 1,
2921 				      own_target,
2922 				      slots_to_fill, &slots_filled, delay_list);
2923 
2924 	  if (delay_list == 0 && own_fallthrough)
2925 	    {
2926 	      /* Even though we didn't find anything for delay slots,
2927 		 we might have found a redundant insn which we deleted
2928 		 from the thread that was filled.  So we have to recompute
2929 		 the next insn at the target.  */
2930 	      target_label = JUMP_LABEL (insn);
2931 	      insn_at_target = first_active_target_insn (target_label);
2932 
2933 	      delay_list
2934 		= fill_slots_from_thread (insn, condition, fallthrough_insn,
2935 					  insn_at_target, 0, 0,
2936 					  own_fallthrough,
2937 					  slots_to_fill, &slots_filled,
2938 					  delay_list);
2939 	    }
2940 	}
2941       else
2942 	{
2943 	  if (own_fallthrough)
2944 	    delay_list
2945 	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
2946 					insn_at_target, 0, 0,
2947 					own_fallthrough,
2948 					slots_to_fill, &slots_filled,
2949 					delay_list);
2950 
2951 	  if (delay_list == 0)
2952 	    delay_list
2953 	      = fill_slots_from_thread (insn, condition, insn_at_target,
2954 					next_active_insn (insn), 0, 1,
2955 					own_target,
2956 					slots_to_fill, &slots_filled,
2957 					delay_list);
2958 	}
2959 
2960       if (delay_list)
2961 	unfilled_slots_base[i]
2962 	  = emit_delay_sequence (insn, delay_list, slots_filled);
2963 
2964       if (slots_to_fill == slots_filled)
2965 	unfilled_slots_base[i] = 0;
2966 
2967       note_delay_statistics (slots_filled, 1);
2968     }
2969 }
2970 
2971 static void delete_computation (rtx insn);
2972 
2973 /* Recursively delete prior insns that compute the value (used only by INSN
2974    which the caller is deleting) stored in the register mentioned by NOTE
2975    which is a REG_DEAD note associated with INSN.  */
2976 
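/* For instance (schematic RTL): when the caller deletes

	(insn 10 (set (reg 4) (plus (reg 2) (reg 3))))	; REG_DEAD (reg 2)

   a prior (set (reg 2) ...) whose value was consumed only by insn 10 can
   be deleted as well, and so on recursively through that insn's own
   REG_DEAD notes.  */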
2977 static void
2978 delete_prior_computation (rtx note, rtx insn)
2979 {
2980   rtx our_prev;
2981   rtx reg = XEXP (note, 0);
2982 
2983   for (our_prev = prev_nonnote_insn (insn);
2984        our_prev && (NONJUMP_INSN_P (our_prev)
2985 		    || CALL_P (our_prev));
2986        our_prev = prev_nonnote_insn (our_prev))
2987     {
2988       rtx pat = PATTERN (our_prev);
2989 
2990       /* If we reach a CALL which is not calling a const function
2991 	 or the callee pops the arguments, then give up.  */
2992       if (CALL_P (our_prev)
2993 	  && (! RTL_CONST_CALL_P (our_prev)
2994 	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2995 	break;
2996 
2997       /* If we reach a SEQUENCE, it is too complex to try to
2998 	 do anything with it, so give up.  We can be run during
2999 	 and after reorg, so SEQUENCE rtl can legitimately show
3000 	 up here.  */
3001       if (GET_CODE (pat) == SEQUENCE)
3002 	break;
3003 
3004       if (GET_CODE (pat) == USE
3005 	  && NONJUMP_INSN_P (XEXP (pat, 0)))
3006 	/* reorg creates USEs that look like this.  We leave them
3007 	   alone because reorg needs them for its own purposes.  */
3008 	break;
3009 
3010       if (reg_set_p (reg, pat))
3011 	{
3012 	  if (side_effects_p (pat) && !CALL_P (our_prev))
3013 	    break;
3014 
3015 	  if (GET_CODE (pat) == PARALLEL)
3016 	    {
3017 	      /* If we find a SET of something else, we can't
3018 		 delete the insn.  */
3019 
3020 	      int i;
3021 
3022 	      for (i = 0; i < XVECLEN (pat, 0); i++)
3023 		{
3024 		  rtx part = XVECEXP (pat, 0, i);
3025 
3026 		  if (GET_CODE (part) == SET
3027 		      && SET_DEST (part) != reg)
3028 		    break;
3029 		}
3030 
3031 	      if (i == XVECLEN (pat, 0))
3032 		delete_computation (our_prev);
3033 	    }
3034 	  else if (GET_CODE (pat) == SET
3035 		   && REG_P (SET_DEST (pat)))
3036 	    {
3037 	      int dest_regno = REGNO (SET_DEST (pat));
3038 	      int dest_endregno = END_REGNO (SET_DEST (pat));
3039 	      int regno = REGNO (reg);
3040 	      int endregno = END_REGNO (reg);
3041 
3042 	      if (dest_regno >= regno
3043 		  && dest_endregno <= endregno)
3044 		delete_computation (our_prev);
3045 
3046 	      /* We may have a multi-word hard register and some, but not
3047 		 all, of the words of the register are needed in subsequent
3048 		 insns.  Write REG_UNUSED notes for those parts that were not
3049 		 needed.  */
3050 	      else if (dest_regno <= regno
3051 		       && dest_endregno >= endregno)
3052 		{
3053 		  int i;
3054 
3055 		  add_reg_note (our_prev, REG_UNUSED, reg);
3056 
3057 		  for (i = dest_regno; i < dest_endregno; i++)
3058 		    if (! find_regno_note (our_prev, REG_UNUSED, i))
3059 		      break;
3060 
3061 		  if (i == dest_endregno)
3062 		    delete_computation (our_prev);
3063 		}
3064 	    }
3065 
3066 	  break;
3067 	}
3068 
3069       /* If PAT references the register that dies here, it is an
3070 	 additional use.  Hence any prior SET isn't dead.  However, this
3071 	 insn becomes the new place for the REG_DEAD note.  */
3072       if (reg_overlap_mentioned_p (reg, pat))
3073 	{
3074 	  XEXP (note, 1) = REG_NOTES (our_prev);
3075 	  REG_NOTES (our_prev) = note;
3076 	  break;
3077 	}
3078     }
3079 }
3080 
3081 /* Delete INSN and recursively delete insns that compute values used only
3082    by INSN.  This uses the REG_DEAD notes computed during flow analysis.
3083 
3084    Look at all our REG_DEAD notes.  If a previous insn does nothing other
3085    than set a register that dies in this insn, we can delete that insn
3086    as well.
3087 
3088    On machines with CC0, if CC0 is used in this insn, we may be able to
3089    delete the insn that set it.  */
3090 
3091 static void
3092 delete_computation (rtx insn)
3093 {
3094   rtx note, next;
3095 
3096 #ifdef HAVE_cc0
3097   if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3098     {
3099       rtx prev = prev_nonnote_insn (insn);
3100       /* We assume that at this stage
3101 	 CC's are always set explicitly
3102 	 and always immediately before the jump that
3103 	 will use them.  So if the previous insn
3104 	 exists to set the CC's, delete it
3105 	 (unless it performs auto-increments, etc.).  */
3106       if (prev && NONJUMP_INSN_P (prev)
3107 	  && sets_cc0_p (PATTERN (prev)))
3108 	{
3109 	  if (sets_cc0_p (PATTERN (prev)) > 0
3110 	      && ! side_effects_p (PATTERN (prev)))
3111 	    delete_computation (prev);
3112 	  else
3113 	    /* Otherwise, show that cc0 won't be used.  */
3114 	    add_reg_note (prev, REG_UNUSED, cc0_rtx);
3115 	}
3116     }
3117 #endif
3118 
3119   for (note = REG_NOTES (insn); note; note = next)
3120     {
3121       next = XEXP (note, 1);
3122 
3123       if (REG_NOTE_KIND (note) != REG_DEAD
3124 	  /* Verify that the REG_NOTE is legitimate.  */
3125 	  || !REG_P (XEXP (note, 0)))
3126 	continue;
3127 
3128       delete_prior_computation (note, insn);
3129     }
3130 
3131   delete_related_insns (insn);
3132 }
3133 
3134 /* If all INSN does is set the pc, delete it,
3135    and delete the insn that set the condition codes for it
3136    if that is what the previous insn did.  */
3137 
3138 static void
3139 delete_jump (rtx insn)
3140 {
3141   rtx set = single_set (insn);
3142 
3143   if (set && GET_CODE (SET_DEST (set)) == PC)
3144     delete_computation (insn);
3145 }
3146 
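/* Return the nearest label before the first active insn after X, scanning
   backwards from that insn.  Return NULL_RTX if the scan reaches SCAN_LIMIT
   (or the start of the insn chain) before finding a label.  */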
3147 static rtx
3148 label_before_next_insn (rtx x, rtx scan_limit)
3149 {
3150   rtx insn = next_active_insn (x);
3151   while (insn)
3152     {
3153       insn = PREV_INSN (insn);
3154       if (insn == scan_limit || insn == NULL_RTX)
3155 	return NULL_RTX;
3156       if (LABEL_P (insn))
3157 	break;
3158     }
3159   return insn;
3160 }
3161 
3162 
3163 /* Once we have tried two ways to fill a delay slot, make a pass over the
3164    code to try to improve the results and to do such things as more jump
3165    threading.  */
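/* Within this function, a filled delay slot appears as a NONJUMP_INSN whose
   PATTERN is a SEQUENCE: element 0 of the vector is the insn that needed
   the slots (usually a JUMP_INSN here) and the remaining elements are the
   insns filling them.  Schematically (a sketch; real operands are target
   specific):

	(insn (sequence [(jump_insn ...) (insn ...) ...]))  */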
3166 
3167 static void
3168 relax_delay_slots (rtx first)
3169 {
3170   rtx insn, next, pat;
3171   rtx trial, delay_insn, target_label;
3172 
3173   /* Look at every JUMP_INSN and see if we can improve it.  */
3174   for (insn = first; insn; insn = next)
3175     {
3176       rtx other;
3177       bool crossing;
3178 
3179       next = next_active_insn (insn);
3180 
3181       /* If this is a jump insn, see if it now jumps to a jump, jumps to
3182 	 the next insn, or jumps to a label that is not the last of a
3183 	 group of consecutive labels.  */
3184       if (JUMP_P (insn)
3185 	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
3186 	  && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
3187 	{
3188 	  target_label
3189 	    = skip_consecutive_labels (follow_jumps (target_label, insn,
3190 						     &crossing));
3191 	  if (ANY_RETURN_P (target_label))
3192 	    target_label = find_end_label (target_label);
3193 
3194 	  if (target_label && next_active_insn (target_label) == next
3195 	      && ! condjump_in_parallel_p (insn))
3196 	    {
3197 	      delete_jump (insn);
3198 	      continue;
3199 	    }
3200 
3201 	  if (target_label && target_label != JUMP_LABEL (insn))
3202 	    {
3203 	      reorg_redirect_jump (insn, target_label);
3204 	      if (crossing)
3205 		set_unique_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX);
3206 	    }
3207 
3208 	  /* See if this jump conditionally branches around an unconditional
3209 	     jump.  If so, invert this jump and point it to the target of the
3210 	     second jump.  */
3211 	  if (next && simplejump_or_return_p (next)
3212 	      && any_condjump_p (insn)
3213 	      && target_label
3214 	      && next_active_insn (target_label) == next_active_insn (next)
3215 	      && no_labels_between_p (insn, next))
3216 	    {
3217 	      rtx label = JUMP_LABEL (next);
3218 
3219 	      /* Be careful how we do this to avoid deleting code or
3220 		 labels that are momentarily dead.  See similar optimization
3221 		 in jump.c.
3222 
3223 		 We also need to ensure we properly handle the case when
3224 		 invert_jump fails.  */
3225 
3226 	      ++LABEL_NUSES (target_label);
3227 	      if (!ANY_RETURN_P (label))
3228 		++LABEL_NUSES (label);
3229 
3230 	      if (invert_jump (insn, label, 1))
3231 		{
3232 		  delete_related_insns (next);
3233 		  next = insn;
3234 		}
3235 
3236 	      if (!ANY_RETURN_P (label))
3237 		--LABEL_NUSES (label);
3238 
3239 	      if (--LABEL_NUSES (target_label) == 0)
3240 		delete_related_insns (target_label);
3241 
3242 	      continue;
3243 	    }
3244 	}
3245 
3246       /* If this is an unconditional jump and the previous insn is a
3247 	 conditional jump, try reversing the condition of the previous
3248 	 insn and swapping our targets.  The next pass might be able to
3249 	 fill the slots.
3250 
3251 	 Don't do this if we expect the conditional branch to be true, because
3252 	 we would then be making the more common case longer.  */
3253 
3254       if (simplejump_or_return_p (insn)
3255 	  && (other = prev_active_insn (insn)) != 0
3256 	  && any_condjump_p (other)
3257 	  && no_labels_between_p (other, insn)
3258 	  && 0 > mostly_true_jump (other))
3259 	{
3260 	  rtx other_target = JUMP_LABEL (other);
3261 	  target_label = JUMP_LABEL (insn);
3262 
3263 	  if (invert_jump (other, target_label, 0))
3264 	    reorg_redirect_jump (insn, other_target);
3265 	}
3266 
3267       /* Now look only at cases where we have a filled delay slot.  */
3268       if (!NONJUMP_INSN_P (insn) || GET_CODE (PATTERN (insn)) != SEQUENCE)
3269 	continue;
3270 
3271       pat = PATTERN (insn);
3272       delay_insn = XVECEXP (pat, 0, 0);
3273 
3274       /* See if the first insn in the delay slot is redundant with some
3275 	 previous insn.  Remove it from the delay slot if so; then set up
3276 	 to reprocess this insn.  */
3277       if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3278 	{
3279 	  update_block (XVECEXP (pat, 0, 1), insn);
3280 	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
3281 	  next = prev_active_insn (next);
3282 	  continue;
3283 	}
3284 
3285       /* See if we have a RETURN insn with a filled delay slot followed
3286 	 by a RETURN insn with an unfilled delay slot.  If so, we can delete
3287 	 the first RETURN (but not its delay insn).  This gives the same
3288 	 effect in fewer instructions.
3289 
3290 	 Only do so if optimizing for size since this results in slower, but
3291 	 smaller code.  */
3292       if (optimize_function_for_size_p (cfun)
3293 	  && ANY_RETURN_P (PATTERN (delay_insn))
3294 	  && next
3295 	  && JUMP_P (next)
3296 	  && PATTERN (next) == PATTERN (delay_insn))
3297 	{
3298 	  rtx after;
3299 	  int i;
3300 
3301 	  /* Delete the RETURN and just execute the delay list insns.
3302 
3303 	     We do this by deleting the INSN containing the SEQUENCE, then
3304 	     re-emitting the insns separately, and then deleting the RETURN.
3305 	     This allows the count of the jump target to be properly
3306 	     decremented.
3307 
3308 	     Note that we need to change the INSN_UID of the re-emitted insns
3309 	     since it is used to hash the insns for mark_target_live_regs and
3310 	     the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3311 
3312 	     Clear the from target bit, since these insns are no longer
3313 	     in delay slots.  */
3314 	  for (i = 0; i < XVECLEN (pat, 0); i++)
3315 	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3316 
3317 	  trial = PREV_INSN (insn);
3318 	  delete_related_insns (insn);
3319 	  gcc_assert (GET_CODE (pat) == SEQUENCE);
3320 	  add_insn_after (delay_insn, trial, NULL);
3321 	  after = delay_insn;
3322 	  for (i = 1; i < XVECLEN (pat, 0); i++)
3323 	    after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3324 	  delete_scheduled_jump (delay_insn);
3325 	  continue;
3326 	}
3327 
3328       /* Now look only at the cases where we have a filled JUMP_INSN.  */
3329       if (!JUMP_P (delay_insn)
3330 	  || !(condjump_p (delay_insn) || condjump_in_parallel_p (delay_insn)))
3331 	continue;
3332 
3333       target_label = JUMP_LABEL (delay_insn);
3334       if (target_label && ANY_RETURN_P (target_label))
3335 	continue;
3336 
3337       /* If this jump goes to another unconditional jump, thread it, but
3338 	 don't convert a jump into a RETURN here.  */
3339       trial = skip_consecutive_labels (follow_jumps (target_label, delay_insn,
3340 						     &crossing));
3341       if (ANY_RETURN_P (trial))
3342 	trial = find_end_label (trial);
3343 
3344       if (trial && trial != target_label
3345 	  && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3346 	{
3347 	  reorg_redirect_jump (delay_insn, trial);
3348 	  target_label = trial;
3349 	  if (crossing)
3350 	    set_unique_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX);
3351 	}
3352 
3353       /* If the first insn at TARGET_LABEL is redundant with a previous
3354 	 insn, redirect the jump to the following insn and process again.
3355 	 We use next_real_insn instead of next_active_insn so we
3356 	 don't skip USE-markers, or we'll end up with incorrect
3357 	 liveness info.  */
3358       trial = next_real_insn (target_label);
3359       if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3360 	  && redundant_insn (trial, insn, 0)
3361 	  && ! can_throw_internal (trial))
3362 	{
3363 	  /* Figure out where to emit the special USE insn so we don't
3364 	     later incorrectly compute register live/death info.  */
3365 	  rtx tmp = next_active_insn (trial);
3366 	  if (tmp == 0)
3367 	    tmp = find_end_label (simple_return_rtx);
3368 
3369 	  if (tmp)
3370 	    {
3371 	      /* Insert the special USE insn and update dataflow info.  */
3372 	      update_block (trial, tmp);
3373 
3374 	      /* Now emit a label before the special USE insn, and
3375 		 redirect our jump to the new label.  */
3376 	      target_label = get_label_before (PREV_INSN (tmp));
3377 	      reorg_redirect_jump (delay_insn, target_label);
3378 	      next = insn;
3379 	      continue;
3380 	    }
3381 	}
3382 
3383       /* Similarly, if it is an unconditional jump with one insn in its
3384 	 delay list and that insn is redundant, thread the jump.  */
3385       if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3386 	  && XVECLEN (PATTERN (trial), 0) == 2
3387 	  && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3388 	  && simplejump_or_return_p (XVECEXP (PATTERN (trial), 0, 0))
3389 	  && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3390 	{
3391 	  target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3392 	  if (ANY_RETURN_P (target_label))
3393 	    target_label = find_end_label (target_label);
3394 
3395 	  if (target_label
3396 	      && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3397 						   insn))
3398 	    {
3399 	      update_block (XVECEXP (PATTERN (trial), 0, 1), insn);
3400 	      reorg_redirect_jump (delay_insn, target_label);
3401 	      next = insn;
3402 	      continue;
3403 	    }
3404 	}
3405 
3406       /* See if we have a simple (conditional) jump that is useless.  */
3407       if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3408 	  && ! condjump_in_parallel_p (delay_insn)
3409 	  && prev_active_insn (target_label) == insn
3410 	  && ! BARRIER_P (prev_nonnote_insn (target_label))
3411 #ifdef HAVE_cc0
3412 	  /* If the last insn in the delay slot sets CC0 for some insn,
3413 	     various code assumes that it is in a delay slot.  We could
3414 	     put it back where it belonged and delete the register notes,
3415 	     but it doesn't seem worthwhile in this uncommon case.  */
3416 	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3417 			      REG_CC_USER, NULL_RTX)
3418 #endif
3419 	  )
3420 	{
3421 	  rtx after;
3422 	  int i;
3423 
3424 	  /* All this insn does is execute its delay list and jump to the
3425 	     following insn.  So delete the jump and just execute the delay
3426 	     list insns.
3427 
3428 	     We do this by deleting the INSN containing the SEQUENCE, then
3429 	     re-emitting the insns separately, and then deleting the jump.
3430 	     This allows the count of the jump target to be properly
3431 	     decremented.
3432 
3433 	     Note that we need to change the INSN_UID of the re-emitted insns
3434 	     since it is used to hash the insns for mark_target_live_regs and
3435 	     the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3436 
3437 	     Clear the from target bit, since these insns are no longer
3438 	     in delay slots.  */
3439 	  for (i = 0; i < XVECLEN (pat, 0); i++)
3440 	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3441 
3442 	  trial = PREV_INSN (insn);
3443 	  delete_related_insns (insn);
3444 	  gcc_assert (GET_CODE (pat) == SEQUENCE);
3445 	  add_insn_after (delay_insn, trial, NULL);
3446 	  after = delay_insn;
3447 	  for (i = 1; i < XVECLEN (pat, 0); i++)
3448 	    after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3449 	  delete_scheduled_jump (delay_insn);
3450 	  continue;
3451 	}
3452 
3453       /* See if this is an unconditional jump around a single insn which is
3454 	 identical to the one in its delay slot.  In this case, we can just
3455 	 delete the branch and the insn in its delay slot.  */
3456       if (next && NONJUMP_INSN_P (next)
3457 	  && label_before_next_insn (next, insn) == target_label
3458 	  && simplejump_p (insn)
3459 	  && XVECLEN (pat, 0) == 2
3460 	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3461 	{
3462 	  delete_related_insns (insn);
3463 	  continue;
3464 	}
3465 
3466       /* See if this jump (with its delay slots) conditionally branches
3467 	 around an unconditional jump (without delay slots).  If so, invert
3468 	 this jump and point it to the target of the second jump.  We cannot
3469 	 do this for annulled jumps, though.  Again, don't convert a jump to
3470 	 a RETURN here.  */
3471       if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3472 	  && any_condjump_p (delay_insn)
3473 	  && next && simplejump_or_return_p (next)
3474 	  && next_active_insn (target_label) == next_active_insn (next)
3475 	  && no_labels_between_p (insn, next))
3476 	{
3477 	  rtx label = JUMP_LABEL (next);
3478 	  rtx old_label = JUMP_LABEL (delay_insn);
3479 
3480 	  if (ANY_RETURN_P (label))
3481 	    label = find_end_label (label);
3482 
3483 	  /* find_end_label can generate a new label. Check this first.  */
3484 	  if (label
3485 	      && no_labels_between_p (insn, next)
3486 	      && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3487 	    {
3488 	      /* Be careful how we do this to avoid deleting code or labels
3489 		 that are momentarily dead.  See similar optimization in
3490 		 jump.c  */
3491 	      if (old_label)
3492 		++LABEL_NUSES (old_label);
3493 
3494 	      if (invert_jump (delay_insn, label, 1))
3495 		{
3496 		  int i;
3497 
3498 		  /* Must update the INSN_FROM_TARGET_P bits now that
3499 		     the branch is reversed, so that mark_target_live_regs
3500 		     will handle the delay slot insn correctly.  */
3501 		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3502 		    {
3503 		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
3504 		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3505 		    }
3506 
3507 		  delete_related_insns (next);
3508 		  next = insn;
3509 		}
3510 
3511 	      if (old_label && --LABEL_NUSES (old_label) == 0)
3512 		delete_related_insns (old_label);
3513 	      continue;
3514 	    }
3515 	}
3516 
3517       /* If we own the thread opposite the way this insn branches, see if we
3518 	 can merge its delay slots with following insns.  */
3519       if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3520 	  && own_thread_p (NEXT_INSN (insn), 0, 1))
3521 	try_merge_delay_insns (insn, next);
3522       else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3523 	       && own_thread_p (target_label, target_label, 0))
3524 	try_merge_delay_insns (insn, next_active_insn (target_label));
3525 
3526       /* If we get here, we haven't deleted INSN.  But we may have deleted
3527 	 NEXT, so recompute it.  */
3528       next = next_active_insn (insn);
3529     }
3530 }
3531 
3532 
3533 /* Look for filled jumps to the end of function label.  We can try to convert
3534    them into RETURN insns if the insns in the delay slot are valid for the
3535    RETURN as well.  */
3536 
3537 static void
3538 make_return_insns (rtx first)
3539 {
3540   rtx insn, jump_insn, pat;
3541   rtx real_return_label = function_return_label;
3542   rtx real_simple_return_label = function_simple_return_label;
3543   int slots, i;
3544 
3545   /* See if there is a RETURN insn in the function other than the ones we
3546      made for the end-of-function labels.  If so, redirect anything we can't
3547      change into a RETURN to jump to it.  */
3548   for (insn = first; insn; insn = NEXT_INSN (insn))
3549     if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
3550       {
3551 	rtx t = get_label_before (insn);
3552 	if (PATTERN (insn) == ret_rtx)
3553 	  real_return_label = t;
3554 	else
3555 	  real_simple_return_label = t;
3556 	break;
3557       }
3558 
3559   /* Show an extra usage of REAL_RETURN_LABEL and REAL_SIMPLE_RETURN_LABEL
3560      so they won't go away if they were equal to the end-of-function labels.  */
3561   if (real_return_label)
3562     LABEL_NUSES (real_return_label)++;
3563   if (real_simple_return_label)
3564     LABEL_NUSES (real_simple_return_label)++;
3565 
3566   /* Clear the list of insns to fill so we can use it.  */
3567   obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3568 
3569   for (insn = first; insn; insn = NEXT_INSN (insn))
3570     {
3571       int flags;
3572       rtx kind, real_label;
3573 
3574       /* Only look at filled JUMP_INSNs that go to the end of function
3575 	 label.  */
3576       if (!NONJUMP_INSN_P (insn)
3577 	  || GET_CODE (PATTERN (insn)) != SEQUENCE
3578 	  || !jump_to_label_p (XVECEXP (PATTERN (insn), 0, 0)))
3579 	continue;
3580 
3581       if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) == function_return_label)
3582 	{
3583 	  kind = ret_rtx;
3584 	  real_label = real_return_label;
3585 	}
3586       else if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0))
3587 	       == function_simple_return_label)
3588 	{
3589 	  kind = simple_return_rtx;
3590 	  real_label = real_simple_return_label;
3591 	}
3592       else
3593 	continue;
3594 
3595       pat = PATTERN (insn);
3596       jump_insn = XVECEXP (pat, 0, 0);
3597 
3598       /* If we can't make the jump into a RETURN, try to redirect it to the best
3599 	 RETURN and go on to the next insn.  */
3600       if (!reorg_redirect_jump (jump_insn, kind))
3601 	{
3602 	  /* Make sure redirecting the jump will not invalidate the delay
3603 	     slot insns.  */
3604 	  if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
3605 	    reorg_redirect_jump (jump_insn, real_label);
3606 	  continue;
3607 	}
3608 
3609       /* See if this RETURN can accept the insns currently in its delay
3610 	 slot.  It can if it has at least as many slots and the contents
3611 	 of each are valid.  */
3612 
3613       flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3614       slots = num_delay_slots (jump_insn);
3615       if (slots >= XVECLEN (pat, 0) - 1)
3616 	{
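	  /* Check each delay slot insn: for an annulled branch the insn must
	     pass the annul-false or annul-true test matching its
	     INSN_FROM_TARGET_P bit; otherwise the plain delay test applies.  */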
3617 	  for (i = 1; i < XVECLEN (pat, 0); i++)
3618 	    if (! (
3619 #ifdef ANNUL_IFFALSE_SLOTS
3620 		   (INSN_ANNULLED_BRANCH_P (jump_insn)
3621 		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3622 		   ? eligible_for_annul_false (jump_insn, i - 1,
3623 					       XVECEXP (pat, 0, i), flags) :
3624 #endif
3625 #ifdef ANNUL_IFTRUE_SLOTS
3626 		   (INSN_ANNULLED_BRANCH_P (jump_insn)
3627 		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3628 		   ? eligible_for_annul_true (jump_insn, i - 1,
3629 					      XVECEXP (pat, 0, i), flags) :
3630 #endif
3631 		   eligible_for_delay (jump_insn, i - 1,
3632 				       XVECEXP (pat, 0, i), flags)))
3633 	      break;
3634 	}
3635       else
3636 	i = 0;
3637 
3638       if (i == XVECLEN (pat, 0))
3639 	continue;
3640 
3641       /* We have to do something with this insn.  If it is an unconditional
3642 	 RETURN, delete the SEQUENCE and output the individual insns,
3643 	 followed by the RETURN.  Then set things up so we try to find
3644 	 insns for its delay slots, if it needs some.  */
3645       if (ANY_RETURN_P (PATTERN (jump_insn)))
3646 	{
3647 	  rtx prev = PREV_INSN (insn);
3648 
3649 	  delete_related_insns (insn);
3650 	  for (i = 1; i < XVECLEN (pat, 0); i++)
3651 	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3652 
3653 	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3654 	  emit_barrier_after (insn);
3655 
3656 	  if (slots)
3657 	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
3658 	}
3659       else
3660 	/* It is probably more efficient to keep this with its current
3661 	   delay slot as a branch to a RETURN.  */
3662 	reorg_redirect_jump (jump_insn, real_label);
3663     }
3664 
3665   /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
3666      new delay slots we have created.  */
3667   if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
3668     delete_related_insns (real_return_label);
3669   if (real_simple_return_label != NULL_RTX
3670       && --LABEL_NUSES (real_simple_return_label) == 0)
3671     delete_related_insns (real_simple_return_label);
3672 
3673   fill_simple_delay_slots (1);
3674   fill_simple_delay_slots (0);
3675 }
3676 
3677 /* Try to find insns to place in delay slots.  */
3678 
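/* This is the driver: it numbers the insns, collects those with delay
   slots, runs the filling passes MAX_REORG_PASSES times, optionally
   converts jumps to the end-of-function label into RETURNs, and then
   cleans up and dumps statistics.  */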
3679 void
3680 dbr_schedule (rtx first)
3681 {
3682   rtx insn, next, epilogue_insn = 0;
3683   int i;
3684   bool need_return_insns;
3685 
3686   /* If the current function has no insns other than the prologue and
3687      epilogue, then do not try to fill any delay slots.  */
3688   if (n_basic_blocks == NUM_FIXED_BLOCKS)
3689     return;
3690 
3691   /* Find the highest INSN_UID and allocate and initialize our map from
3692      INSN_UID's to position in code.  */
3693   for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3694     {
3695       if (INSN_UID (insn) > max_uid)
3696 	max_uid = INSN_UID (insn);
3697       if (NOTE_P (insn)
3698 	  && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3699 	epilogue_insn = insn;
3700     }
3701 
3702   uid_to_ruid = XNEWVEC (int, max_uid + 1);
3703   for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3704     uid_to_ruid[INSN_UID (insn)] = i;
3705 
3706   /* Initialize the list of insns that need filling.  */
3707   if (unfilled_firstobj == 0)
3708     {
3709       gcc_obstack_init (&unfilled_slots_obstack);
3710       unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3711     }
3712 
3713   for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3714     {
3715       rtx target;
3716 
3717       if (JUMP_P (insn))
3718         INSN_ANNULLED_BRANCH_P (insn) = 0;
3719       INSN_FROM_TARGET_P (insn) = 0;
3720 
3721       /* Skip vector tables.  We can't get attributes for them.  */
3722       if (JUMP_TABLE_DATA_P (insn))
3723 	continue;
3724 
3725       if (num_delay_slots (insn) > 0)
3726 	obstack_ptr_grow (&unfilled_slots_obstack, insn);
3727 
3728       /* Ensure all jumps go to the last of a set of consecutive labels.  */
3729       if (JUMP_P (insn)
3730 	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
3731 	  && !ANY_RETURN_P (JUMP_LABEL (insn))
3732 	  && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3733 	      != JUMP_LABEL (insn)))
3734 	redirect_jump (insn, target, 1);
3735     }
3736 
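  /* Compute the static resource information used by mark_target_live_regs,
     including what must be live at the end of the function; EPILOGUE_INSN
     is the NOTE_INSN_EPILOGUE_BEG note found above, if any.  */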
3737   init_resource_info (epilogue_insn);
3738 
3739   /* Show we haven't computed an end-of-function label yet.  */
3740   function_return_label = function_simple_return_label = NULL_RTX;
3741 
3742   /* Initialize the statistics for this function.  */
3743   memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3744   memset (num_filled_delays, 0, sizeof num_filled_delays);
3745 
3746   /* Now do the delay slot filling.  Try everything twice in case earlier
3747      changes make more slots fillable.  */
3748 
3749   for (reorg_pass_number = 0;
3750        reorg_pass_number < MAX_REORG_PASSES;
3751        reorg_pass_number++)
3752     {
3753       fill_simple_delay_slots (1);
3754       fill_simple_delay_slots (0);
3755       fill_eager_delay_slots ();
3756       relax_delay_slots (first);
3757     }
3758 
3759   /* If we made an end of function label, indicate that it is now
3760      safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3761      If it is now unused, delete it.  */
3762   if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
3763     delete_related_insns (function_return_label);
3764   if (function_simple_return_label
3765       && --LABEL_NUSES (function_simple_return_label) == 0)
3766     delete_related_insns (function_simple_return_label);
3767 
3768   need_return_insns = false;
3769 #ifdef HAVE_return
3770   need_return_insns |= HAVE_return && function_return_label != 0;
3771 #endif
3772 #ifdef HAVE_simple_return
3773   need_return_insns |= HAVE_simple_return && function_simple_return_label != 0;
3774 #endif
3775   if (need_return_insns)
3776     make_return_insns (first);
3777 
3778   /* Delete any USE insns made by update_block; subsequent passes don't need
3779      them or know how to deal with them.  */
3780   for (insn = first; insn; insn = next)
3781     {
3782       next = NEXT_INSN (insn);
3783 
3784       if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
3785 	  && INSN_P (XEXP (PATTERN (insn), 0)))
3786 	next = delete_related_insns (insn);
3787     }
3788 
3789   obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3790 
3791   /* It is not clear why the line below is needed, but it does seem to be.  */
3792   unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3793 
3794   if (dump_file)
3795     {
3796       int i, j, need_comma;
3797       int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
3798       int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
3799 
3800       for (reorg_pass_number = 0;
3801 	   reorg_pass_number < MAX_REORG_PASSES;
3802 	   reorg_pass_number++)
3803 	{
3804 	  fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3805 	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3806 	    {
3807 	      need_comma = 0;
3808 	      fprintf (dump_file, ";; Reorg function #%d\n", i);
3809 
3810 	      fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
3811 		       num_insns_needing_delays[i][reorg_pass_number]);
3812 
3813 	      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3814 		if (num_filled_delays[i][j][reorg_pass_number])
3815 		  {
3816 		    if (need_comma)
3817 		      fprintf (dump_file, ", ");
3818 		    need_comma = 1;
3819 		    fprintf (dump_file, "%d got %d delays",
3820 			     num_filled_delays[i][j][reorg_pass_number], j);
3821 		  }
3822 	      fprintf (dump_file, "\n");
3823 	    }
3824 	}
3825       memset (total_delay_slots, 0, sizeof total_delay_slots);
3826       memset (total_annul_slots, 0, sizeof total_annul_slots);
3827       for (insn = first; insn; insn = NEXT_INSN (insn))
3828 	{
3829 	  if (! INSN_DELETED_P (insn)
3830 	      && NONJUMP_INSN_P (insn)
3831 	      && GET_CODE (PATTERN (insn)) != USE
3832 	      && GET_CODE (PATTERN (insn)) != CLOBBER)
3833 	    {
3834 	      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
3835 		{
3836                   rtx control;
3837 		  j = XVECLEN (PATTERN (insn), 0) - 1;
3838 		  if (j > MAX_DELAY_HISTOGRAM)
3839 		    j = MAX_DELAY_HISTOGRAM;
3840                   control = XVECEXP (PATTERN (insn), 0, 0);
3841 		  if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
3842 		    total_annul_slots[j]++;
3843 		  else
3844 		    total_delay_slots[j]++;
3845 		}
3846 	      else if (num_delay_slots (insn) > 0)
3847 		total_delay_slots[0]++;
3848 	    }
3849 	}
3850       fprintf (dump_file, ";; Reorg totals: ");
3851       need_comma = 0;
3852       for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3853 	{
3854 	  if (total_delay_slots[j])
3855 	    {
3856 	      if (need_comma)
3857 		fprintf (dump_file, ", ");
3858 	      need_comma = 1;
3859 	      fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
3860 	    }
3861 	}
3862       fprintf (dump_file, "\n");
3863 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
3864       fprintf (dump_file, ";; Reorg annuls: ");
3865       need_comma = 0;
3866       for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3867 	{
3868 	  if (total_annul_slots[j])
3869 	    {
3870 	      if (need_comma)
3871 		fprintf (dump_file, ", ");
3872 	      need_comma = 1;
3873 	      fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
3874 	    }
3875 	}
3876       fprintf (dump_file, "\n");
3877 #endif
3878       fprintf (dump_file, "\n");
3879     }
3880 
3881   free_resource_info ();
3882   free (uid_to_ruid);
3883   crtl->dbr_scheduled_p = true;
3884 }
3885 #endif /* DELAY_SLOTS */
3886 
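/* Gate function for the delay slot filling pass.  */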
3887 static bool
3888 gate_handle_delay_slots (void)
3889 {
3890 #ifdef DELAY_SLOTS
3891   /* At -O0 dataflow info isn't updated after RA.  */
3892   return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
3893 #else
3894   return 0;
3895 #endif
3896 }
3897 
3898 /* Run delay slot optimization.  */
3899 static unsigned int
3900 rest_of_handle_delay_slots (void)
3901 {
3902 #ifdef DELAY_SLOTS
3903   dbr_schedule (get_insns ());
3904 #endif
3905   return 0;
3906 }
3907 
3908 struct rtl_opt_pass pass_delay_slots =
3909 {
3910  {
3911   RTL_PASS,
3912   "dbr",                                /* name */
3913   OPTGROUP_NONE,                        /* optinfo_flags */
3914   gate_handle_delay_slots,              /* gate */
3915   rest_of_handle_delay_slots,           /* execute */
3916   NULL,                                 /* sub */
3917   NULL,                                 /* next */
3918   0,                                    /* static_pass_number */
3919   TV_DBR_SCHED,                         /* tv_id */
3920   0,                                    /* properties_required */
3921   0,                                    /* properties_provided */
3922   0,                                    /* properties_destroyed */
3923   0,                                    /* todo_flags_start */
3924   TODO_ggc_collect                      /* todo_flags_finish */
3925  }
3926 };
3927 
3928 /* Machine dependent reorg pass.  */
3929 static bool
3930 gate_handle_machine_reorg (void)
3931 {
3932   return targetm.machine_dependent_reorg != 0;
3933 }
3934 
3935 
3936 static unsigned int
3937 rest_of_handle_machine_reorg (void)
3938 {
3939   targetm.machine_dependent_reorg ();
3940   return 0;
3941 }
3942 
3943 struct rtl_opt_pass pass_machine_reorg =
3944 {
3945  {
3946   RTL_PASS,
3947   "mach",                               /* name */
3948   OPTGROUP_NONE,                        /* optinfo_flags */
3949   gate_handle_machine_reorg,            /* gate */
3950   rest_of_handle_machine_reorg,         /* execute */
3951   NULL,                                 /* sub */
3952   NULL,                                 /* next */
3953   0,                                    /* static_pass_number */
3954   TV_MACH_DEP,                          /* tv_id */
3955   0,                                    /* properties_required */
3956   0,                                    /* properties_provided */
3957   0,                                    /* properties_destroyed */
3958   0,                                    /* todo_flags_start */
3959   TODO_ggc_collect                      /* todo_flags_finish */
3960  }
3961 };
3962