1 /* Optimize jump instructions, for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3    1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /* This is the pathetic reminder of the old fame of the jump-optimization
23    pass of the compiler.  Now it basically contains a set of utility
24    functions for operating on jumps.
25 
26    Each CODE_LABEL has a count of the times it is used
27    stored in the LABEL_NUSES internal field, and each JUMP_INSN
28    has one label that it refers to stored in the
29    JUMP_LABEL internal field.  With this we can detect labels that
30    become unused because of the deletion of all the jumps that
31    formerly used them.  The JUMP_LABEL info is sometimes looked
32    at by later passes.
33 
34    The subroutines delete_insn, redirect_jump, and invert_jump are used
35    from other passes as well.  */
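/* A minimal illustration of that bookkeeping (the insn uid 42 and label
   number 99 below are hypothetical):

       (jump_insn 42 (set (pc) (label_ref 99)))
       ...
       (code_label 99)

   JUMP_LABEL of insn 42 points at code_label 99, and LABEL_NUSES of label
   99 counts this reference along with any others (e.g. from dispatch
   tables or REG_LABEL notes).  When deletions drop the count to zero, the
   label itself becomes deletable.  */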
36 
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "diagnostic.h"
54 #include "toplev.h"
55 #include "reload.h"
56 #include "predict.h"
57 #include "timevar.h"
58 
59 /* Optimize jump y; x: ... y: jumpif... x?
60    Don't know if it is worth bothering with.  */
61 /* Optimize two cases of conditional jump to conditional jump?
62    This can never delete any instruction or make anything dead,
63    or even change what is live at any point.
64    So perhaps let combiner do it.  */
65 
66 static rtx next_nonnote_insn_in_loop (rtx);
67 static void init_label_info (rtx);
68 static void mark_all_labels (rtx);
69 static int duplicate_loop_exit_test (rtx);
70 static void delete_computation (rtx);
71 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
72 static int redirect_exp (rtx, rtx, rtx);
73 static void invert_exp_1 (rtx);
74 static int invert_exp (rtx);
75 static int returnjump_p_1 (rtx *, void *);
76 static void delete_prior_computation (rtx, rtx);
77 
78 /* Alternate entry into the jump optimizer.  This entry point only rebuilds
79    the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
80    instructions.  */
81 void
82 rebuild_jump_labels (rtx f)
83 {
84   rtx insn;
85 
86   timevar_push (TV_REBUILD_JUMP);
87   init_label_info (f);
88   mark_all_labels (f);
89 
90   /* Keep track of labels used from static data; we don't track them
91      closely enough to delete them here, so make sure their reference
92      count doesn't drop to zero.  */
93 
94   for (insn = forced_labels; insn; insn = XEXP (insn, 1))
95     if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
96       LABEL_NUSES (XEXP (insn, 0))++;
97   timevar_pop (TV_REBUILD_JUMP);
98 }
99 
100 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
101    non-fallthru insn.  This is not generally true, as multiple barriers
102    may have crept in, or the BARRIER may be separated from the last
103    real insn by one or more NOTEs.
104 
105    This simple pass moves barriers and removes duplicates so that the
106    old code is happy.
107  */
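/* A hedged sketch of the effect (the exact insns are hypothetical): a
   stream of the form

       jump_insn  ->  NOTE  ->  BARRIER  ->  BARRIER

   is rewritten so that a single BARRIER immediately follows the jump,

       jump_insn  ->  BARRIER  ->  NOTE

   i.e. the first BARRIER is moved next to the preceding real insn and the
   redundant second BARRIER is deleted.  */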
108 void
109 cleanup_barriers (void)
110 {
111   rtx insn, next, prev;
112   for (insn = get_insns (); insn; insn = next)
113     {
114       next = NEXT_INSN (insn);
115       if (GET_CODE (insn) == BARRIER)
116 	{
117 	  prev = prev_nonnote_insn (insn);
118 	  if (GET_CODE (prev) == BARRIER)
119 	    delete_barrier (insn);
120 	  else if (prev != PREV_INSN (insn))
121 	    reorder_insns (insn, insn, prev);
122 	}
123     }
124 }
125 
126 /* Return the next insn after INSN that is not a NOTE and is in the loop;
127    i.e. if there is no such insn before NOTE_INSN_LOOP_END, return NULL_RTX.
128    This routine does not look inside SEQUENCEs.  */
129 
130 static rtx
131 next_nonnote_insn_in_loop (rtx insn)
132 {
133   while (insn)
134     {
135       insn = NEXT_INSN (insn);
136       if (insn == 0 || GET_CODE (insn) != NOTE)
137 	break;
138       if (GET_CODE (insn) == NOTE
139 	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
140 	return NULL_RTX;
141     }
142 
143   return insn;
144 }
145 
146 void
147 copy_loop_headers (rtx f)
148 {
149   rtx insn, next;
150   /* Look for loop-begin notes and try to duplicate each loop's exit test.  */
151   for (insn = f; insn; insn = next)
152     {
153       rtx temp, temp1;
154 
155       next = NEXT_INSN (insn);
156 
157       /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
158 	 jump.  Try to optimize by duplicating the loop exit test if so.
159 	 This is only safe immediately after regscan, because it uses
160 	 the values of regno_first_uid and regno_last_uid.  */
161       if (GET_CODE (insn) == NOTE
162 	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
163 	  && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
164 	  && any_uncondjump_p (temp1) && onlyjump_p (temp1))
165 	{
166 	  temp = PREV_INSN (insn);
167 	  if (duplicate_loop_exit_test (insn))
168 	    {
169 	      next = NEXT_INSN (temp);
170 	    }
171 	}
172     }
173 }
174 
175 void
176 purge_line_number_notes (rtx f)
177 {
178   rtx last_note = 0;
179   rtx insn;
180   /* Delete extraneous line number notes.
181      Note that two consecutive notes for different lines are not really
182      extraneous.  There should be some indication where that line belonged,
183      even if it became empty.  */
184 
185   for (insn = f; insn; insn = NEXT_INSN (insn))
186     if (GET_CODE (insn) == NOTE)
187       {
188 	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
189 	  /* Any previous line note was for the prologue; gdb wants a new
190 	     note after the prologue even if it is for the same line.  */
191 	  last_note = NULL_RTX;
192 	else if (NOTE_LINE_NUMBER (insn) >= 0)
193 	  {
194 	    /* Delete this note if it is identical to previous note.  */
195 	    if (last_note
196 		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
197 		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
198 	      {
199 		delete_related_insns (insn);
200 		continue;
201 	      }
202 
203 	    last_note = insn;
204 	  }
205       }
206 }
207 
208 /* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
209    notes whose labels don't occur in the insn any more.  */
211 static void
212 init_label_info (rtx f)
213 {
214   rtx insn;
215 
216   for (insn = f; insn; insn = NEXT_INSN (insn))
217     if (GET_CODE (insn) == CODE_LABEL)
218       LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
219     else if (GET_CODE (insn) == JUMP_INSN)
220       JUMP_LABEL (insn) = 0;
221     else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
222       {
223 	rtx note, next;
224 
225 	for (note = REG_NOTES (insn); note; note = next)
226 	  {
227 	    next = XEXP (note, 1);
228 	    if (REG_NOTE_KIND (note) == REG_LABEL
229 		&& ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
230 	      remove_note (insn, note);
231 	  }
232       }
233 }
234 
235 /* Mark the label each jump jumps to.
236    Combine consecutive labels, and count uses of labels.  */
237 
238 static void
239 mark_all_labels (rtx f)
240 {
241   rtx insn;
242 
243   for (insn = f; insn; insn = NEXT_INSN (insn))
244     if (INSN_P (insn))
245       {
246 	if (GET_CODE (insn) == CALL_INSN
247 	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
248 	  {
249 	    mark_all_labels (XEXP (PATTERN (insn), 0));
250 	    mark_all_labels (XEXP (PATTERN (insn), 1));
251 	    mark_all_labels (XEXP (PATTERN (insn), 2));
252 
253 	    /* Canonicalize the tail recursion label attached to the
254 	       CALL_PLACEHOLDER insn.  */
255 	    if (XEXP (PATTERN (insn), 3))
256 	      {
257 		rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
258 						   XEXP (PATTERN (insn), 3));
259 		mark_jump_label (label_ref, insn, 0);
260 		XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
261 	      }
262 
263 	    continue;
264 	  }
265 
266 	mark_jump_label (PATTERN (insn), insn, 0);
267 	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
268 	  {
269 	    /* When we know the LABEL_REF contained in a REG used in
270 	       an indirect jump, we'll have a REG_LABEL note so that
271 	       flow can tell where it's going.  */
272 	    if (JUMP_LABEL (insn) == 0)
273 	      {
274 		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
275 		if (label_note)
276 		  {
277 		    /* Put a LABEL_REF around the label from the REG_LABEL
278 		       note, so that we can canonicalize it.  */
279 		    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
280 						       XEXP (label_note, 0));
281 
282 		    mark_jump_label (label_ref, insn, 0);
283 		    XEXP (label_note, 0) = XEXP (label_ref, 0);
284 		    JUMP_LABEL (insn) = XEXP (label_note, 0);
285 		  }
286 	      }
287 	  }
288       }
289 }
290 
291 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
292    jump.  Assume that this unconditional jump is to the exit test code.  If
293    the code is sufficiently simple, make a copy of it before LOOP_START,
294    followed by a jump to the exit of the loop.  Then delete the unconditional
295    jump after LOOP_START.
296 
297    Return 1 if we made the change, else 0.
298 
299    This is only safe immediately after a regscan pass because it uses the
300    values of regno_first_uid and regno_last_uid.  */
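/* Roughly, and only as an illustrative sketch (the label names are made
   up), the transformation turns

       NOTE_INSN_LOOP_BEG
       jump L_test
       ...loop body...
     L_test:
       ...exit test, conditional jump back to the body...
       NOTE_INSN_LOOP_END
     L_exit:

   into

       ...copy of the exit test, with its backward branch redirected to
          L_preheader and predicted taken, exit branches predicted not
          taken, and a "jump L_exit" appended if the copy can fall through...
     L_preheader:
       NOTE_INSN_LOOP_BEG
       ...loop body...
     L_test:
       NOTE_INSN_LOOP_VTOP
       ...exit test, conditional jump back to the body...
       NOTE_INSN_LOOP_END
     L_exit:

   The unconditional "jump L_test" after the LOOP_BEG note is deleted, so
   the rotated loop is entered through the duplicated test.  */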
301 
302 static int
303 duplicate_loop_exit_test (rtx loop_start)
304 {
305   rtx insn, set, reg, p, link;
306   rtx copy = 0, first_copy = 0;
307   int num_insns = 0;
308   rtx exitcode
309     = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
310   rtx lastexit;
311   int max_reg = max_reg_num ();
312   rtx *reg_map = 0;
313   rtx loop_pre_header_label;
314   int loop_depth;
315 
316   /* If EXITCODE is not in the loop, then this optimization is not
317      safe; we will emit a VTOP note entirely outside the loop.  */
318   for (insn = loop_start, loop_depth = 0;
319        insn != exitcode;
320        insn = NEXT_INSN (insn))
321     {
322       if (GET_CODE (insn) != NOTE)
323 	continue;
324 
325       if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
326 	++loop_depth;
327       else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
328 	       && --loop_depth == 0)
329 	return 0;
330     }
331 
332   /* Scan the exit code.  We do not perform this optimization if any insn:
333 
334          is a CALL_INSN
335 	 is a CODE_LABEL
336 	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
337 	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
338 
339      We also do not do this if we find an insn with ASM_OPERANDS.  While
340      this restriction should not be necessary, copying an insn with
341      ASM_OPERANDS can confuse asm_noperands in some cases.
342 
343      Also, don't do this if the exit code is more than 20 insns.  */
344 
345   for (insn = exitcode;
346        insn
347        && ! (GET_CODE (insn) == NOTE
348 	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
349        insn = NEXT_INSN (insn))
350     {
351       switch (GET_CODE (insn))
352 	{
353 	case CODE_LABEL:
354 	case CALL_INSN:
355 	  return 0;
356 	case NOTE:
357 
358 	  if (optimize < 2
359 	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
360 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
361 	    /* If we were to duplicate this code, we would not move
362 	       the BLOCK notes, and so debugging the moved code would
363 	       be difficult.  Thus, we only move the code with -O2 or
364 	       higher.  */
365 	    return 0;
366 
367 	  break;
368 	case JUMP_INSN:
369 	case INSN:
370 	  if (++num_insns > 20
371 	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
372 	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
373 	    return 0;
374 	  break;
375 	default:
376 	  break;
377 	}
378     }
379 
380   /* Unless INSN is zero, we can do the optimization.  */
381   if (insn == 0)
382     return 0;
383 
384   lastexit = insn;
385 
386   /* See if any insn sets a register only used in the loop exit code and
387      not a user variable.  If so, replace it with a new register.  */
388   for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
389     if (GET_CODE (insn) == INSN
390 	&& (set = single_set (insn)) != 0
391 	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
392 	    || (GET_CODE (reg) == SUBREG
393 		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
394 	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
395 	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
396       {
397 	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
398 	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
399 	    break;
400 
401 	if (p != lastexit)
402 	  {
403 	    /* We can do the replacement.  Allocate reg_map if this is the
404 	       first replacement we found.  */
405 	    if (reg_map == 0)
406 	      reg_map = xcalloc (max_reg, sizeof (rtx));
407 
408 	    REG_LOOP_TEST_P (reg) = 1;
409 
410 	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
411 	  }
412       }
413   loop_pre_header_label = gen_label_rtx ();
414 
415   /* Now copy each insn.  */
416   for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
417     {
418       switch (GET_CODE (insn))
419 	{
420 	case BARRIER:
421 	  copy = emit_barrier_before (loop_start);
422 	  break;
423 	case NOTE:
424 	  /* Only copy line-number notes.  */
425 	  if (NOTE_LINE_NUMBER (insn) >= 0)
426 	    {
427 	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
428 	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
429 	    }
430 	  break;
431 
432 	case INSN:
433 	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
434 	  if (reg_map)
435 	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
436 
437 	  mark_jump_label (PATTERN (copy), copy, 0);
438 	  INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
439 
440 	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
441 	     make them.  */
442 	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
443 	    if (REG_NOTE_KIND (link) != REG_LABEL)
444 	      {
445 		if (GET_CODE (link) == EXPR_LIST)
446 		  REG_NOTES (copy)
447 		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
448 						      XEXP (link, 0),
449 						      REG_NOTES (copy)));
450 		else
451 		  REG_NOTES (copy)
452 		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
453 						      XEXP (link, 0),
454 						      REG_NOTES (copy)));
455 	      }
456 
457 	  if (reg_map && REG_NOTES (copy))
458 	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
459 	  break;
460 
461 	case JUMP_INSN:
462 	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
463 					loop_start);
464 	  INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
465 	  if (reg_map)
466 	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
467 	  mark_jump_label (PATTERN (copy), copy, 0);
468 	  if (REG_NOTES (insn))
469 	    {
470 	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
471 	      if (reg_map)
472 		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
473 	    }
474 
475 	  /* Predict a conditional jump that keeps the loop looping as taken.
476 	     Other jumps are probably exit conditions, so predict
477 	     them as not taken.  */
478 	  if (any_condjump_p (copy))
479 	    {
480 	      rtx label = JUMP_LABEL (copy);
481 	      if (label)
482 		{
483 		  /* The jump_insn after loop_start should be followed
484 		     by a barrier and the loopback label.  */
485 		  if (prev_nonnote_insn (label)
486 		      && (prev_nonnote_insn (prev_nonnote_insn (label))
487 			  == next_nonnote_insn (loop_start)))
488 		    {
489 		      predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
490 		      /* To keep pre-header, we need to redirect all loop
491 		         entrances before the LOOP_BEG note.  */
492 		      redirect_jump (copy, loop_pre_header_label, 0);
493 		    }
494 		  else
495 		    predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
496 		}
497 	    }
498 	  break;
499 
500 	default:
501 	  abort ();
502 	}
503 
504       /* Record the first insn we copied.  We need it so that we can
505 	 scan the copied insns for new pseudo registers.  */
506       if (! first_copy)
507 	first_copy = copy;
508     }
509 
510   /* Now clean up by emitting a jump to the end label and deleting the jump
511      at the start of the loop.  */
512   if (! copy || GET_CODE (copy) != BARRIER)
513     {
514       copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
515 				    loop_start);
516 
517       /* Record the first insn we copied.  We need it so that we can
518 	 scan the copied insns for new pseudo registers.   This may not
519 	 be strictly necessary since we should have copied at least one
520 	 insn above.  But I am going to be safe.  */
521       if (! first_copy)
522 	first_copy = copy;
523 
524       mark_jump_label (PATTERN (copy), copy, 0);
525       emit_barrier_before (loop_start);
526     }
527 
528   emit_label_before (loop_pre_header_label, loop_start);
529 
530   /* Now scan from the first insn we copied to the last insn we copied
531      (copy) for new pseudo registers.  Do this after the code to jump to
532      the end label since that might create a new pseudo too.  */
533   reg_scan_update (first_copy, copy, max_reg);
534 
535   /* Mark the exit code as the virtual top of the converted loop.  */
536   emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
537 
538   delete_related_insns (next_nonnote_insn (loop_start));
539 
540   /* Clean up.  */
541   if (reg_map)
542     free (reg_map);
543 
544   return 1;
545 }
546 
547 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and loop-end
548    notes between START and END out before START.  START and END may be such
549    notes.  Returns the values of the new starting and ending insns, which
550    may be different if the original ones were such notes.
551    Return true if there were only such notes and no real instructions.  */
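/* A small, purely illustrative example (insn names hypothetical): given
   the range

       NOTE_INSN_BLOCK_BEG, insn_a, NOTE_INSN_LOOP_END, insn_b

   the notes end up in front of the remaining insns,

       NOTE_INSN_BLOCK_BEG, NOTE_INSN_LOOP_END, insn_a, insn_b

   with *STARTP set to insn_a and *ENDP to insn_b, and the function
   returns false; had the range contained only such notes it would have
   returned true.  */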
552 
553 bool
554 squeeze_notes (rtx* startp, rtx* endp)
555 {
556   rtx start = *startp;
557   rtx end = *endp;
558 
559   rtx insn;
560   rtx next;
561   rtx last = NULL;
562   rtx past_end = NEXT_INSN (end);
563 
564   for (insn = start; insn != past_end; insn = next)
565     {
566       next = NEXT_INSN (insn);
567       if (GET_CODE (insn) == NOTE
568 	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
569 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
570 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
571 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
572 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
573 	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
574 	{
575 	  if (insn == start)
576 	    start = next;
577 	  else
578 	    {
579 	      rtx prev = PREV_INSN (insn);
580 	      PREV_INSN (insn) = PREV_INSN (start);
581 	      NEXT_INSN (insn) = start;
582 	      NEXT_INSN (PREV_INSN (insn)) = insn;
583 	      PREV_INSN (NEXT_INSN (insn)) = insn;
584 	      NEXT_INSN (prev) = next;
585 	      PREV_INSN (next) = prev;
586 	    }
587 	}
588       else
589 	last = insn;
590     }
591 
592   /* There were no real instructions.  */
593   if (start == past_end)
594     return true;
595 
596   end = last;
597 
598   *startp = start;
599   *endp = end;
600   return false;
601 }
602 
603 /* Return the label before INSN, or put a new label there.  */
604 
605 rtx
606 get_label_before (rtx insn)
607 {
608   rtx label;
609 
610   /* Find an existing label at this point
611      or make a new one if there is none.  */
612   label = prev_nonnote_insn (insn);
613 
614   if (label == 0 || GET_CODE (label) != CODE_LABEL)
615     {
616       rtx prev = PREV_INSN (insn);
617 
618       label = gen_label_rtx ();
619       emit_label_after (label, prev);
620       LABEL_NUSES (label) = 0;
621     }
622   return label;
623 }
624 
625 /* Return the label after INSN, or put a new label there.  */
626 
627 rtx
628 get_label_after (rtx insn)
629 {
630   rtx label;
631 
632   /* Find an existing label at this point
633      or make a new one if there is none.  */
634   label = next_nonnote_insn (insn);
635 
636   if (label == 0 || GET_CODE (label) != CODE_LABEL)
637     {
638       label = gen_label_rtx ();
639       emit_label_after (label, insn);
640       LABEL_NUSES (label) = 0;
641     }
642   return label;
643 }
644 
645 /* Given a comparison (CODE ARG0 ARG1) inside insn INSN, return the code of
646    the reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
647    UNKNOWN may be returned when we have a CC_MODE compare and we don't know
648    whether its source is a floating point or an integer comparison.  The machine
649    description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
650    to help this function avoid overhead in these cases.  */
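/* A worked example (illustrative only): asked to reverse GT for an SFmode
   comparison, this function returns LE only when NaNs can be ignored for
   that mode (!HONOR_NANS); if NaNs must be honored and the machine
   description offers no REVERSIBLE_CC_MODE / REVERSE_CONDITION help, it
   returns UNKNOWN, since GT and LE are both false when an operand is a
   NaN.  Reversing EQ or an unsigned comparison such as LTU is always safe
   and simply yields NE or GEU.  */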
651 enum rtx_code
652 reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
653 {
654   enum machine_mode mode;
655 
656   /* If this is not actually a comparison, we can't reverse it.  */
657   if (GET_RTX_CLASS (code) != '<')
658     return UNKNOWN;
659 
660   mode = GET_MODE (arg0);
661   if (mode == VOIDmode)
662     mode = GET_MODE (arg1);
663 
664   /* First see if the machine description supplies us with a way to reverse
665      the comparison.  Give it priority over everything else to allow the
666      machine description to do tricks.  */
667 #ifdef REVERSIBLE_CC_MODE
668   if (GET_MODE_CLASS (mode) == MODE_CC
669       && REVERSIBLE_CC_MODE (mode))
670     {
671 #ifdef REVERSE_CONDITION
672       return REVERSE_CONDITION (code, mode);
673 #endif
674       return reverse_condition (code);
675     }
676 #endif
677 
678   /* Try a few special cases based on the comparison code.  */
679   switch (code)
680     {
681     case GEU:
682     case GTU:
683     case LEU:
684     case LTU:
685     case NE:
686     case EQ:
687       /* It is always safe to reverse EQ and NE, even for floating
688 	 point.  Similarly, the unsigned comparisons are never used for
689 	 floating point, so we can reverse them in the default way.  */
690       return reverse_condition (code);
691     case ORDERED:
692     case UNORDERED:
693     case LTGT:
694     case UNEQ:
695       /* If we already see an unordered comparison, we can be sure we are
696 	 dealing with floating point, so we don't need any more tests.  */
697       return reverse_condition_maybe_unordered (code);
698     case UNLT:
699     case UNLE:
700     case UNGT:
701     case UNGE:
702       /* We don't have a safe way to reverse these yet.  */
703       return UNKNOWN;
704     default:
705       break;
706     }
707 
708   if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
709     {
710       rtx prev;
711       /* Try to search for the comparison to determine the real mode.
712          This code is expensive, but with a sane machine description it
713          will never be used, since REVERSIBLE_CC_MODE will return true
714          in all cases.  */
715       if (! insn)
716 	return UNKNOWN;
717 
718       for (prev = prev_nonnote_insn (insn);
719 	   prev != 0 && GET_CODE (prev) != CODE_LABEL;
720 	   prev = prev_nonnote_insn (prev))
721 	{
722 	  rtx set = set_of (arg0, prev);
723 	  if (set && GET_CODE (set) == SET
724 	      && rtx_equal_p (SET_DEST (set), arg0))
725 	    {
726 	      rtx src = SET_SRC (set);
727 
728 	      if (GET_CODE (src) == COMPARE)
729 		{
730 		  rtx comparison = src;
731 		  arg0 = XEXP (src, 0);
732 		  mode = GET_MODE (arg0);
733 		  if (mode == VOIDmode)
734 		    mode = GET_MODE (XEXP (comparison, 1));
735 		  break;
736 		}
737 	      /* We can get past reg-reg moves.  This may be useful for models
738 	         of i387 comparisons that first move the flag registers around.  */
739 	      if (REG_P (src))
740 		{
741 		  arg0 = src;
742 		  continue;
743 		}
744 	    }
745 	  /* If the register is clobbered in some way we cannot understand,
746 	     give up.  */
747 	  if (set)
748 	    return UNKNOWN;
749 	}
750     }
751 
752   /* Test for an integer condition, or a floating-point comparison
753      in which NaNs can be ignored.  */
754   if (GET_CODE (arg0) == CONST_INT
755       || (GET_MODE (arg0) != VOIDmode
756 	  && GET_MODE_CLASS (mode) != MODE_CC
757 	  && !HONOR_NANS (mode)))
758     return reverse_condition (code);
759 
760   return UNKNOWN;
761 }
762 
763 /* A wrapper around the previous function to take COMPARISON as rtx
764    expression.  This simplifies many callers.  */
765 enum rtx_code
766 reversed_comparison_code (rtx comparison, rtx insn)
767 {
768   if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
769     return UNKNOWN;
770   return reversed_comparison_code_parts (GET_CODE (comparison),
771 					 XEXP (comparison, 0),
772 					 XEXP (comparison, 1), insn);
773 }
774 
775 /* Given an rtx-code for a comparison, return the code for the negated
776    comparison.  If no such code exists, return UNKNOWN.
777 
778    WATCH OUT!  reverse_condition is not safe to use on a jump that might
779    be acting on the results of an IEEE floating point comparison, because
780    of the special treatment of non-signaling nans in comparisons.
781    Use reversed_comparison_code instead.  */
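/* Concretely (an illustrative example): reverse_condition (GT) yields LE,
   which is the correct negation only when the operands cannot be NaNs;
   with IEEE operands, 2.0 > NaN and 2.0 <= NaN are both false.  The
   NaN-safe reversal of GT is UNLE (see reverse_condition_maybe_unordered),
   and reversed_comparison_code returns UNKNOWN rather than guessing.  */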
782 
783 enum rtx_code
784 reverse_condition (enum rtx_code code)
785 {
786   switch (code)
787     {
788     case EQ:
789       return NE;
790     case NE:
791       return EQ;
792     case GT:
793       return LE;
794     case GE:
795       return LT;
796     case LT:
797       return GE;
798     case LE:
799       return GT;
800     case GTU:
801       return LEU;
802     case GEU:
803       return LTU;
804     case LTU:
805       return GEU;
806     case LEU:
807       return GTU;
808     case UNORDERED:
809       return ORDERED;
810     case ORDERED:
811       return UNORDERED;
812 
813     case UNLT:
814     case UNLE:
815     case UNGT:
816     case UNGE:
817     case UNEQ:
818     case LTGT:
819       return UNKNOWN;
820 
821     default:
822       abort ();
823     }
824 }
825 
826 /* Similar, but we're allowed to generate unordered comparisons, which
827    makes it safe for IEEE floating-point.  Of course, we have to recognize
828    that the target will support them too...  */
829 
830 enum rtx_code
831 reverse_condition_maybe_unordered (enum rtx_code code)
832 {
833   switch (code)
834     {
835     case EQ:
836       return NE;
837     case NE:
838       return EQ;
839     case GT:
840       return UNLE;
841     case GE:
842       return UNLT;
843     case LT:
844       return UNGE;
845     case LE:
846       return UNGT;
847     case LTGT:
848       return UNEQ;
849     case UNORDERED:
850       return ORDERED;
851     case ORDERED:
852       return UNORDERED;
853     case UNLT:
854       return GE;
855     case UNLE:
856       return GT;
857     case UNGT:
858       return LE;
859     case UNGE:
860       return LT;
861     case UNEQ:
862       return LTGT;
863 
864     default:
865       abort ();
866     }
867 }
868 
869 /* Similar, but return the code when two operands of a comparison are swapped.
870    This IS safe for IEEE floating-point.  */
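/* For instance (illustrative): swap_condition (LT) is GT, because x < y
   and y > x are the same test even for IEEE values -- swapping operands
   never changes how NaNs behave, unlike reversing.  Symmetric codes such
   as EQ, NE, ORDERED, UNORDERED, UNEQ and LTGT come back unchanged.  */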
871 
872 enum rtx_code
873 swap_condition (enum rtx_code code)
874 {
875   switch (code)
876     {
877     case EQ:
878     case NE:
879     case UNORDERED:
880     case ORDERED:
881     case UNEQ:
882     case LTGT:
883       return code;
884 
885     case GT:
886       return LT;
887     case GE:
888       return LE;
889     case LT:
890       return GT;
891     case LE:
892       return GE;
893     case GTU:
894       return LTU;
895     case GEU:
896       return LEU;
897     case LTU:
898       return GTU;
899     case LEU:
900       return GEU;
901     case UNLT:
902       return UNGT;
903     case UNLE:
904       return UNGE;
905     case UNGT:
906       return UNLT;
907     case UNGE:
908       return UNLE;
909 
910     default:
911       abort ();
912     }
913 }
914 
915 /* Given a comparison CODE, return the corresponding unsigned comparison.
916    If CODE is an equality comparison or already an unsigned comparison,
917    CODE is returned.  */
918 
919 enum rtx_code
920 unsigned_condition (enum rtx_code code)
921 {
922   switch (code)
923     {
924     case EQ:
925     case NE:
926     case GTU:
927     case GEU:
928     case LTU:
929     case LEU:
930       return code;
931 
932     case GT:
933       return GTU;
934     case GE:
935       return GEU;
936     case LT:
937       return LTU;
938     case LE:
939       return LEU;
940 
941     default:
942       abort ();
943     }
944 }
945 
946 /* Similarly, return the signed version of a comparison.  */
947 
948 enum rtx_code
949 signed_condition (enum rtx_code code)
950 {
951   switch (code)
952     {
953     case EQ:
954     case NE:
955     case GT:
956     case GE:
957     case LT:
958     case LE:
959       return code;
960 
961     case GTU:
962       return GT;
963     case GEU:
964       return GE;
965     case LTU:
966       return LT;
967     case LEU:
968       return LE;
969 
970     default:
971       abort ();
972     }
973 }
974 
975 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
976    truth of CODE1 implies the truth of CODE2.  */
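/* Two illustrative calls: comparison_dominates_p (EQ, GE) is 1 because
   x == y implies x >= y, while comparison_dominates_p (GE, GT) is 0
   because x >= y does not imply x > y.  The floating-point rows follow
   the same idea: LT dominates ORDERED and LTGT because x < y already
   implies neither operand is a NaN.  */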
977 
978 int
979 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
980 {
981   /* UNKNOWN comparison codes can happen as a result of trying to reverse
982      comparison codes.
983      They can't match anything, so we have to reject them here.  */
984   if (code1 == UNKNOWN || code2 == UNKNOWN)
985     return 0;
986 
987   if (code1 == code2)
988     return 1;
989 
990   switch (code1)
991     {
992     case UNEQ:
993       if (code2 == UNLE || code2 == UNGE)
994 	return 1;
995       break;
996 
997     case EQ:
998       if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
999 	  || code2 == ORDERED)
1000 	return 1;
1001       break;
1002 
1003     case UNLT:
1004       if (code2 == UNLE || code2 == NE)
1005 	return 1;
1006       break;
1007 
1008     case LT:
1009       if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1010 	return 1;
1011       break;
1012 
1013     case UNGT:
1014       if (code2 == UNGE || code2 == NE)
1015 	return 1;
1016       break;
1017 
1018     case GT:
1019       if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1020 	return 1;
1021       break;
1022 
1023     case GE:
1024     case LE:
1025       if (code2 == ORDERED)
1026 	return 1;
1027       break;
1028 
1029     case LTGT:
1030       if (code2 == NE || code2 == ORDERED)
1031 	return 1;
1032       break;
1033 
1034     case LTU:
1035       if (code2 == LEU || code2 == NE)
1036 	return 1;
1037       break;
1038 
1039     case GTU:
1040       if (code2 == GEU || code2 == NE)
1041 	return 1;
1042       break;
1043 
1044     case UNORDERED:
1045       if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1046 	  || code2 == UNGE || code2 == UNGT)
1047 	return 1;
1048       break;
1049 
1050     default:
1051       break;
1052     }
1053 
1054   return 0;
1055 }
1056 
1057 /* Return 1 if INSN is an unconditional jump and nothing else.  */
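/* Such an insn has exactly the shape (uid 42 and label 99 hypothetical)

       (jump_insn 42 (set (pc) (label_ref 99)))

   A jump wrapped in a PARALLEL, a conditional jump, or a RETURN does not
   qualify; see any_uncondjump_p below for the more tolerant test.  */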
1058 
1059 int
1060 simplejump_p (rtx insn)
1061 {
1062   return (GET_CODE (insn) == JUMP_INSN
1063 	  && GET_CODE (PATTERN (insn)) == SET
1064 	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1065 	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1066 }
1067 
1068 /* Return nonzero if INSN is a (possibly) conditional jump
1069    and nothing more.
1070 
1071    Use of this function is deprecated, since we need to support combined
1072    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
1073 
1074 int
1075 condjump_p (rtx insn)
1076 {
1077   rtx x = PATTERN (insn);
1078 
1079   if (GET_CODE (x) != SET
1080       || GET_CODE (SET_DEST (x)) != PC)
1081     return 0;
1082 
1083   x = SET_SRC (x);
1084   if (GET_CODE (x) == LABEL_REF)
1085     return 1;
1086   else
1087     return (GET_CODE (x) == IF_THEN_ELSE
1088 	    && ((GET_CODE (XEXP (x, 2)) == PC
1089 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1090 		     || GET_CODE (XEXP (x, 1)) == RETURN))
1091 		|| (GET_CODE (XEXP (x, 1)) == PC
1092 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1093 			|| GET_CODE (XEXP (x, 2)) == RETURN))));
1094 
1095   return 0;
1096 }
1097 
1098 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1099    PARALLEL.
1100 
1101    Use of this function is deprecated, since we need to support combined
1102    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
1103 
1104 int
1105 condjump_in_parallel_p (rtx insn)
1106 {
1107   rtx x = PATTERN (insn);
1108 
1109   if (GET_CODE (x) != PARALLEL)
1110     return 0;
1111   else
1112     x = XVECEXP (x, 0, 0);
1113 
1114   if (GET_CODE (x) != SET)
1115     return 0;
1116   if (GET_CODE (SET_DEST (x)) != PC)
1117     return 0;
1118   if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1119     return 1;
1120   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1121     return 0;
1122   if (XEXP (SET_SRC (x), 2) == pc_rtx
1123       && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1124 	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1125     return 1;
1126   if (XEXP (SET_SRC (x), 1) == pc_rtx
1127       && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1128 	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1129     return 1;
1130   return 0;
1131 }
1132 
1133 /* Return set of PC, otherwise NULL.  */
1134 
1135 rtx
1136 pc_set (rtx insn)
1137 {
1138   rtx pat;
1139   if (GET_CODE (insn) != JUMP_INSN)
1140     return NULL_RTX;
1141   pat = PATTERN (insn);
1142 
1143   /* The set is allowed to appear either as the insn pattern or
1144      the first set in a PARALLEL.  */
1145   if (GET_CODE (pat) == PARALLEL)
1146     pat = XVECEXP (pat, 0, 0);
1147   if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1148     return pat;
1149 
1150   return NULL_RTX;
1151 }
1152 
1153 /* Return true when insn is an unconditional direct jump,
1154    possibly bundled inside a PARALLEL.  */
1155 
1156 int
1157 any_uncondjump_p (rtx insn)
1158 {
1159   rtx x = pc_set (insn);
1160   if (!x)
1161     return 0;
1162   if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1163     return 0;
1164   return 1;
1165 }
1166 
1167 /* Return true when insn is a conditional jump.  This function works for
1168    instructions containing PC sets in PARALLELs.  The instruction may have
1169    various other effects so before removing the jump you must verify
1170    onlyjump_p.
1171 
1172    Note that unlike condjump_p it returns false for unconditional jumps.  */
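/* The PC set of such a jump looks like (register and label numbers
   hypothetical)

       (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
                               (label_ref 99)
                               (pc)))

   possibly as the first element of a PARALLEL; either arm may also be a
   RETURN, and the fall-through (pc) arm may appear in either position.  */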
1173 
1174 int
1175 any_condjump_p (rtx insn)
1176 {
1177   rtx x = pc_set (insn);
1178   enum rtx_code a, b;
1179 
1180   if (!x)
1181     return 0;
1182   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1183     return 0;
1184 
1185   a = GET_CODE (XEXP (SET_SRC (x), 1));
1186   b = GET_CODE (XEXP (SET_SRC (x), 2));
1187 
1188   return ((b == PC && (a == LABEL_REF || a == RETURN))
1189 	  || (a == PC && (b == LABEL_REF || b == RETURN)));
1190 }
1191 
1192 /* Return the label of a conditional jump.  */
1193 
1194 rtx
1195 condjump_label (rtx insn)
1196 {
1197   rtx x = pc_set (insn);
1198 
1199   if (!x)
1200     return NULL_RTX;
1201   x = SET_SRC (x);
1202   if (GET_CODE (x) == LABEL_REF)
1203     return x;
1204   if (GET_CODE (x) != IF_THEN_ELSE)
1205     return NULL_RTX;
1206   if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1207     return XEXP (x, 1);
1208   if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1209     return XEXP (x, 2);
1210   return NULL_RTX;
1211 }
1212 
1213 /* Return true if INSN is a (possibly conditional) return insn.  */
1214 
1215 static int
1216 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1217 {
1218   rtx x = *loc;
1219 
1220   return x && (GET_CODE (x) == RETURN
1221 	       || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1222 }
1223 
1224 int
1225 returnjump_p (rtx insn)
1226 {
1227   if (GET_CODE (insn) != JUMP_INSN)
1228     return 0;
1229   return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1230 }
1231 
1232 /* Return true if INSN is a jump that only transfers control and
1233    nothing more.  */
1234 
1235 int
1236 onlyjump_p (rtx insn)
1237 {
1238   rtx set;
1239 
1240   if (GET_CODE (insn) != JUMP_INSN)
1241     return 0;
1242 
1243   set = single_set (insn);
1244   if (set == NULL)
1245     return 0;
1246   if (GET_CODE (SET_DEST (set)) != PC)
1247     return 0;
1248   if (side_effects_p (SET_SRC (set)))
1249     return 0;
1250 
1251   return 1;
1252 }
1253 
1254 #ifdef HAVE_cc0
1255 
1256 /* Return nonzero if X is an RTX that only sets the condition codes
1257    and has no side effects.  */
1258 
1259 int
1260 only_sets_cc0_p (rtx x)
1261 {
1262   if (! x)
1263     return 0;
1264 
1265   if (INSN_P (x))
1266     x = PATTERN (x);
1267 
1268   return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1269 }
1270 
1271 /* Return 1 if X is an RTX that does nothing but set the condition codes
1272    and CLOBBER or USE registers.
1273    Return -1 if X does explicitly set the condition codes,
1274    but also does other things.  */
1275 
1276 int
1277 sets_cc0_p (rtx x)
1278 {
1279   if (! x)
1280     return 0;
1281 
1282   if (INSN_P (x))
1283     x = PATTERN (x);
1284 
1285   if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1286     return 1;
1287   if (GET_CODE (x) == PARALLEL)
1288     {
1289       int i;
1290       int sets_cc0 = 0;
1291       int other_things = 0;
1292       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1293 	{
1294 	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
1295 	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1296 	    sets_cc0 = 1;
1297 	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1298 	    other_things = 1;
1299 	}
1300       return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1301     }
1302   return 0;
1303 }
1304 #endif
1305 
1306 /* Follow any unconditional jump at LABEL;
1307    return the ultimate label reached by any such chain of jumps.
1308    If LABEL is not followed by a jump, return LABEL.
1309    If the chain loops or we can't find end, return LABEL,
1310    since that tells caller to avoid changing the insn.
1311 
1312    If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1313    a USE or CLOBBER.  */
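/* An illustrative chain (labels hypothetical): if label L1 is followed
   only by "jump L2" and a BARRIER, and L2 likewise only by "jump L3",
   then follow_jumps (L1) returns L3.  The walk gives up after ten hops,
   on a cycle, when the target is a dispatch table, or (before reload)
   when the chain would cross a NOTE_INSN_LOOP_BEG, so the caller may
   simply get LABEL back.  */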
1314 
1315 rtx
1316 follow_jumps (rtx label)
1317 {
1318   rtx insn;
1319   rtx next;
1320   rtx value = label;
1321   int depth;
1322 
1323   for (depth = 0;
1324        (depth < 10
1325 	&& (insn = next_active_insn (value)) != 0
1326 	&& GET_CODE (insn) == JUMP_INSN
1327 	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1328 	     && onlyjump_p (insn))
1329 	    || GET_CODE (PATTERN (insn)) == RETURN)
1330 	&& (next = NEXT_INSN (insn))
1331 	&& GET_CODE (next) == BARRIER);
1332        depth++)
1333     {
1334       /* Don't chain through the insn that jumps into a loop
1335 	 from outside the loop,
1336 	 since that would create multiple loop entry jumps
1337 	 and prevent loop optimization.  */
1338       rtx tem;
1339       if (!reload_completed)
1340 	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1341 	  if (GET_CODE (tem) == NOTE
1342 	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1343 		  /* ??? Optional.  Disables some optimizations, but makes
1344 		     gcov output more accurate with -O.  */
1345 		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1346 	    return value;
1347 
1348       /* If we have found a cycle, make the insn jump to itself.  */
1349       if (JUMP_LABEL (insn) == label)
1350 	return label;
1351 
1352       tem = next_active_insn (JUMP_LABEL (insn));
1353       if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1354 		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1355 	break;
1356 
1357       value = JUMP_LABEL (insn);
1358     }
1359   if (depth == 10)
1360     return label;
1361   return value;
1362 }
1363 
1364 
1365 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1366    If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1367    in INSN, then store one of them in JUMP_LABEL (INSN).
1368    If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1369    referenced in INSN, add a REG_LABEL note containing that label to INSN.
1370    Also, when there are consecutive labels, canonicalize on the last of them.
1371 
1372    Note that two labels separated by a loop-beginning note
1373    must be kept distinct if we have not yet done loop-optimization,
1374    because the gap between them is where loop-optimize
1375    will want to move invariant code to.  */
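/* For example (hypothetical uids): for an insn that loads a label into a
   register for a later computed jump,

       (insn 42 (set (reg:SI 100) (label_ref 99)))

   this function bumps LABEL_NUSES of label 99 and attaches a REG_LABEL
   note naming it to insn 42, whereas for a direct (jump_insn ...) it
   would set JUMP_LABEL instead.  */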
1377 
1378 void
1379 mark_jump_label (rtx x, rtx insn, int in_mem)
1380 {
1381   RTX_CODE code = GET_CODE (x);
1382   int i;
1383   const char *fmt;
1384 
1385   switch (code)
1386     {
1387     case PC:
1388     case CC0:
1389     case REG:
1390     case CONST_INT:
1391     case CONST_DOUBLE:
1392     case CLOBBER:
1393     case CALL:
1394       return;
1395 
1396     case MEM:
1397       in_mem = 1;
1398       break;
1399 
1400     case SYMBOL_REF:
1401       if (!in_mem)
1402 	return;
1403 
1404       /* If this is a constant-pool reference, see if it is a label.  */
1405       if (CONSTANT_POOL_ADDRESS_P (x))
1406 	mark_jump_label (get_pool_constant (x), insn, in_mem);
1407       break;
1408 
1409     case LABEL_REF:
1410       {
1411 	rtx label = XEXP (x, 0);
1412 
1413 	/* Ignore remaining references to unreachable labels that
1414 	   have been deleted.  */
1415 	if (GET_CODE (label) == NOTE
1416 	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1417 	  break;
1418 
1419 	if (GET_CODE (label) != CODE_LABEL)
1420 	  abort ();
1421 
1422 	/* Ignore references to labels of containing functions.  */
1423 	if (LABEL_REF_NONLOCAL_P (x))
1424 	  break;
1425 
1426 	XEXP (x, 0) = label;
1427 	if (! insn || ! INSN_DELETED_P (insn))
1428 	  ++LABEL_NUSES (label);
1429 
1430 	if (insn)
1431 	  {
1432 	    if (GET_CODE (insn) == JUMP_INSN)
1433 	      JUMP_LABEL (insn) = label;
1434 	    else
1435 	      {
1436 		/* Add a REG_LABEL note for LABEL unless there already
1437 		   is one.  All uses of a label, except for labels
1438 		   that are the targets of jumps, must have a
1439 		   REG_LABEL note.  */
1440 		if (! find_reg_note (insn, REG_LABEL, label))
1441 		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1442 							REG_NOTES (insn));
1443 	      }
1444 	  }
1445 	return;
1446       }
1447 
1448   /* Do walk the labels in a vector, but not the first operand of an
1449      ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
1450     case ADDR_VEC:
1451     case ADDR_DIFF_VEC:
1452       if (! INSN_DELETED_P (insn))
1453 	{
1454 	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1455 
1456 	  for (i = 0; i < XVECLEN (x, eltnum); i++)
1457 	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1458 	}
1459       return;
1460 
1461     default:
1462       break;
1463     }
1464 
1465   fmt = GET_RTX_FORMAT (code);
1466   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1467     {
1468       if (fmt[i] == 'e')
1469 	mark_jump_label (XEXP (x, i), insn, in_mem);
1470       else if (fmt[i] == 'E')
1471 	{
1472 	  int j;
1473 	  for (j = 0; j < XVECLEN (x, i); j++)
1474 	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1475 	}
1476     }
1477 }
1478 
1479 /* If all INSN does is set the pc, delete it,
1480    and delete the insn that set the condition codes for it
1481    if that's what the previous thing was.  */
1482 
1483 void
1484 delete_jump (rtx insn)
1485 {
1486   rtx set = single_set (insn);
1487 
1488   if (set && GET_CODE (SET_DEST (set)) == PC)
1489     delete_computation (insn);
1490 }
1491 
1492 /* Verify INSN is a BARRIER and delete it.  */
1493 
1494 void
1495 delete_barrier (rtx insn)
1496 {
1497   if (GET_CODE (insn) != BARRIER)
1498     abort ();
1499 
1500   delete_insn (insn);
1501 }
1502 
1503 /* Recursively delete prior insns that compute the value (used only by INSN
1504    which the caller is deleting) stored in the register mentioned by NOTE
1505    which is a REG_DEAD note associated with INSN.  */
1506 
1507 static void
1508 delete_prior_computation (rtx note, rtx insn)
1509 {
1510   rtx our_prev;
1511   rtx reg = XEXP (note, 0);
1512 
1513   for (our_prev = prev_nonnote_insn (insn);
1514        our_prev && (GET_CODE (our_prev) == INSN
1515 		    || GET_CODE (our_prev) == CALL_INSN);
1516        our_prev = prev_nonnote_insn (our_prev))
1517     {
1518       rtx pat = PATTERN (our_prev);
1519 
1520       /* If we reach a CALL which is not calling a const function
1521 	 or the callee pops the arguments, then give up.  */
1522       if (GET_CODE (our_prev) == CALL_INSN
1523 	  && (! CONST_OR_PURE_CALL_P (our_prev)
1524 	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1525 	break;
1526 
1527       /* If we reach a SEQUENCE, it is too complex to try to
1528 	 do anything with it, so give up.  We can be run during
1529 	 and after reorg, so SEQUENCE rtl can legitimately show
1530 	 up here.  */
1531       if (GET_CODE (pat) == SEQUENCE)
1532 	break;
1533 
1534       if (GET_CODE (pat) == USE
1535 	  && GET_CODE (XEXP (pat, 0)) == INSN)
1536 	/* reorg creates USEs that look like this.  We leave them
1537 	   alone because reorg needs them for its own purposes.  */
1538 	break;
1539 
1540       if (reg_set_p (reg, pat))
1541 	{
1542 	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1543 	    break;
1544 
1545 	  if (GET_CODE (pat) == PARALLEL)
1546 	    {
1547 	      /* If we find a SET of something else, we can't
1548 		 delete the insn.  */
1549 
1550 	      int i;
1551 
1552 	      for (i = 0; i < XVECLEN (pat, 0); i++)
1553 		{
1554 		  rtx part = XVECEXP (pat, 0, i);
1555 
1556 		  if (GET_CODE (part) == SET
1557 		      && SET_DEST (part) != reg)
1558 		    break;
1559 		}
1560 
1561 	      if (i == XVECLEN (pat, 0))
1562 		delete_computation (our_prev);
1563 	    }
1564 	  else if (GET_CODE (pat) == SET
1565 		   && GET_CODE (SET_DEST (pat)) == REG)
1566 	    {
1567 	      int dest_regno = REGNO (SET_DEST (pat));
1568 	      int dest_endregno
1569 		= (dest_regno
1570 		   + (dest_regno < FIRST_PSEUDO_REGISTER
1571 		      ? HARD_REGNO_NREGS (dest_regno,
1572 					  GET_MODE (SET_DEST (pat))) : 1));
1573 	      int regno = REGNO (reg);
1574 	      int endregno
1575 		= (regno
1576 		   + (regno < FIRST_PSEUDO_REGISTER
1577 		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1578 
1579 	      if (dest_regno >= regno
1580 		  && dest_endregno <= endregno)
1581 		delete_computation (our_prev);
1582 
1583 	      /* We may have a multi-word hard register and some, but not
1584 		 all, of the words of the register are needed in subsequent
1585 		 insns.  Write REG_UNUSED notes for those parts that were not
1586 		 needed.  */
1587 	      else if (dest_regno <= regno
1588 		       && dest_endregno >= endregno)
1589 		{
1590 		  int i;
1591 
1592 		  REG_NOTES (our_prev)
1593 		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1594 					 REG_NOTES (our_prev));
1595 
1596 		  for (i = dest_regno; i < dest_endregno; i++)
1597 		    if (! find_regno_note (our_prev, REG_UNUSED, i))
1598 		      break;
1599 
1600 		  if (i == dest_endregno)
1601 		    delete_computation (our_prev);
1602 		}
1603 	    }
1604 
1605 	  break;
1606 	}
1607 
1608       /* If PAT references the register that dies here, it is an
1609 	 additional use.  Hence any prior SET isn't dead.  However, this
1610 	 insn becomes the new place for the REG_DEAD note.  */
1611       if (reg_overlap_mentioned_p (reg, pat))
1612 	{
1613 	  XEXP (note, 1) = REG_NOTES (our_prev);
1614 	  REG_NOTES (our_prev) = note;
1615 	  break;
1616 	}
1617     }
1618 }
1619 
1620 /* Delete INSN and recursively delete insns that compute values used only
1621    by INSN.  This uses the REG_DEAD notes computed during flow analysis.
1622    If we are running before flow.c, we need do nothing since flow.c will
1623    delete dead code.  We also can't know if the registers being used are
1624    dead or not at this point.
1625 
1626    Otherwise, look at all our REG_DEAD notes.  If a previous insn does
1627    nothing other than set a register that dies in this insn, we can delete
1628    that insn as well.
1629 
1630    On machines with CC0, if CC0 is used in this insn, we may be able to
1631    delete the insn that set it.  */
1632 
1633 static void
1634 delete_computation (rtx insn)
1635 {
1636   rtx note, next;
1637 
1638 #ifdef HAVE_cc0
1639   if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1640     {
1641       rtx prev = prev_nonnote_insn (insn);
1642       /* We assume that at this stage
1643 	 CC's are always set explicitly
1644 	 and always immediately before the jump that
1645 	 will use them.  So if the previous insn
1646 	 exists to set the CC's, delete it
1647 	 (unless it performs auto-increments, etc.).  */
1648       if (prev && GET_CODE (prev) == INSN
1649 	  && sets_cc0_p (PATTERN (prev)))
1650 	{
1651 	  if (sets_cc0_p (PATTERN (prev)) > 0
1652 	      && ! side_effects_p (PATTERN (prev)))
1653 	    delete_computation (prev);
1654 	  else
1655 	    /* Otherwise, show that cc0 won't be used.  */
1656 	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1657 						  cc0_rtx, REG_NOTES (prev));
1658 	}
1659     }
1660 #endif
1661 
1662   for (note = REG_NOTES (insn); note; note = next)
1663     {
1664       next = XEXP (note, 1);
1665 
1666       if (REG_NOTE_KIND (note) != REG_DEAD
1667 	  /* Verify that the REG_NOTE is legitimate.  */
1668 	  || GET_CODE (XEXP (note, 0)) != REG)
1669 	continue;
1670 
1671       delete_prior_computation (note, insn);
1672     }
1673 
1674   delete_related_insns (insn);
1675 }
1676 
1677 /* Delete insn INSN from the chain of insns and update label ref counts
1678    and delete insns now unreachable.
1679 
1680    Returns the first insn after INSN that was not deleted.
1681 
1682    Use of this function is deprecated.  Use delete_insn instead, followed by
1683    a cfg_cleanup pass to delete unreachable code if needed.  */
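/* An illustrative cascade (insns hypothetical): deleting the only
   "jump L99" also deletes the BARRIER after it; the label L99 loses its
   last reference and is deleted in turn, and if a BARRIER precedes L99
   the now-unreachable insns after the label are deleted as well.
   Deleting a tablejump likewise deletes its ADDR_VEC dispatch table.  */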
1684 
1685 rtx
1686 delete_related_insns (rtx insn)
1687 {
1688   int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1689   rtx note;
1690   rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1691 
1692   while (next && INSN_DELETED_P (next))
1693     next = NEXT_INSN (next);
1694 
1695   /* This insn is already deleted => return first following nondeleted.  */
1696   if (INSN_DELETED_P (insn))
1697     return next;
1698 
1699   delete_insn (insn);
1700 
1701   /* If instruction is followed by a barrier,
1702      delete the barrier too.  */
1703 
1704   if (next != 0 && GET_CODE (next) == BARRIER)
1705     delete_insn (next);
1706 
1707   /* If deleting a jump, decrement the count of the label,
1708      and delete the label if it is now unused.  */
1709 
1710   if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1711     {
1712       rtx lab = JUMP_LABEL (insn), lab_next;
1713 
1714       if (LABEL_NUSES (lab) == 0)
1715 	{
1716 	  /* This can delete NEXT or PREV,
1717 	     either directly if NEXT is JUMP_LABEL (INSN),
1718 	     or indirectly through more levels of jumps.  */
1719 	  delete_related_insns (lab);
1720 
1721 	  /* I feel a little doubtful about this loop,
1722 	     but I see no clean and sure alternative way
1723 	     to find the first insn after INSN that is not now deleted.
1724 	     I hope this works.  */
1725 	  while (next && INSN_DELETED_P (next))
1726 	    next = NEXT_INSN (next);
1727 	  return next;
1728 	}
1729       else if (tablejump_p (insn, NULL, &lab_next))
1730 	{
1731 	  /* If we're deleting the tablejump, delete the dispatch table.
1732 	     We may not be able to kill the label immediately preceding
1733 	     just yet, as it might be referenced in code leading up to
1734 	     the tablejump.  */
1735 	  delete_related_insns (lab_next);
1736 	}
1737     }
1738 
1739   /* Likewise if we're deleting a dispatch table.  */
1740 
1741   if (GET_CODE (insn) == JUMP_INSN
1742       && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1743 	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1744     {
1745       rtx pat = PATTERN (insn);
1746       int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1747       int len = XVECLEN (pat, diff_vec_p);
1748 
1749       for (i = 0; i < len; i++)
1750 	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1751 	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1752       while (next && INSN_DELETED_P (next))
1753 	next = NEXT_INSN (next);
1754       return next;
1755     }
1756 
1757   /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
1758   if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1759     for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1760       if (REG_NOTE_KIND (note) == REG_LABEL
1761 	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
1762 	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1763 	if (LABEL_NUSES (XEXP (note, 0)) == 0)
1764 	  delete_related_insns (XEXP (note, 0));
1765 
1766   while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1767     prev = PREV_INSN (prev);
1768 
1769   /* If INSN was a label and a dispatch table follows it,
1770      delete the dispatch table.  The tablejump must have gone already.
1771      It isn't useful to fall through into a table.  */
1772 
1773   if (was_code_label
1774       && NEXT_INSN (insn) != 0
1775       && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1776       && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1777 	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1778     next = delete_related_insns (NEXT_INSN (insn));
1779 
1780   /* If INSN was a label, delete insns following it if now unreachable.  */
1781 
1782   if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1783     {
1784       RTX_CODE code;
1785       while (next != 0
1786 	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1787 		 || code == NOTE || code == BARRIER
1788 		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1789 	{
1790 	  if (code == NOTE
1791 	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1792 	    next = NEXT_INSN (next);
1793 	  /* Keep going past other deleted labels to delete what follows.  */
1794 	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
1795 	    next = NEXT_INSN (next);
1796 	  else
1797 	    /* Note: if this deletes a jump, it can cause more
1798 	       deletion of unreachable code, after a different label.
1799 	       As long as the value from this recursive call is correct,
1800 	       this invocation functions correctly.  */
1801 	    next = delete_related_insns (next);
1802 	}
1803     }
1804 
1805   return next;
1806 }
1807 
1808 /* Delete a range of insns from FROM to TO, inclusive.
1809    This is for the sake of peephole optimization, so assume
1810    that whatever these insns do will still be done by a new
1811    peephole insn that will replace them.  */
1812 
1813 void
1814 delete_for_peephole (rtx from, rtx to)
1815 {
1816   rtx insn = from;
1817 
1818   while (1)
1819     {
1820       rtx next = NEXT_INSN (insn);
1821       rtx prev = PREV_INSN (insn);
1822 
1823       if (GET_CODE (insn) != NOTE)
1824 	{
1825 	  INSN_DELETED_P (insn) = 1;
1826 
1827 	  /* Patch this insn out of the chain.  */
1828 	  /* We don't do this all at once, because we
1829 	     must preserve all NOTEs.  */
1830 	  if (prev)
1831 	    NEXT_INSN (prev) = next;
1832 
1833 	  if (next)
1834 	    PREV_INSN (next) = prev;
1835 	}
1836 
1837       if (insn == to)
1838 	break;
1839       insn = next;
1840     }
1841 
1842   /* Note that if TO is an unconditional jump
1843      we *do not* delete the BARRIER that follows,
1844      since the peephole that replaces this sequence
1845      is also an unconditional jump in that case.  */
1846 }
1847 
1848 /* We have determined that AVOIDED_INSN is never reached, and are
1849    about to delete it.  If the insn chain between AVOIDED_INSN and
1850    FINISH contains more than one line from the current function, and
1851    contains at least one operation, print a warning if the user asked
1852    for it.  If FINISH is NULL, look between AVOIDED_INSN and a LABEL.
1853 
1854    CSE and inlining can duplicate insns, so it's possible to get
1855    spurious warnings from this.  */
1856 
1857 void
1858 never_reached_warning (rtx avoided_insn, rtx finish)
1859 {
1860   rtx insn;
1861   rtx a_line_note = NULL;
1862   int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
1863 
1864   if (!warn_notreached)
1865     return;
1866 
1867   /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
1868      us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
1869      the line note.  */
1870   insn = avoided_insn;
1871   while (1)
1872     {
1873       rtx prev = PREV_INSN (insn);
1874       if (prev == NULL_RTX
1875 	  || GET_CODE (prev) != NOTE)
1876 	break;
1877       insn = prev;
1878     }
1879 
1880   /* Scan forwards, looking at LINE_NUMBER notes; if FINISH is NULL, stop
1881      when we hit a LABEL, otherwise continue until we run out of insns.  */
1882 
1883   for (; insn != NULL; insn = NEXT_INSN (insn))
1884     {
1885       if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
1886 	  || GET_CODE (insn) == BARRIER)
1887 	break;
1888 
1889       if (GET_CODE (insn) == NOTE		/* A line number note?  */
1890 	  && NOTE_LINE_NUMBER (insn) >= 0)
1891 	{
1892 	  if (a_line_note == NULL)
1893 	    a_line_note = insn;
1894 	  else
1895 	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1896 				  != NOTE_LINE_NUMBER (insn));
1897 	}
1898       else if (INSN_P (insn))
1899 	{
1900 	  if (reached_end)
1901 	    break;
1902 	  contains_insn = 1;
1903 	}
1904 
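      /* FINISH itself does not stop the scan; we keep going until the next
         real insn, so a line note emitted after FINISH still counts.  */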
1905       if (insn == finish)
1906 	reached_end = 1;
1907     }
1908   if (two_avoided_lines && contains_insn)
1909     {
1910       location_t locus;
1911       locus.file = NOTE_SOURCE_FILE (a_line_note);
1912       locus.line = NOTE_LINE_NUMBER (a_line_note);
1913       warning ("%Hwill never be executed", &locus);
1914     }
1915 }
1916 
1917 /* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
1918    NLABEL as a return.  Accrue modifications into the change group.  */
1919 
1920 static void
1921 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1922 {
1923   rtx x = *loc;
1924   RTX_CODE code = GET_CODE (x);
1925   int i;
1926   const char *fmt;
1927 
1928   if (code == LABEL_REF)
1929     {
1930       if (XEXP (x, 0) == olabel)
1931 	{
1932 	  rtx n;
1933 	  if (nlabel)
1934 	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1935 	  else
1936 	    n = gen_rtx_RETURN (VOIDmode);
1937 
1938 	  validate_change (insn, loc, n, 1);
1939 	  return;
1940 	}
1941     }
1942   else if (code == RETURN && olabel == 0)
1943     {
1944       x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
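      /* A bare LABEL_REF is not a valid insn pattern by itself, so when we
         are replacing the whole pattern wrap it in a SET of the pc.  */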
1945       if (loc == &PATTERN (insn))
1946 	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1947       validate_change (insn, loc, x, 1);
1948       return;
1949     }
1950 
1951   if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1952       && GET_CODE (SET_SRC (x)) == LABEL_REF
1953       && XEXP (SET_SRC (x), 0) == olabel)
1954     {
1955       validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1956       return;
1957     }
1958 
1959   fmt = GET_RTX_FORMAT (code);
1960   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1961     {
1962       if (fmt[i] == 'e')
1963 	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1964       else if (fmt[i] == 'E')
1965 	{
1966 	  int j;
1967 	  for (j = 0; j < XVECLEN (x, i); j++)
1968 	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1969 	}
1970     }
1971 }
1972 
1973 /* Similar, but apply the change group and report success or failure.  */
1974 
1975 static int
1976 redirect_exp (rtx olabel, rtx nlabel, rtx insn)
1977 {
1978   rtx *loc;
1979 
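  /* Find the part of the pattern to rewrite.  If the jump is wrapped in a
     PARALLEL, the branch itself is assumed to be the first element.  */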
1980   if (GET_CODE (PATTERN (insn)) == PARALLEL)
1981     loc = &XVECEXP (PATTERN (insn), 0, 0);
1982   else
1983     loc = &PATTERN (insn);
1984 
1985   redirect_exp_1 (loc, olabel, nlabel, insn);
1986   if (num_validated_changes () == 0)
1987     return 0;
1988 
1989   return apply_change_group ();
1990 }
1991 
1992 /* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1993    the modifications into the change group.  Return false if we did
1994    not see how to do that.  */
1995 
1996 int
1997 redirect_jump_1 (rtx jump, rtx nlabel)
1998 {
1999   int ochanges = num_validated_changes ();
2000   rtx *loc;
2001 
2002   if (GET_CODE (PATTERN (jump)) == PARALLEL)
2003     loc = &XVECEXP (PATTERN (jump), 0, 0);
2004   else
2005     loc = &PATTERN (jump);
2006 
2007   redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2008   return num_validated_changes () > ochanges;
2009 }
2010 
2011 /* Make JUMP go to NLABEL instead of where it jumps now.  If the old
2012    jump target label is unused as a result, it and the code following
2013    it may be deleted.
2014 
2015    If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2016    RETURN insn.
2017 
2018    The return value will be 1 if the change was made, 0 if it wasn't
2019    (this can only occur for NLABEL == 0).  */
2020 
2021 int
2022 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
2023 {
2024   rtx olabel = JUMP_LABEL (jump);
2025   rtx note;
2026 
2027   if (nlabel == olabel)
2028     return 1;
2029 
2030   if (! redirect_exp (olabel, nlabel, jump))
2031     return 0;
2032 
2033   JUMP_LABEL (jump) = nlabel;
2034   if (nlabel)
2035     ++LABEL_NUSES (nlabel);
2036 
2037   /* Update labels in any REG_EQUAL note.  */
2038   if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
2039     {
2040       if (nlabel && olabel)
2041 	{
2042 	  rtx dest = XEXP (note, 0);
2043 
2044 	  if (GET_CODE (dest) == IF_THEN_ELSE)
2045 	    {
2046 	      if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
2047 		  && XEXP (XEXP (dest, 1), 0) == olabel)
2048 		XEXP (XEXP (dest, 1), 0) = nlabel;
2049 	      if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
2050 		  && XEXP (XEXP (dest, 2), 0) == olabel)
2051 		XEXP (XEXP (dest, 2), 0) = nlabel;
2052 	    }
2053 	  else
2054 	    remove_note (jump, note);
2055 	}
2056       else
2057         remove_note (jump, note);
2058     }
2059 
2060   /* If we're eliding the jump over exception cleanups at the end of a
2061      function, move the function end note so that -Wreturn-type works.  */
2062   if (olabel && nlabel
2063       && NEXT_INSN (olabel)
2064       && GET_CODE (NEXT_INSN (olabel)) == NOTE
2065       && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2066     emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2067 
2068   if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2069       /* Undefined labels will remain outside the insn stream.  */
2070       && INSN_UID (olabel))
2071     delete_related_insns (olabel);
2072 
2073   return 1;
2074 }
2075 
2076 /* Invert the jump condition of the jump insn INSN.
2077    Accrue the modifications into the change group.  */
2078 
2079 static void
2080 invert_exp_1 (rtx insn)
2081 {
2082   RTX_CODE code;
2083   rtx x = pc_set (insn);
2084 
2085   if (!x)
2086     abort ();
2087   x = SET_SRC (x);
2088 
2089   code = GET_CODE (x);
2090 
2091   if (code == IF_THEN_ELSE)
2092     {
2093       rtx comp = XEXP (x, 0);
2094       rtx tem;
2095       enum rtx_code reversed_code;
2096 
2097       /* We can do this in two ways:  The preferable way, which can only
2098 	 be done if this is not an integer comparison, is to reverse
2099 	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
2100 	 of the IF_THEN_ELSE.  If we can't do either, fail.  */
2101 
2102       reversed_code = reversed_comparison_code (comp, insn);
2103 
2104       if (reversed_code != UNKNOWN)
2105 	{
2106 	  validate_change (insn, &XEXP (x, 0),
2107 			   gen_rtx_fmt_ee (reversed_code,
2108 					   GET_MODE (comp), XEXP (comp, 0),
2109 					   XEXP (comp, 1)),
2110 			   1);
2111 	  return;
2112 	}
2113 
2114       tem = XEXP (x, 1);
2115       validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2116       validate_change (insn, &XEXP (x, 2), tem, 1);
2117     }
2118   else
2119     abort ();
2120 }
2121 
2122 /* Invert the jump condition of conditional jump insn, INSN.
2123 
2124    Return 1 if we can do so, 0 if we cannot find a way to do so that
2125    matches a pattern.  */
2126 
2127 static int
2128 invert_exp (rtx insn)
2129 {
2130   invert_exp_1 (insn);
2131   if (num_validated_changes () == 0)
2132     return 0;
2133 
2134   return apply_change_group ();
2135 }
2136 
2137 /* Invert the condition of the jump JUMP, and make it jump to label
2138    NLABEL instead of where it jumps now.  Accrue changes into the
2139    change group.  Return false if we didn't see how to perform the
2140    inversion and redirection.  */
2141 
2142 int
2143 invert_jump_1 (rtx jump, rtx nlabel)
2144 {
2145   int ochanges;
2146 
2147   ochanges = num_validated_changes ();
2148   invert_exp_1 (jump);
2149   if (num_validated_changes () == ochanges)
2150     return 0;
2151 
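  /* The inversion queued above and the label change queued below end up in
     the same change group, to be applied or cancelled by the caller.  */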
2152   return redirect_jump_1 (jump, nlabel);
2153 }
2154 
2155 /* Invert the condition of the jump JUMP, and make it jump to label
2156    NLABEL instead of where it jumps now.  Return true if successful.  */
2157 
2158 int
2159 invert_jump (rtx jump, rtx nlabel, int delete_unused)
2160 {
2161   /* We have to either invert the condition and change the label or
2162      do neither.  Either operation could fail.  We first try to invert
2163      the jump. If that succeeds, we try changing the label.  If that fails,
2164      we invert the jump back to what it was.  */
2165 
2166   if (! invert_exp (jump))
2167     return 0;
2168 
2169   if (redirect_jump (jump, nlabel, delete_unused))
2170     {
2171       /* Remove any REG_EQUAL note, since it described the original, uninverted condition.  */
2172       rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
2173       if (note)
2174 	remove_note (jump, note);
2175 
2176       invert_br_probabilities (jump);
2177 
2178       return 1;
2179     }
2180 
2181   if (! invert_exp (jump))
2182     /* This should just be putting it back the way it was.  */
2183     abort ();
2184 
2185   return 0;
2186 }
2187 
2188 
2189 /* Like rtx_equal_p except that it considers two REGs as equal
2190    if they renumber to the same value and considers two commutative
2191    operations to be the same if the order of the operands has been
2192    reversed.
2193 
2194    ??? Addition is not commutative on the PA due to the weird implicit
2195    space register selection rules for memory addresses.  Therefore, we
2196    don't consider a + b == b + a.
2197 
2198    We could/should make this test a little tighter.  Possibly only
2199    disabling it on the PA via some backend macro or only disabling this
2200    case when the PLUS is inside a MEM.  */
2201 
2202 int
2203 rtx_renumbered_equal_p (rtx x, rtx y)
2204 {
2205   int i;
2206   RTX_CODE code = GET_CODE (x);
2207   const char *fmt;
2208 
2209   if (x == y)
2210     return 1;
2211 
2212   if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2213       && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2214 				  && GET_CODE (SUBREG_REG (y)) == REG)))
2215     {
2216       int reg_x = -1, reg_y = -1;
2217       int byte_x = 0, byte_y = 0;
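      /* BYTE_X and BYTE_Y hold SUBREG_BYTE offsets that could not be folded
         into the register number (i.e. the inner register is still a pseudo);
         they must match as well for X and Y to be considered equal.  */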
2218 
2219       if (GET_MODE (x) != GET_MODE (y))
2220 	return 0;
2221 
2222       /* If we haven't done any renumbering, don't
2223 	 make any assumptions.  */
2224       if (reg_renumber == 0)
2225 	return rtx_equal_p (x, y);
2226 
2227       if (code == SUBREG)
2228 	{
2229 	  reg_x = REGNO (SUBREG_REG (x));
2230 	  byte_x = SUBREG_BYTE (x);
2231 
2232 	  if (reg_renumber[reg_x] >= 0)
2233 	    {
2234 	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
2235 					   GET_MODE (SUBREG_REG (x)),
2236 					   byte_x,
2237 					   GET_MODE (x));
2238 	      byte_x = 0;
2239 	    }
2240 	}
2241       else
2242 	{
2243 	  reg_x = REGNO (x);
2244 	  if (reg_renumber[reg_x] >= 0)
2245 	    reg_x = reg_renumber[reg_x];
2246 	}
2247 
2248       if (GET_CODE (y) == SUBREG)
2249 	{
2250 	  reg_y = REGNO (SUBREG_REG (y));
2251 	  byte_y = SUBREG_BYTE (y);
2252 
2253 	  if (reg_renumber[reg_y] >= 0)
2254 	    {
2255 	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
2256 					   GET_MODE (SUBREG_REG (y)),
2257 					   byte_y,
2258 					   GET_MODE (y));
2259 	      byte_y = 0;
2260 	    }
2261 	}
2262       else
2263 	{
2264 	  reg_y = REGNO (y);
2265 	  if (reg_renumber[reg_y] >= 0)
2266 	    reg_y = reg_renumber[reg_y];
2267 	}
2268 
2269       return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2270     }
2271 
2272   /* Now we have disposed of all the cases
2273      in which different rtx codes can match.  */
2274   if (code != GET_CODE (y))
2275     return 0;
2276 
2277   switch (code)
2278     {
2279     case PC:
2280     case CC0:
2281     case ADDR_VEC:
2282     case ADDR_DIFF_VEC:
2283     case CONST_INT:
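      /* PC, CC0 and CONST_INT are shared objects, so pointer equality was
         already checked above; dispatch tables are never considered equal.  */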
2284       return 0;
2285 
2286     case LABEL_REF:
2287       /* We can't assume nonlocal labels have their following insns yet.  */
2288       if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2289 	return XEXP (x, 0) == XEXP (y, 0);
2290 
2291       /* Two label-refs are equivalent if they point at labels
2292 	 in the same position in the instruction stream.  */
2293       return (next_real_insn (XEXP (x, 0))
2294 	      == next_real_insn (XEXP (y, 0)));
2295 
2296     case SYMBOL_REF:
2297       return XSTR (x, 0) == XSTR (y, 0);
2298 
2299     case CODE_LABEL:
2300       /* If we didn't match EQ equality above, they aren't the same.  */
2301       return 0;
2302 
2303     default:
2304       break;
2305     }
2306 
2307   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2308 
2309   if (GET_MODE (x) != GET_MODE (y))
2310     return 0;
2311 
2312   /* For commutative operations, the RTXs match if the operands match
2313      in either order.  Also handle the simple binary and unary cases without a loop.
2314 
2315      ??? Don't consider PLUS a commutative operator; see comments above.  */
2316   if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2317       && code != PLUS)
2318     return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2319 	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2320 	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2321 		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2322   else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2323     return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2324 	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2325   else if (GET_RTX_CLASS (code) == '1')
2326     return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2327 
2328   /* Compare the elements.  If any pair of corresponding elements
2329      fails to match, return 0 for the whole thing.  */
2330 
2331   fmt = GET_RTX_FORMAT (code);
2332   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2333     {
2334       int j;
2335       switch (fmt[i])
2336 	{
2337 	case 'w':
2338 	  if (XWINT (x, i) != XWINT (y, i))
2339 	    return 0;
2340 	  break;
2341 
2342 	case 'i':
2343 	  if (XINT (x, i) != XINT (y, i))
2344 	    return 0;
2345 	  break;
2346 
2347 	case 't':
2348 	  if (XTREE (x, i) != XTREE (y, i))
2349 	    return 0;
2350 	  break;
2351 
2352 	case 's':
2353 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2354 	    return 0;
2355 	  break;
2356 
2357 	case 'e':
2358 	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2359 	    return 0;
2360 	  break;
2361 
2362 	case 'u':
2363 	  if (XEXP (x, i) != XEXP (y, i))
2364 	    return 0;
2365 	  /* Fall through.  */
2366 	case '0':
2367 	  break;
2368 
2369 	case 'E':
2370 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2371 	    return 0;
2372 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2373 	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2374 	      return 0;
2375 	  break;
2376 
2377 	default:
2378 	  abort ();
2379 	}
2380     }
2381   return 1;
2382 }
2383 
2384 /* If X is a hard register or equivalent to one or a subregister of one,
2385    return the hard register number.  If X is a pseudo register that was not
2386    assigned a hard register, return the pseudo register number.  Otherwise,
2387    return -1.  Any rtx is valid for X.  */
2388 
2389 int
2390 true_regnum (rtx x)
2391 {
2392   if (GET_CODE (x) == REG)
2393     {
2394       if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2395 	return reg_renumber[REGNO (x)];
2396       return REGNO (x);
2397     }
2398   if (GET_CODE (x) == SUBREG)
2399     {
2400       int base = true_regnum (SUBREG_REG (x));
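      /* subreg_regno_offset is only meaningful for hard registers, so a
         SUBREG of a pseudo that was not assigned a hard register falls
         through and yields -1.  */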
2401       if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2402 	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2403 					   GET_MODE (SUBREG_REG (x)),
2404 					   SUBREG_BYTE (x), GET_MODE (x));
2405     }
2406   return -1;
2407 }
2408 
2409 /* Return the register number of REG, looking through a SUBREG if necessary.  */
2410 unsigned int
2411 reg_or_subregno (rtx reg)
2412 {
2413   if (REG_P (reg))
2414     return REGNO (reg);
2415   if (GET_CODE (reg) == SUBREG)
2416     return REGNO (SUBREG_REG (reg));
2417   abort ();
2418 }
2419