/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file is the last vestige of the jump-optimization pass of the
   compiler.  Now it contains basically a set of utility functions for
   operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
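
/* For illustration (not part of the original source): in an RTL dump, a
   direct jump and its target label look roughly like

     (jump_insn 10 9 11 (set (pc) (label_ref 23)) ...)
     ...
     (code_label 23 22 24 2 "" [1 uses])

   JUMP_LABEL of insn 10 points at code_label 23, and the "[1 uses]"
   annotation is that label's LABEL_NUSES count.  */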

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void delete_computation (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
static void delete_prior_computation (rtx, rtx);

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
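
/* For illustration (not part of the original source): forced_labels is an
   EXPR_LIST chain, which is why the loop above uses XEXP (insn, 0) for the
   label in each node and XEXP (insn, 1) for the next link:

     (expr_list (code_label 23 ...)
       (expr_list (code_label 57 ...)
         (nil)))  */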

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
	{
	  prev = prev_nonnote_insn (insn);
	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
  return 0;
}
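
/* For illustration (not part of the original source): given the stream

     insn  NOTE  BARRIER

   the pass above moves the BARRIER so that it directly follows the insn,
   giving insn BARRIER NOTE; and when two BARRIERs are adjacent (ignoring
   notes), the later one is deleted.  */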

struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

unsigned int
purge_line_number_notes (void)
{
  rtx last_note = 0;
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (NOTE_P (insn))
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	  /* Any previous line note was for the prologue; gdb wants a new
	     note after the prologue even if it is for the same line.  */
	  last_note = NULL_RTX;
	else if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    /* Delete this note if it is identical to previous note.  */
	    if (last_note
#ifdef USE_MAPPED_LOCATION
		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
#else
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
#endif
		)
	      {
		delete_related_insns (insn);
		continue;
	      }

	    last_note = insn;
	  }
      }
  return 0;
}

struct tree_opt_pass pass_purge_lineno_notes =
{
  "elnotes",                            /* name */
  NULL,                                 /* gate */
  purge_line_number_notes,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
	rtx note, next;

	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_LABEL
		&& ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
	      remove_note (insn, note);
	  }
      }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);
	if (! INSN_DELETED_P (insn) && JUMP_P (insn))
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }
}

/* Move all block-beg, block-end and loop-beg notes between START and END out
   before START.  START and END may be such notes.  Returns the values of the
   new starting and ending insns, which may be different if the original ones
   were such notes.  Return true if there were only such notes and no real
   instructions.  */

bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
	{
	  /* BLOCK_BEG or BLOCK_END notes only exist during the `final'
	     pass, so they should never be seen here.  */
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

	  if (insn == start)
	    start = next;
	  else
	    {
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
      else
	last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
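
/* For illustration (not part of the original source): the unlink/relink in
   the else branch above is a plain doubly-linked-list splice on the insn
   chain.  A note N sitting between insns A and B is detached
   (NEXT_INSN (A) = B; PREV_INSN (B) = A) and re-inserted immediately
   before the current START.  */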

/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we have a CC_MODE compare and we don't
   know whether its source is a floating point or integer comparison.  The
   machine description should define the REVERSIBLE_CC_MODE and
   REVERSE_CONDITION macros to help this function avoid overhead in these
   cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
	         models of i387 comparisons that first move flag registers
	         around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we don't understand,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
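
/* For illustration (not part of the original source): for integer operands,
   (ge x y) reverses cleanly to (lt x y).  For IEEE floating point, however,
   (lt x y) can only be reversed to (unge x y), because both (lt x y) and
   (ge x y) are false when either operand is a NaN; hence the special
   handling of the unordered codes above.  */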

/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}

/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
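
/* For illustration (not part of the original source): swap_condition answers
   "which code keeps the same meaning if the two operands are exchanged?"
   E.g. (gt x y) is equivalent to (lt y x), so GT maps to LT; symmetric
   codes such as EQ, NE and UNORDERED map to themselves.  Unlike
   reverse_condition, this is safe for IEEE floating point because the
   truth value of the test is unchanged, NaNs included.  */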

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}
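
/* For illustration (not part of the original source): x < y implies
   x <= y, so comparison_dominates_p (LT, LE) returns 1 while the converse
   comparison_dominates_p (LE, LT) returns 0.  LT also implies ORDERED for
   floating point, since a true < result rules out a NaN operand.  */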

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (rtx insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
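
/* For illustration (not part of the original source): the only pattern
   simplejump_p accepts is the bare direct jump

     (set (pc) (label_ref 23))

   A jump wrapped in a PARALLEL, an indirect jump through a register, or
   a conditional jump all make it return 0.  */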

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || GET_CODE (XEXP (x, 1)) == RETURN))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| GET_CODE (XEXP (x, 2)) == RETURN))));
}
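
/* For illustration (not part of the original source): the two conditional
   shapes accepted above are

     (set (pc) (if_then_else (cond) (label_ref L) (pc)))

   and the inverted form

     (set (pc) (if_then_else (cond) (pc) (label_ref L)))

   with RETURN also allowed in place of the label_ref.  */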

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}

/* Return the set of PC in INSN, otherwise NULL.  */

rtx
pc_set (rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
	  || (a == PC && (b == LABEL_REF || b == RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
	       || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
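
/* For illustration (not part of the original source): on a cc0 target,
   sets_cc0_p returns 1 for

     (set (cc0) (compare (reg:SI 0) (const_int 0)))

   and -1 for a PARALLEL that sets cc0 and a data register at once,
   telling the caller the insn cannot simply be deleted.  */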

/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return null if the chain ultimately leads to a return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a USE or CLOBBER.  */

rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& JUMP_P (insn)
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& BARRIER_P (next));
       depth++)
    {
      rtx tem;
      if (!reload_completed && flag_test_coverage)
	{
	  /* ??? Optional.  Disables some optimizations, but makes
	     gcov output more accurate with -O.  */
	  for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	    if (NOTE_P (tem) && NOTE_LINE_NUMBER (tem) > 0)
	      return value;
	}

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
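
/* For illustration (not part of the original source): with a chain

     L1: jump L2   ...   L2: jump L3   ...   L3: (real code)

   follow_jumps (L1) returns L3.  A cycle, or a chain more than 10 jumps
   deep, returns the original label unchanged so the caller leaves the
   insn alone.  */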


/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (JUMP_P (insn))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}

/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}

/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
		    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && NONJUMP_INSN_P (XEXP (pat, 0)))
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && !CALL_P (our_prev))
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && REG_P (SET_DEST (pat)))
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[dest_regno]
					[GET_MODE (SET_DEST (pat))] : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));
1309*404b540aSrobert 		  for (i = dest_regno; i < dest_endregno; i++)
1310*404b540aSrobert 		    if (! find_regno_note (our_prev, REG_UNUSED, i))
1311*404b540aSrobert 		      break;
1312*404b540aSrobert 
1313*404b540aSrobert 		  if (i == dest_endregno)
1314*404b540aSrobert 		    delete_computation (our_prev);
1315*404b540aSrobert 		}
1316*404b540aSrobert 	    }
1317*404b540aSrobert 
1318*404b540aSrobert 	  break;
1319*404b540aSrobert 	}
1320*404b540aSrobert 
1321*404b540aSrobert       /* If PAT references the register that dies here, it is an
1322*404b540aSrobert 	 additional use.  Hence any prior SET isn't dead.  However, this
1323*404b540aSrobert 	 insn becomes the new place for the REG_DEAD note.  */
1324*404b540aSrobert       if (reg_overlap_mentioned_p (reg, pat))
1325*404b540aSrobert 	{
1326*404b540aSrobert 	  XEXP (note, 1) = REG_NOTES (our_prev);
1327*404b540aSrobert 	  REG_NOTES (our_prev) = note;
1328*404b540aSrobert 	  break;
1329*404b540aSrobert 	}
1330*404b540aSrobert     }
1331*404b540aSrobert }
1332*404b540aSrobert 
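/* Illustrative RTL sketch, not part of the original file: when the dying
   register has another use between its SET and INSN, the SET is not dead.
   The loop above then re-homes the REG_DEAD note on that use and stops.
   Insn numbers and patterns below are made up.

     (insn 8 ... (set (reg:SI 100) ...))     ; kept
     (insn 9 ... (use (reg:SI 100)))         ; REG_DEAD note moves here
     (jump_insn 10 ...)                      ; being deleted, used r100  */
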
1333*404b540aSrobert /* Delete INSN and recursively delete insns that compute values used only
1334*404b540aSrobert    by INSN.  This uses the REG_DEAD notes computed during flow analysis.
1335*404b540aSrobert    If we are running before flow.c, we need do nothing since flow.c will
1336*404b540aSrobert    delete dead code.  We also can't know if the registers being used are
1337*404b540aSrobert    dead or not at this point.
1338*404b540aSrobert 
1339*404b540aSrobert    Otherwise, look at all our REG_DEAD notes.  If a previous insn does
1340*404b540aSrobert    nothing other than set a register that dies in this insn, we can delete
1341*404b540aSrobert    that insn as well.
1342*404b540aSrobert 
1343*404b540aSrobert    On machines with CC0, if CC0 is used in this insn, we may be able to
1344*404b540aSrobert    delete the insn that set it.  */
1345*404b540aSrobert 
1346*404b540aSrobert static void
1347*404b540aSrobert delete_computation (rtx insn)
1348*404b540aSrobert {
1349*404b540aSrobert   rtx note, next;
1350*404b540aSrobert 
1351*404b540aSrobert #ifdef HAVE_cc0
1352*404b540aSrobert   if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1353*404b540aSrobert     {
1354*404b540aSrobert       rtx prev = prev_nonnote_insn (insn);
1355*404b540aSrobert       /* We assume that at this stage
1356*404b540aSrobert 	 CC's are always set explicitly
1357*404b540aSrobert 	 and always immediately before the jump that
1358*404b540aSrobert 	 will use them.  So if the previous insn
1359*404b540aSrobert 	 exists to set the CC's, delete it
1360*404b540aSrobert 	 (unless it performs auto-increments, etc.).  */
1361*404b540aSrobert       if (prev && NONJUMP_INSN_P (prev)
1362*404b540aSrobert 	  && sets_cc0_p (PATTERN (prev)))
1363*404b540aSrobert 	{
1364*404b540aSrobert 	  if (sets_cc0_p (PATTERN (prev)) > 0
1365*404b540aSrobert 	      && ! side_effects_p (PATTERN (prev)))
1366*404b540aSrobert 	    delete_computation (prev);
1367*404b540aSrobert 	  else
1368*404b540aSrobert 	    /* Otherwise, show that cc0 won't be used.  */
1369*404b540aSrobert 	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1370*404b540aSrobert 						  cc0_rtx, REG_NOTES (prev));
1371*404b540aSrobert 	}
1372*404b540aSrobert     }
1373*404b540aSrobert #endif
1374*404b540aSrobert 
1375*404b540aSrobert   for (note = REG_NOTES (insn); note; note = next)
1376*404b540aSrobert     {
1377*404b540aSrobert       next = XEXP (note, 1);
1378*404b540aSrobert 
1379*404b540aSrobert       if (REG_NOTE_KIND (note) != REG_DEAD
1380*404b540aSrobert 	  /* Verify that the REG_NOTE is legitimate.  */
1381*404b540aSrobert 	  || !REG_P (XEXP (note, 0)))
1382*404b540aSrobert 	continue;
1383*404b540aSrobert 
1384*404b540aSrobert       delete_prior_computation (note, insn);
1385*404b540aSrobert     }
1386*404b540aSrobert 
1387*404b540aSrobert   delete_related_insns (insn);
1388*404b540aSrobert }
1389*404b540aSrobert 
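/* Illustrative RTL sketch, not part of the original file: if r100 dies at
   the jump below (a REG_DEAD note) and the insn before it does nothing but
   compute r100, deleting the jump through delete_computation deletes the
   computation as well.  Insn numbers and patterns are made up.

     (insn 10 ... (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4))))
     (jump_insn 11 ... (set (pc) (reg:SI 100)))   ; REG_DEAD (reg:SI 100)

   delete_computation (insn 11) removes both insns.  */
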
1390*404b540aSrobert /* Delete insn INSN from the chain of insns and update label ref counts
1391*404b540aSrobert    and delete insns now unreachable.
1392*404b540aSrobert 
1393*404b540aSrobert    Returns the first insn after INSN that was not deleted.
1394*404b540aSrobert 
1395*404b540aSrobert    Usage of this function is deprecated.  Use delete_insn instead, and run
1396*404b540aSrobert    a subsequent cfg_cleanup pass to delete unreachable code if needed.  */
1397*404b540aSrobert 
1398*404b540aSrobert rtx
1399*404b540aSrobert delete_related_insns (rtx insn)
1400*404b540aSrobert {
1401*404b540aSrobert   int was_code_label = (LABEL_P (insn));
1402*404b540aSrobert   rtx note;
1403*404b540aSrobert   rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1404*404b540aSrobert 
1405*404b540aSrobert   while (next && INSN_DELETED_P (next))
1406*404b540aSrobert     next = NEXT_INSN (next);
1407*404b540aSrobert 
1408*404b540aSrobert   /* This insn is already deleted => return first following nondeleted.  */
1409*404b540aSrobert   if (INSN_DELETED_P (insn))
1410*404b540aSrobert     return next;
1411*404b540aSrobert 
1412*404b540aSrobert   delete_insn (insn);
1413*404b540aSrobert 
1414*404b540aSrobert   /* If instruction is followed by a barrier,
1415*404b540aSrobert      delete the barrier too.  */
1416*404b540aSrobert 
1417*404b540aSrobert   if (next != 0 && BARRIER_P (next))
1418*404b540aSrobert     delete_insn (next);
1419*404b540aSrobert 
1420*404b540aSrobert   /* If deleting a jump, delete_insn above has already decremented the
1421*404b540aSrobert      count of the label; delete the label if it is now unused.  */
1422*404b540aSrobert 
1423*404b540aSrobert   if (JUMP_P (insn) && JUMP_LABEL (insn))
1424*404b540aSrobert     {
1425*404b540aSrobert       rtx lab = JUMP_LABEL (insn), lab_next;
1426*404b540aSrobert 
1427*404b540aSrobert       if (LABEL_NUSES (lab) == 0)
1428*404b540aSrobert 	{
1429*404b540aSrobert 	  /* This can delete NEXT or PREV,
1430*404b540aSrobert 	     either directly if NEXT is JUMP_LABEL (INSN),
1431*404b540aSrobert 	     or indirectly through more levels of jumps.  */
1432*404b540aSrobert 	  delete_related_insns (lab);
1433*404b540aSrobert 
1434*404b540aSrobert 	  /* I feel a little doubtful about this loop,
1435*404b540aSrobert 	     but I see no clean and sure alternative way
1436*404b540aSrobert 	     to find the first insn after INSN that is not now deleted.
1437*404b540aSrobert 	     I hope this works.  */
1438*404b540aSrobert 	  while (next && INSN_DELETED_P (next))
1439*404b540aSrobert 	    next = NEXT_INSN (next);
1440*404b540aSrobert 	  return next;
1441*404b540aSrobert 	}
1442*404b540aSrobert       else if (tablejump_p (insn, NULL, &lab_next))
1443*404b540aSrobert 	{
1444*404b540aSrobert 	  /* If we're deleting the tablejump, delete the dispatch table.
1445*404b540aSrobert 	     We may not be able to kill the immediately preceding label
1446*404b540aSrobert 	     just yet, as it might still be referenced in code leading up to
1447*404b540aSrobert 	     the tablejump.  */
1448*404b540aSrobert 	  delete_related_insns (lab_next);
1449*404b540aSrobert 	}
1450*404b540aSrobert     }
1451*404b540aSrobert 
1452*404b540aSrobert   /* Likewise if we're deleting a dispatch table.  */
1453*404b540aSrobert 
1454*404b540aSrobert   if (JUMP_P (insn)
1455*404b540aSrobert       && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1456*404b540aSrobert 	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1457*404b540aSrobert     {
1458*404b540aSrobert       rtx pat = PATTERN (insn);
1459*404b540aSrobert       int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1460*404b540aSrobert       int len = XVECLEN (pat, diff_vec_p);
1461*404b540aSrobert 
1462*404b540aSrobert       for (i = 0; i < len; i++)
1463*404b540aSrobert 	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1464*404b540aSrobert 	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1465*404b540aSrobert       while (next && INSN_DELETED_P (next))
1466*404b540aSrobert 	next = NEXT_INSN (next);
1467*404b540aSrobert       return next;
1468*404b540aSrobert     }
1469*404b540aSrobert 
1470*404b540aSrobert   /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
1471*404b540aSrobert   if (NONJUMP_INSN_P (insn) || CALL_P (insn))
1472*404b540aSrobert     for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1473*404b540aSrobert       if (REG_NOTE_KIND (note) == REG_LABEL
1474*404b540aSrobert 	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
1475*404b540aSrobert 	  && LABEL_P (XEXP (note, 0)))
1476*404b540aSrobert 	if (LABEL_NUSES (XEXP (note, 0)) == 0)
1477*404b540aSrobert 	  delete_related_insns (XEXP (note, 0));
1478*404b540aSrobert 
1479*404b540aSrobert   while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
1480*404b540aSrobert     prev = PREV_INSN (prev);
1481*404b540aSrobert 
1482*404b540aSrobert   /* If INSN was a label and a dispatch table follows it,
1483*404b540aSrobert      delete the dispatch table.  The tablejump must have gone already.
1484*404b540aSrobert      It isn't useful to fall through into a table.  */
1485*404b540aSrobert 
1486*404b540aSrobert   if (was_code_label
1487*404b540aSrobert       && NEXT_INSN (insn) != 0
1488*404b540aSrobert       && JUMP_P (NEXT_INSN (insn))
1489*404b540aSrobert       && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1490*404b540aSrobert 	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1491*404b540aSrobert     next = delete_related_insns (NEXT_INSN (insn));
1492*404b540aSrobert 
1493*404b540aSrobert   /* If INSN was a label, delete insns following it if now unreachable.  */
1494*404b540aSrobert 
1495*404b540aSrobert   if (was_code_label && prev && BARRIER_P (prev))
1496*404b540aSrobert     {
1497*404b540aSrobert       enum rtx_code code;
1498*404b540aSrobert       while (next)
1499*404b540aSrobert 	{
1500*404b540aSrobert 	  code = GET_CODE (next);
1501*404b540aSrobert 	  if (code == NOTE
1502*404b540aSrobert 	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1503*404b540aSrobert 	    next = NEXT_INSN (next);
1504*404b540aSrobert 	  /* Keep going past other deleted labels to delete what follows.  */
1505*404b540aSrobert 	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
1506*404b540aSrobert 	    next = NEXT_INSN (next);
1507*404b540aSrobert 	  else if (code == BARRIER || INSN_P (next))
1508*404b540aSrobert 	    /* Note: if this deletes a jump, it can cause more
1509*404b540aSrobert 	       deletion of unreachable code, after a different label.
1510*404b540aSrobert 	       As long as the value from this recursive call is correct,
1511*404b540aSrobert 	       this invocation functions correctly.  */
1512*404b540aSrobert 	    next = delete_related_insns (next);
1513*404b540aSrobert 	  else
1514*404b540aSrobert 	    break;
1515*404b540aSrobert 	}
1516*404b540aSrobert     }
1517*404b540aSrobert 
1518*404b540aSrobert   return next;
1519*404b540aSrobert }
1520*404b540aSrobert 
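/* Illustrative sketch, not part of the original file: because
   delete_related_insns returns the first surviving insn after the one it
   deleted, a caller can use it to drive a forward scan without stepping
   onto already-deleted insns.  "delete_while_example" and its predicate
   argument are hypothetical.  */

static void
delete_while_example (rtx insn, int (*doomed_p) (rtx))
{
  while (insn)
    {
      if (INSN_P (insn) && doomed_p (insn))
	insn = delete_related_insns (insn);
      else
	insn = NEXT_INSN (insn);
    }
}
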
1521*404b540aSrobert /* Delete a range of insns from FROM to TO, inclusive.
1522*404b540aSrobert    This is for the sake of peephole optimization, so assume
1523*404b540aSrobert    that whatever these insns do will still be done by a new
1524*404b540aSrobert    peephole insn that will replace them.  */
1525*404b540aSrobert 
1526*404b540aSrobert void
1527*404b540aSrobert delete_for_peephole (rtx from, rtx to)
1528*404b540aSrobert {
1529*404b540aSrobert   rtx insn = from;
1530*404b540aSrobert 
1531*404b540aSrobert   while (1)
1532*404b540aSrobert     {
1533*404b540aSrobert       rtx next = NEXT_INSN (insn);
1534*404b540aSrobert       rtx prev = PREV_INSN (insn);
1535*404b540aSrobert 
1536*404b540aSrobert       if (!NOTE_P (insn))
1537*404b540aSrobert 	{
1538*404b540aSrobert 	  INSN_DELETED_P (insn) = 1;
1539*404b540aSrobert 
1540*404b540aSrobert 	  /* Patch this insn out of the chain.  */
1541*404b540aSrobert 	  /* We don't do this all at once, because we
1542*404b540aSrobert 	     must preserve all NOTEs.  */
1543*404b540aSrobert 	  if (prev)
1544*404b540aSrobert 	    NEXT_INSN (prev) = next;
1545*404b540aSrobert 
1546*404b540aSrobert 	  if (next)
1547*404b540aSrobert 	    PREV_INSN (next) = prev;
1548*404b540aSrobert 	}
1549*404b540aSrobert 
1550*404b540aSrobert       if (insn == to)
1551*404b540aSrobert 	break;
1552*404b540aSrobert       insn = next;
1553*404b540aSrobert     }
1554*404b540aSrobert 
1555*404b540aSrobert   /* Note that if TO is an unconditional jump
1556*404b540aSrobert      we *do not* delete the BARRIER that follows,
1557*404b540aSrobert      since the peephole that replaces this sequence
1558*404b540aSrobert      is also an unconditional jump in that case.  */
1559*404b540aSrobert }
1560*404b540aSrobert 
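/* Illustrative sketch, not part of the original file, of how a generated
   peephole matcher might use this: emit the replacement after the matched
   window, then unlink the window.  "apply_peephole_example" and its
   arguments are hypothetical, and emit_insn_after is assumed to be
   available from the usual rtl environment.  The NOTEs inside the window
   survive, as documented above.  */

static void
apply_peephole_example (rtx first, rtx last, rtx replacement)
{
  emit_insn_after (replacement, last);
  delete_for_peephole (first, last);
}
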
1561*404b540aSrobert /* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
1562*404b540aSrobert    NLABEL as a return.  Accrue modifications into the change group.  */
1563*404b540aSrobert 
1564*404b540aSrobert static void
1565*404b540aSrobert redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1566*404b540aSrobert {
1567*404b540aSrobert   rtx x = *loc;
1568*404b540aSrobert   RTX_CODE code = GET_CODE (x);
1569*404b540aSrobert   int i;
1570*404b540aSrobert   const char *fmt;
1571*404b540aSrobert 
1572*404b540aSrobert   if (code == LABEL_REF)
1573*404b540aSrobert     {
1574*404b540aSrobert       if (XEXP (x, 0) == olabel)
1575*404b540aSrobert 	{
1576*404b540aSrobert 	  rtx n;
1577*404b540aSrobert 	  if (nlabel)
1578*404b540aSrobert 	    n = gen_rtx_LABEL_REF (Pmode, nlabel);
1579*404b540aSrobert 	  else
1580*404b540aSrobert 	    n = gen_rtx_RETURN (VOIDmode);
1581*404b540aSrobert 
1582*404b540aSrobert 	  validate_change (insn, loc, n, 1);
1583*404b540aSrobert 	  return;
1584*404b540aSrobert 	}
1585*404b540aSrobert     }
1586*404b540aSrobert   else if (code == RETURN && olabel == 0)
1587*404b540aSrobert     {
1588*404b540aSrobert       if (nlabel)
1589*404b540aSrobert 	x = gen_rtx_LABEL_REF (Pmode, nlabel);
1590*404b540aSrobert       else
1591*404b540aSrobert 	x = gen_rtx_RETURN (VOIDmode);
1592*404b540aSrobert       if (loc == &PATTERN (insn))
1593*404b540aSrobert 	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1594*404b540aSrobert       validate_change (insn, loc, x, 1);
1595*404b540aSrobert       return;
1596*404b540aSrobert     }
1597*404b540aSrobert 
1598*404b540aSrobert   if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1599*404b540aSrobert       && GET_CODE (SET_SRC (x)) == LABEL_REF
1600*404b540aSrobert       && XEXP (SET_SRC (x), 0) == olabel)
1601*404b540aSrobert     {
1602*404b540aSrobert       validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1603*404b540aSrobert       return;
1604*404b540aSrobert     }
1605*404b540aSrobert 
1606*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
1607*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1608*404b540aSrobert     {
1609*404b540aSrobert       if (fmt[i] == 'e')
1610*404b540aSrobert 	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1611*404b540aSrobert       else if (fmt[i] == 'E')
1612*404b540aSrobert 	{
1613*404b540aSrobert 	  int j;
1614*404b540aSrobert 	  for (j = 0; j < XVECLEN (x, i); j++)
1615*404b540aSrobert 	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1616*404b540aSrobert 	}
1617*404b540aSrobert     }
1618*404b540aSrobert }
1619*404b540aSrobert 
1620*404b540aSrobert /* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1621*404b540aSrobert    the modifications into the change group.  Return false if we did
1622*404b540aSrobert    not see how to do that.  */
1623*404b540aSrobert 
1624*404b540aSrobert int
1625*404b540aSrobert redirect_jump_1 (rtx jump, rtx nlabel)
1626*404b540aSrobert {
1627*404b540aSrobert   int ochanges = num_validated_changes ();
1628*404b540aSrobert   rtx *loc;
1629*404b540aSrobert 
1630*404b540aSrobert   if (GET_CODE (PATTERN (jump)) == PARALLEL)
1631*404b540aSrobert     loc = &XVECEXP (PATTERN (jump), 0, 0);
1632*404b540aSrobert   else
1633*404b540aSrobert     loc = &PATTERN (jump);
1634*404b540aSrobert 
1635*404b540aSrobert   redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1636*404b540aSrobert   return num_validated_changes () > ochanges;
1637*404b540aSrobert }
1638*404b540aSrobert 
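/* Illustrative sketch, not part of the original file: the point of this
   _1 variant is that a caller can queue several edits and commit them as
   one atomic group, exactly as redirect_jump below does for a single jump.
   Here two jumps are retargeted together; if either queued change fails to
   validate, everything is rolled back.  "retarget_pair_example" is a
   hypothetical name, and jumps already targeting NLABEL are assumed to
   have been filtered out by the caller.  */

static int
retarget_pair_example (rtx jump_a, rtx jump_b, rtx nlabel)
{
  if (! redirect_jump_1 (jump_a, nlabel)
      || ! redirect_jump_1 (jump_b, nlabel)
      || ! apply_change_group ())
    {
      cancel_changes (0);
      return 0;
    }

  /* Only now update JUMP_LABEL and the label reference counts;
     JUMP_LABEL still holds the old label at this point.  */
  redirect_jump_2 (jump_a, JUMP_LABEL (jump_a), nlabel, 0, 0);
  redirect_jump_2 (jump_b, JUMP_LABEL (jump_b), nlabel, 0, 0);
  return 1;
}
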
1639*404b540aSrobert /* Make JUMP go to NLABEL instead of where it jumps now.  If the old
1640*404b540aSrobert    jump target label is unused as a result, it and the code following
1641*404b540aSrobert    it may be deleted.
1642*404b540aSrobert 
1643*404b540aSrobert    If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1644*404b540aSrobert    RETURN insn.
1645*404b540aSrobert 
1646*404b540aSrobert    The return value will be 1 if the change was made, 0 if it wasn't
1647*404b540aSrobert    (this can only occur for NLABEL == 0).  */
1648*404b540aSrobert 
1649*404b540aSrobert int
1650*404b540aSrobert redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1651*404b540aSrobert {
1652*404b540aSrobert   rtx olabel = JUMP_LABEL (jump);
1653*404b540aSrobert 
1654*404b540aSrobert   if (nlabel == olabel)
1655*404b540aSrobert     return 1;
1656*404b540aSrobert 
1657*404b540aSrobert   if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1658*404b540aSrobert     return 0;
1659*404b540aSrobert 
1660*404b540aSrobert   redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1661*404b540aSrobert   return 1;
1662*404b540aSrobert }
1663*404b540aSrobert 
1664*404b540aSrobert /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1665*404b540aSrobert    NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
1666*404b540aSrobert    NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
1667*404b540aSrobert    If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
1668*404b540aSrobert    reference count has dropped to zero.  */
1669*404b540aSrobert void
1670*404b540aSrobert redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
1671*404b540aSrobert 		 int invert)
1672*404b540aSrobert {
1673*404b540aSrobert   rtx note;
1674*404b540aSrobert 
1675*404b540aSrobert   JUMP_LABEL (jump) = nlabel;
1676*404b540aSrobert   if (nlabel)
1677*404b540aSrobert     ++LABEL_NUSES (nlabel);
1678*404b540aSrobert 
1679*404b540aSrobert   /* Update labels in any REG_EQUAL note.  */
1680*404b540aSrobert   if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
1681*404b540aSrobert     {
1682*404b540aSrobert       if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
1683*404b540aSrobert 	remove_note (jump, note);
1684*404b540aSrobert       else
1685*404b540aSrobert 	{
1686*404b540aSrobert 	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
1687*404b540aSrobert 	  confirm_change_group ();
1688*404b540aSrobert 	}
1689*404b540aSrobert     }
1690*404b540aSrobert 
1691*404b540aSrobert   /* If we're eliding the jump over exception cleanups at the end of a
1692*404b540aSrobert      function, move the function end note so that -Wreturn-type works.  */
1693*404b540aSrobert   if (olabel && nlabel
1694*404b540aSrobert       && NEXT_INSN (olabel)
1695*404b540aSrobert       && NOTE_P (NEXT_INSN (olabel))
1696*404b540aSrobert       && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
1697*404b540aSrobert       && delete_unused >= 0)
1698*404b540aSrobert     emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
1699*404b540aSrobert 
1700*404b540aSrobert   if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
1701*404b540aSrobert       /* Undefined labels will remain outside the insn stream.  */
1702*404b540aSrobert       && INSN_UID (olabel))
1703*404b540aSrobert     delete_related_insns (olabel);
1704*404b540aSrobert   if (invert)
1705*404b540aSrobert     invert_br_probabilities (jump);
1706*404b540aSrobert }
1707*404b540aSrobert 
1708*404b540aSrobert /* Invert the jump condition X contained in jump insn INSN.  Accrue the
1709*404b540aSrobert    modifications into the change group.  Return nonzero for success.  */
1710*404b540aSrobert static int
1711*404b540aSrobert invert_exp_1 (rtx x, rtx insn)
1712*404b540aSrobert {
1713*404b540aSrobert   RTX_CODE code = GET_CODE (x);
1714*404b540aSrobert 
1715*404b540aSrobert   if (code == IF_THEN_ELSE)
1716*404b540aSrobert     {
1717*404b540aSrobert       rtx comp = XEXP (x, 0);
1718*404b540aSrobert       rtx tem;
1719*404b540aSrobert       enum rtx_code reversed_code;
1720*404b540aSrobert 
1721*404b540aSrobert       /* We can do this in two ways:  The preferable way is to reverse the
1722*404b540aSrobert 	 comparison code, when reversed_comparison_code can find a safe
1723*404b540aSrobert 	 reversed code.  Otherwise, swap the THEN-part and ELSE-part
1724*404b540aSrobert 	 of the IF_THEN_ELSE.  If we can't do either, fail.  */
1725*404b540aSrobert 
1726*404b540aSrobert       reversed_code = reversed_comparison_code (comp, insn);
1727*404b540aSrobert 
1728*404b540aSrobert       if (reversed_code != UNKNOWN)
1729*404b540aSrobert 	{
1730*404b540aSrobert 	  validate_change (insn, &XEXP (x, 0),
1731*404b540aSrobert 			   gen_rtx_fmt_ee (reversed_code,
1732*404b540aSrobert 					   GET_MODE (comp), XEXP (comp, 0),
1733*404b540aSrobert 					   XEXP (comp, 1)),
1734*404b540aSrobert 			   1);
1735*404b540aSrobert 	  return 1;
1736*404b540aSrobert 	}
1737*404b540aSrobert 
1738*404b540aSrobert       tem = XEXP (x, 1);
1739*404b540aSrobert       validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1740*404b540aSrobert       validate_change (insn, &XEXP (x, 2), tem, 1);
1741*404b540aSrobert       return 1;
1742*404b540aSrobert     }
1743*404b540aSrobert   else
1744*404b540aSrobert     return 0;
1745*404b540aSrobert }
1746*404b540aSrobert 
1747*404b540aSrobert /* Invert the condition of the jump JUMP, and make it jump to label
1748*404b540aSrobert    NLABEL instead of where it jumps now.  Accrue changes into the
1749*404b540aSrobert    change group.  Return false if we didn't see how to perform the
1750*404b540aSrobert    inversion and redirection.  */
1751*404b540aSrobert 
1752*404b540aSrobert int
1753*404b540aSrobert invert_jump_1 (rtx jump, rtx nlabel)
1754*404b540aSrobert {
1755*404b540aSrobert   rtx x = pc_set (jump);
1756*404b540aSrobert   int ochanges;
1757*404b540aSrobert   int ok;
1758*404b540aSrobert 
1759*404b540aSrobert   ochanges = num_validated_changes ();
1760*404b540aSrobert   gcc_assert (x);
1761*404b540aSrobert   ok = invert_exp_1 (SET_SRC (x), jump);
1762*404b540aSrobert   gcc_assert (ok);
1763*404b540aSrobert 
1764*404b540aSrobert   if (num_validated_changes () == ochanges)
1765*404b540aSrobert     return 0;
1766*404b540aSrobert 
1767*404b540aSrobert   /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
1768*404b540aSrobert      in Pmode, so checking this is not merely an optimization.  */
1769*404b540aSrobert   return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1770*404b540aSrobert }
1771*404b540aSrobert 
1772*404b540aSrobert /* Invert the condition of the jump JUMP, and make it jump to label
1773*404b540aSrobert    NLABEL instead of where it jumps now.  Return true if successful.  */
1774*404b540aSrobert 
1775*404b540aSrobert int
1776*404b540aSrobert invert_jump (rtx jump, rtx nlabel, int delete_unused)
1777*404b540aSrobert {
1778*404b540aSrobert   rtx olabel = JUMP_LABEL (jump);
1779*404b540aSrobert 
1780*404b540aSrobert   if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1781*404b540aSrobert     {
1782*404b540aSrobert       redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1783*404b540aSrobert       return 1;
1784*404b540aSrobert     }
1785*404b540aSrobert   cancel_changes (0);
1786*404b540aSrobert   return 0;
1787*404b540aSrobert }
1788*404b540aSrobert 
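/* Illustrative sketch, not part of the original file: invert a
   conditional branch so that it branches where it used to fall through.
   invert_jump_1 asserts pc_set (jump), so the caller should only offer
   insns whose pattern sets the pc.  "invert_branch_example" is a
   hypothetical name.  */

static int
invert_branch_example (rtx cond_jump, rtx new_label)
{
  if (!JUMP_P (cond_jump) || !pc_set (cond_jump))
    return 0;
  return invert_jump (cond_jump, new_label, 1);
}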
1789*404b540aSrobert 
1790*404b540aSrobert /* Like rtx_equal_p except that it considers two REGs as equal
1791*404b540aSrobert    if they renumber to the same value and considers two commutative
1792*404b540aSrobert    operations to be the same if the order of the operands has been
1793*404b540aSrobert    reversed.  */
1794*404b540aSrobert 
1795*404b540aSrobert int
1796*404b540aSrobert rtx_renumbered_equal_p (rtx x, rtx y)
1797*404b540aSrobert {
1798*404b540aSrobert   int i;
1799*404b540aSrobert   enum rtx_code code = GET_CODE (x);
1800*404b540aSrobert   const char *fmt;
1801*404b540aSrobert 
1802*404b540aSrobert   if (x == y)
1803*404b540aSrobert     return 1;
1804*404b540aSrobert 
1805*404b540aSrobert   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1806*404b540aSrobert       && (REG_P (y) || (GET_CODE (y) == SUBREG
1807*404b540aSrobert 				  && REG_P (SUBREG_REG (y)))))
1808*404b540aSrobert     {
1809*404b540aSrobert       int reg_x = -1, reg_y = -1;
1810*404b540aSrobert       int byte_x = 0, byte_y = 0;
1811*404b540aSrobert 
1812*404b540aSrobert       if (GET_MODE (x) != GET_MODE (y))
1813*404b540aSrobert 	return 0;
1814*404b540aSrobert 
1815*404b540aSrobert       /* If we haven't done any renumbering, don't
1816*404b540aSrobert 	 make any assumptions.  */
1817*404b540aSrobert       if (reg_renumber == 0)
1818*404b540aSrobert 	return rtx_equal_p (x, y);
1819*404b540aSrobert 
1820*404b540aSrobert       if (code == SUBREG)
1821*404b540aSrobert 	{
1822*404b540aSrobert 	  reg_x = REGNO (SUBREG_REG (x));
1823*404b540aSrobert 	  byte_x = SUBREG_BYTE (x);
1824*404b540aSrobert 
1825*404b540aSrobert 	  if (reg_renumber[reg_x] >= 0)
1826*404b540aSrobert 	    {
1827*404b540aSrobert 	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
1828*404b540aSrobert 					   GET_MODE (SUBREG_REG (x)),
1829*404b540aSrobert 					   byte_x,
1830*404b540aSrobert 					   GET_MODE (x));
1831*404b540aSrobert 	      byte_x = 0;
1832*404b540aSrobert 	    }
1833*404b540aSrobert 	}
1834*404b540aSrobert       else
1835*404b540aSrobert 	{
1836*404b540aSrobert 	  reg_x = REGNO (x);
1837*404b540aSrobert 	  if (reg_renumber[reg_x] >= 0)
1838*404b540aSrobert 	    reg_x = reg_renumber[reg_x];
1839*404b540aSrobert 	}
1840*404b540aSrobert 
1841*404b540aSrobert       if (GET_CODE (y) == SUBREG)
1842*404b540aSrobert 	{
1843*404b540aSrobert 	  reg_y = REGNO (SUBREG_REG (y));
1844*404b540aSrobert 	  byte_y = SUBREG_BYTE (y);
1845*404b540aSrobert 
1846*404b540aSrobert 	  if (reg_renumber[reg_y] >= 0)
1847*404b540aSrobert 	    {
1848*404b540aSrobert 	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
1849*404b540aSrobert 					   GET_MODE (SUBREG_REG (y)),
1850*404b540aSrobert 					   byte_y,
1851*404b540aSrobert 					   GET_MODE (y));
1852*404b540aSrobert 	      byte_y = 0;
1853*404b540aSrobert 	    }
1854*404b540aSrobert 	}
1855*404b540aSrobert       else
1856*404b540aSrobert 	{
1857*404b540aSrobert 	  reg_y = REGNO (y);
1858*404b540aSrobert 	  if (reg_renumber[reg_y] >= 0)
1859*404b540aSrobert 	    reg_y = reg_renumber[reg_y];
1860*404b540aSrobert 	}
1861*404b540aSrobert 
1862*404b540aSrobert       return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
1863*404b540aSrobert     }
1864*404b540aSrobert 
1865*404b540aSrobert   /* Now we have disposed of all the cases
1866*404b540aSrobert      in which different rtx codes can match.  */
1867*404b540aSrobert   if (code != GET_CODE (y))
1868*404b540aSrobert     return 0;
1869*404b540aSrobert 
1870*404b540aSrobert   switch (code)
1871*404b540aSrobert     {
1872*404b540aSrobert     case PC:
1873*404b540aSrobert     case CC0:
1874*404b540aSrobert     case ADDR_VEC:
1875*404b540aSrobert     case ADDR_DIFF_VEC:
1876*404b540aSrobert     case CONST_INT:
1877*404b540aSrobert     case CONST_DOUBLE:
1878*404b540aSrobert       return 0;
1879*404b540aSrobert 
1880*404b540aSrobert     case LABEL_REF:
1881*404b540aSrobert       /* We can't assume nonlocal labels have their following insns yet.  */
1882*404b540aSrobert       if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1883*404b540aSrobert 	return XEXP (x, 0) == XEXP (y, 0);
1884*404b540aSrobert 
1885*404b540aSrobert       /* Two label-refs are equivalent if they point at labels
1886*404b540aSrobert 	 in the same position in the instruction stream.  */
1887*404b540aSrobert       return (next_real_insn (XEXP (x, 0))
1888*404b540aSrobert 	      == next_real_insn (XEXP (y, 0)));
1889*404b540aSrobert 
1890*404b540aSrobert     case SYMBOL_REF:
1891*404b540aSrobert       return XSTR (x, 0) == XSTR (y, 0);
1892*404b540aSrobert 
1893*404b540aSrobert     case CODE_LABEL:
1894*404b540aSrobert       /* If we didn't match EQ equality above, they aren't the same.  */
1895*404b540aSrobert       return 0;
1896*404b540aSrobert 
1897*404b540aSrobert     default:
1898*404b540aSrobert       break;
1899*404b540aSrobert     }
1900*404b540aSrobert 
1901*404b540aSrobert   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
1902*404b540aSrobert 
1903*404b540aSrobert   if (GET_MODE (x) != GET_MODE (y))
1904*404b540aSrobert     return 0;
1905*404b540aSrobert 
1906*404b540aSrobert   /* For commutative operations, the RTXs match if the operands match in
1907*404b540aSrobert      either order.  Also handle simple binary and unary cases without a loop.  */
1908*404b540aSrobert   if (targetm.commutative_p (x, UNKNOWN))
1909*404b540aSrobert     return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1910*404b540aSrobert 	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1911*404b540aSrobert 	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1912*404b540aSrobert 		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1913*404b540aSrobert   else if (NON_COMMUTATIVE_P (x))
1914*404b540aSrobert     return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1915*404b540aSrobert 	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1916*404b540aSrobert   else if (UNARY_P (x))
1917*404b540aSrobert     return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1918*404b540aSrobert 
1919*404b540aSrobert   /* Compare the elements.  If any pair of corresponding elements
1920*404b540aSrobert      fails to match, return 0 for the whole thing.  */
1921*404b540aSrobert 
1922*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
1923*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1924*404b540aSrobert     {
1925*404b540aSrobert       int j;
1926*404b540aSrobert       switch (fmt[i])
1927*404b540aSrobert 	{
1928*404b540aSrobert 	case 'w':
1929*404b540aSrobert 	  if (XWINT (x, i) != XWINT (y, i))
1930*404b540aSrobert 	    return 0;
1931*404b540aSrobert 	  break;
1932*404b540aSrobert 
1933*404b540aSrobert 	case 'i':
1934*404b540aSrobert 	  if (XINT (x, i) != XINT (y, i))
1935*404b540aSrobert 	    return 0;
1936*404b540aSrobert 	  break;
1937*404b540aSrobert 
1938*404b540aSrobert 	case 't':
1939*404b540aSrobert 	  if (XTREE (x, i) != XTREE (y, i))
1940*404b540aSrobert 	    return 0;
1941*404b540aSrobert 	  break;
1942*404b540aSrobert 
1943*404b540aSrobert 	case 's':
1944*404b540aSrobert 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
1945*404b540aSrobert 	    return 0;
1946*404b540aSrobert 	  break;
1947*404b540aSrobert 
1948*404b540aSrobert 	case 'e':
1949*404b540aSrobert 	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1950*404b540aSrobert 	    return 0;
1951*404b540aSrobert 	  break;
1952*404b540aSrobert 
1953*404b540aSrobert 	case 'u':
1954*404b540aSrobert 	  if (XEXP (x, i) != XEXP (y, i))
1955*404b540aSrobert 	    return 0;
1956*404b540aSrobert 	  /* Fall through.  */
1957*404b540aSrobert 	case '0':
1958*404b540aSrobert 	  break;
1959*404b540aSrobert 
1960*404b540aSrobert 	case 'E':
1961*404b540aSrobert 	  if (XVECLEN (x, i) != XVECLEN (y, i))
1962*404b540aSrobert 	    return 0;
1963*404b540aSrobert 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1964*404b540aSrobert 	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1965*404b540aSrobert 	      return 0;
1966*404b540aSrobert 	  break;
1967*404b540aSrobert 
1968*404b540aSrobert 	default:
1969*404b540aSrobert 	  gcc_unreachable ();
1970*404b540aSrobert 	}
1971*404b540aSrobert     }
1972*404b540aSrobert   return 1;
1973*404b540aSrobert }
1974*404b540aSrobert 
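/* Illustrative example, not part of the original file: assume
   reg_renumber maps pseudo 100 to hard register 3.  Then

     (plus:SI (reg:SI 100) (reg:SI 101))
   and
     (plus:SI (reg:SI 101) (reg:SI 3))

   compare equal here (PLUS is commutative and reg 100 renumbers to
   reg 3), even though rtx_equal_p would reject the pair.  */
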
1975*404b540aSrobert /* If X is a hard register or equivalent to one or a subregister of one,
1976*404b540aSrobert    return the hard register number.  If X is a pseudo register that was not
1977*404b540aSrobert    assigned a hard register, return the pseudo register number.  Otherwise,
1978*404b540aSrobert    return -1.  Any rtx is valid for X.  */
1979*404b540aSrobert 
1980*404b540aSrobert int
1981*404b540aSrobert true_regnum (rtx x)
1982*404b540aSrobert {
1983*404b540aSrobert   if (REG_P (x))
1984*404b540aSrobert     {
1985*404b540aSrobert       if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1986*404b540aSrobert 	return reg_renumber[REGNO (x)];
1987*404b540aSrobert       return REGNO (x);
1988*404b540aSrobert     }
1989*404b540aSrobert   if (GET_CODE (x) == SUBREG)
1990*404b540aSrobert     {
1991*404b540aSrobert       int base = true_regnum (SUBREG_REG (x));
1992*404b540aSrobert       if (base >= 0
1993*404b540aSrobert 	  && base < FIRST_PSEUDO_REGISTER
1994*404b540aSrobert 	  && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
1995*404b540aSrobert 					    GET_MODE (SUBREG_REG (x)),
1996*404b540aSrobert 					    SUBREG_BYTE (x), GET_MODE (x)))
1997*404b540aSrobert 	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1998*404b540aSrobert 					   GET_MODE (SUBREG_REG (x)),
1999*404b540aSrobert 					   SUBREG_BYTE (x), GET_MODE (x));
2000*404b540aSrobert     }
2001*404b540aSrobert   return -1;
2002*404b540aSrobert }
2003*404b540aSrobert 
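/* Illustrative sketch, not part of the original file: after register
   renumbering, a pass can use true_regnum to ask whether two operands
   ended up in the same hard register.  "same_hard_reg_p_example" is a
   hypothetical name.  */

static int
same_hard_reg_p_example (rtx a, rtx b)
{
  int ra = true_regnum (a);

  return ra >= 0 && ra < FIRST_PSEUDO_REGISTER && ra == true_regnum (b);
}
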
2004*404b540aSrobert /* Return the regno of the register REG, handling subregs too.  */
2005*404b540aSrobert unsigned int
2006*404b540aSrobert reg_or_subregno (rtx reg)
2007*404b540aSrobert {
2008*404b540aSrobert   if (GET_CODE (reg) == SUBREG)
2009*404b540aSrobert     reg = SUBREG_REG (reg);
2010*404b540aSrobert   gcc_assert (REG_P (reg));
2011*404b540aSrobert   return REGNO (reg);
2012*404b540aSrobert }
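
/* Illustrative example, not part of the original file:
     reg_or_subregno ((reg:SI 42))               => 42
     reg_or_subregno ((subreg:HI (reg:SI 42) 0)) => 42  */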
2013