1 /* Optimize jump instructions, for GNU compiler.
2    Copyright (C) 1987-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This is the pathetic reminder of old fame of the jump-optimization pass
21    of the compiler.  Now it contains basically a set of utility functions to
22    operate with jumps.
23 
24    Each CODE_LABEL has a count of the times it is used
25    stored in the LABEL_NUSES internal field, and each JUMP_INSN
26    has one label that it refers to stored in the
27    JUMP_LABEL internal field.  With this we can detect labels that
28    become unused because of the deletion of all the jumps that
29    formerly used them.  The JUMP_LABEL info is sometimes looked
30    at by later passes.  For return insns, it contains either a
31    RETURN or a SIMPLE_RETURN rtx.
32 
33    The subroutines redirect_jump and invert_jump are used
34    from other passes as well.  */
35 
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "hard-reg-set.h"
44 #include "regs.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
47 #include "recog.h"
48 #include "function.h"
49 #include "basic-block.h"
50 #include "expr.h"
51 #include "except.h"
52 #include "diagnostic-core.h"
53 #include "reload.h"
54 #include "predict.h"
55 #include "tree-pass.h"
56 #include "target.h"
57 
58 /* Optimize jump y; x: ... y: jumpif... x?
59    Don't know if it is worth bothering with.  */
60 /* Optimize two cases of conditional jump to conditional jump?
61    This can never delete any instruction or make anything dead,
62    or even change what is live at any point.
63    So perhaps let combiner do it.  */
64 
65 static void init_label_info (rtx);
66 static void mark_all_labels (rtx);
67 static void mark_jump_label_1 (rtx, rtx, bool, bool);
68 static void mark_jump_label_asm (rtx, rtx);
69 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
70 static int invert_exp_1 (rtx, rtx);
71 static int returnjump_p_1 (rtx *, void *);
72 
73 /* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
/* F is the first insn of the chain to process.  When COUNT_FORCED is
   true, labels on the forced_labels list receive an extra reference
   so their use counts never drop to zero.  */
static void
rebuild_jump_labels_1 (rtx f, bool count_forced)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  /* Reset all LABEL_NUSES counts, then recompute them (and JUMP_LABEL
     fields) by walking every insn starting at F.  */
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    for (insn = forced_labels; insn; insn = XEXP (insn, 1))
      if (LABEL_P (XEXP (insn, 0)))
	LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
93 
94 /* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
95    notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
96    instructions and jumping insns that have labels as operands
97    (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx f)
{
  /* Count references from forced_labels as well as from the insns
     themselves; F is assumed to be the main function chain.  */
  rebuild_jump_labels_1 (f, true);
}
103 
104 /* This function is like rebuild_jump_labels, but doesn't run over
105    forced_labels.  It can be used on insn chains that aren't the
106    main function chain.  */
void
rebuild_jump_labels_chain (rtx chain)
{
  /* Skip forced_labels: CHAIN need not be the main function chain,
     so those labels may not appear in it at all.  */
  rebuild_jump_labels_1 (chain, false);
}
112 
113 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
114    non-fallthru insn.  This is not generally true, as multiple barriers
115    may have crept in, or the BARRIER may be separated from the last
116    real insn by one or more NOTEs.
117 
118    This simple pass moves barriers and removes duplicates so that the
119    old code is happy.
120  */
121 static unsigned int
cleanup_barriers(void)122 cleanup_barriers (void)
123 {
124   rtx insn, next, prev;
125   for (insn = get_insns (); insn; insn = next)
126     {
127       next = NEXT_INSN (insn);
128       if (BARRIER_P (insn))
129 	{
130 	  prev = prev_nonnote_insn (insn);
131 	  if (!prev)
132 	    continue;
133 	  if (BARRIER_P (prev))
134 	    delete_insn (insn);
135 	  else if (prev != PREV_INSN (insn))
136 	    reorder_insns_nobb (insn, insn, prev);
137 	}
138     }
139   return 0;
140 }
141 
namespace {

/* Static descriptor for the barrier-cleanup RTL pass.  */
const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping cleanup_barriers for the pass manager.  */
class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace
172 
/* Factory entry point used by the pass manager; caller owns the
   returned pass object.  */
rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}
178 
179 
180 /* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
181    for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
182    notes whose labels don't occur in the insn any more.  */
183 
184 static void
init_label_info(rtx f)185 init_label_info (rtx f)
186 {
187   rtx insn;
188 
189   for (insn = f; insn; insn = NEXT_INSN (insn))
190     {
191       if (LABEL_P (insn))
192 	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
193 
194       /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
195 	 sticky and not reset here; that way we won't lose association
196 	 with a label when e.g. the source for a target register
197 	 disappears out of reach for targets that may use jump-target
198 	 registers.  Jump transformations are supposed to transform
199 	 any REG_LABEL_TARGET notes.  The target label reference in a
200 	 branch may disappear from the branch (and from the
201 	 instruction before it) for other reasons, like register
202 	 allocation.  */
203 
204       if (INSN_P (insn))
205 	{
206 	  rtx note, next;
207 
208 	  for (note = REG_NOTES (insn); note; note = next)
209 	    {
210 	      next = XEXP (note, 1);
211 	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
212 		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
213 		remove_note (insn, note);
214 	    }
215 	}
216     }
217 }
218 
219 /* A subroutine of mark_all_labels.  Trivially propagate a simple label
220    load into a jump_insn that uses it.  */
221 
/* JUMP_INSN is a jump without a JUMP_LABEL; PREV_NONJUMP_INSN is the
   closest preceding non-jump insn, which may load the label JUMP_INSN
   branches to.  */
static void
maybe_propagate_label_ref (rtx jump_insn, rtx prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  /* The destination of the label load must feed the jump,
	     either directly or as one arm of its IF_THEN_ELSE.  */
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0) == XEXP (SET_SRC (label_set), 0));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}
266 
267 /* Mark the label each jump jumps to.
268    Combine consecutive labels, and count uses of labels.  */
269 
static void
mark_all_labels (rtx f)
{
  rtx insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! INSN_DELETED_P (insn));
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	}
    }
  else
    {
      rtx prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_DELETED_P (insn))
	    ;
	  else if (LABEL_P (insn))
	    /* A label ends the region across which a label load may be
	       trivially propagated into a following jump.  */
	    prev_nonjump_insn = NULL;
	  else if (JUMP_TABLE_DATA_P (insn))
	    mark_jump_label (PATTERN (insn), insn, 0);
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  /* Jumps that still have no JUMP_LABEL may inherit one
		     from a label load in the preceding insn.  */
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}
326 
327 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
328    of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
329    UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
331    description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
332    to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#else
      return reverse_condition (code);
#endif
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for model
	         of i387 comparisons that first move flag registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If register is clobbered in some ununderstandable way,
	     give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
448 
449 /* A wrapper around the previous function to take COMPARISON as rtx
450    expression.  This simplifies many callers.  */
451 enum rtx_code
reversed_comparison_code(const_rtx comparison,const_rtx insn)452 reversed_comparison_code (const_rtx comparison, const_rtx insn)
453 {
454   if (!COMPARISON_P (comparison))
455     return UNKNOWN;
456   return reversed_comparison_code_parts (GET_CODE (comparison),
457 					 XEXP (comparison, 0),
458 					 XEXP (comparison, 1), insn);
459 }
460 
461 /* Return comparison with reversed code of EXP.
462    Return NULL_RTX in case we fail to do the reversal.  */
463 rtx
reversed_comparison(const_rtx exp,enum machine_mode mode)464 reversed_comparison (const_rtx exp, enum machine_mode mode)
465 {
466   enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
467   if (reversed_code == UNKNOWN)
468     return NULL_RTX;
469   else
470     return simplify_gen_relational (reversed_code, mode, VOIDmode,
471                                     XEXP (exp, 0), XEXP (exp, 1));
472 }
473 
474 
475 /* Given an rtx-code for a comparison, return the code for the negated
476    comparison.  If no such code exists, return UNKNOWN.
477 
478    WATCH OUT!  reverse_condition is not safe to use on a jump that might
479    be acting on the results of an IEEE floating point comparison, because
480    of the special treatment of non-signaling nans in comparisons.
481    Use reversed_comparison_code instead.  */
482 
enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    /* Negating these would require unordered variants, which this
       function must not produce; see the header comment.  */
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
525 
526 /* Similar, but we're allowed to generate unordered comparisons, which
527    makes it safe for IEEE floating-point.  Of course, we have to recognize
528    that the target will support them too...  */
529 
enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    /* Ordered comparisons negate to their unordered counterparts,
       which is what makes this variant IEEE-safe.  */
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
568 
569 /* Similar, but return the code when two operands of a comparison are swapped.
570    This IS safe for IEEE floating-point.  */
571 
enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    /* Symmetric codes are unchanged by swapping the operands.  */
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
614 
615 /* Given a comparison CODE, return the corresponding unsigned comparison.
616    If CODE is an equality comparison or already an unsigned comparison,
617    CODE is returned.  */
618 
619 enum rtx_code
unsigned_condition(enum rtx_code code)620 unsigned_condition (enum rtx_code code)
621 {
622   switch (code)
623     {
624     case EQ:
625     case NE:
626     case GTU:
627     case GEU:
628     case LTU:
629     case LEU:
630       return code;
631 
632     case GT:
633       return GTU;
634     case GE:
635       return GEU;
636     case LT:
637       return LTU;
638     case LE:
639       return LEU;
640 
641     default:
642       gcc_unreachable ();
643     }
644 }
645 
646 /* Similarly, return the signed version of a comparison.  */
647 
648 enum rtx_code
signed_condition(enum rtx_code code)649 signed_condition (enum rtx_code code)
650 {
651   switch (code)
652     {
653     case EQ:
654     case NE:
655     case GT:
656     case GE:
657     case LT:
658     case LE:
659       return code;
660 
661     case GTU:
662       return GT;
663     case GEU:
664       return GE;
665     case LTU:
666       return LT;
667     case LEU:
668       return LE;
669 
670     default:
671       gcc_unreachable ();
672     }
673 }
674 
675 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
676    truth of CODE1 implies the truth of CODE2.  */
677 
678 int
comparison_dominates_p(enum rtx_code code1,enum rtx_code code2)679 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
680 {
681   /* UNKNOWN comparison codes can happen as a result of trying to revert
682      comparison codes.
683      They can't match anything, so we have to reject them here.  */
684   if (code1 == UNKNOWN || code2 == UNKNOWN)
685     return 0;
686 
687   if (code1 == code2)
688     return 1;
689 
690   switch (code1)
691     {
692     case UNEQ:
693       if (code2 == UNLE || code2 == UNGE)
694 	return 1;
695       break;
696 
697     case EQ:
698       if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
699 	  || code2 == ORDERED)
700 	return 1;
701       break;
702 
703     case UNLT:
704       if (code2 == UNLE || code2 == NE)
705 	return 1;
706       break;
707 
708     case LT:
709       if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
710 	return 1;
711       break;
712 
713     case UNGT:
714       if (code2 == UNGE || code2 == NE)
715 	return 1;
716       break;
717 
718     case GT:
719       if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
720 	return 1;
721       break;
722 
723     case GE:
724     case LE:
725       if (code2 == ORDERED)
726 	return 1;
727       break;
728 
729     case LTGT:
730       if (code2 == NE || code2 == ORDERED)
731 	return 1;
732       break;
733 
734     case LTU:
735       if (code2 == LEU || code2 == NE)
736 	return 1;
737       break;
738 
739     case GTU:
740       if (code2 == GEU || code2 == NE)
741 	return 1;
742       break;
743 
744     case UNORDERED:
745       if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
746 	  || code2 == UNGE || code2 == UNGT)
747 	return 1;
748       break;
749 
750     default:
751       break;
752     }
753 
754   return 0;
755 }
756 
757 /* Return 1 if INSN is an unconditional jump and nothing else.  */
758 
759 int
simplejump_p(const_rtx insn)760 simplejump_p (const_rtx insn)
761 {
762   return (JUMP_P (insn)
763 	  && GET_CODE (PATTERN (insn)) == SET
764 	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
765 	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
766 }
767 
768 /* Return nonzero if INSN is a (possibly) conditional jump
769    and nothing more.
770 
771    Use of this function is deprecated, since we need to support combined
772    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
773 
774 int
condjump_p(const_rtx insn)775 condjump_p (const_rtx insn)
776 {
777   const_rtx x = PATTERN (insn);
778 
779   if (GET_CODE (x) != SET
780       || GET_CODE (SET_DEST (x)) != PC)
781     return 0;
782 
783   x = SET_SRC (x);
784   if (GET_CODE (x) == LABEL_REF)
785     return 1;
786   else
787     return (GET_CODE (x) == IF_THEN_ELSE
788 	    && ((GET_CODE (XEXP (x, 2)) == PC
789 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
790 		     || ANY_RETURN_P (XEXP (x, 1))))
791 		|| (GET_CODE (XEXP (x, 1)) == PC
792 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
793 			|| ANY_RETURN_P (XEXP (x, 2))))));
794 }
795 
796 /* Return nonzero if INSN is a (possibly) conditional jump inside a
797    PARALLEL.
798 
   Use of this function is deprecated, since we need to support combined
800    branch and compare insns.  Use any_condjump_p instead whenever possible.  */
801 
802 int
condjump_in_parallel_p(const_rtx insn)803 condjump_in_parallel_p (const_rtx insn)
804 {
805   const_rtx x = PATTERN (insn);
806 
807   if (GET_CODE (x) != PARALLEL)
808     return 0;
809   else
810     x = XVECEXP (x, 0, 0);
811 
812   if (GET_CODE (x) != SET)
813     return 0;
814   if (GET_CODE (SET_DEST (x)) != PC)
815     return 0;
816   if (GET_CODE (SET_SRC (x)) == LABEL_REF)
817     return 1;
818   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
819     return 0;
820   if (XEXP (SET_SRC (x), 2) == pc_rtx
821       && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
822 	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
823     return 1;
824   if (XEXP (SET_SRC (x), 1) == pc_rtx
825       && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
826 	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
827     return 1;
828   return 0;
829 }
830 
831 /* Return set of PC, otherwise NULL.  */
832 
833 rtx
pc_set(const_rtx insn)834 pc_set (const_rtx insn)
835 {
836   rtx pat;
837   if (!JUMP_P (insn))
838     return NULL_RTX;
839   pat = PATTERN (insn);
840 
841   /* The set is allowed to appear either as the insn pattern or
842      the first set in a PARALLEL.  */
843   if (GET_CODE (pat) == PARALLEL)
844     pat = XVECEXP (pat, 0, 0);
845   if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
846     return pat;
847 
848   return NULL_RTX;
849 }
850 
851 /* Return true when insn is an unconditional direct jump,
852    possibly bundled inside a PARALLEL.  */
853 
854 int
any_uncondjump_p(const_rtx insn)855 any_uncondjump_p (const_rtx insn)
856 {
857   const_rtx x = pc_set (insn);
858   if (!x)
859     return 0;
860   if (GET_CODE (SET_SRC (x)) != LABEL_REF)
861     return 0;
862   if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
863     return 0;
864   return 1;
865 }
866 
867 /* Return true when insn is a conditional jump.  This function works for
868    instructions containing PC sets in PARALLELs.  The instruction may have
869    various other effects so before removing the jump you must verify
870    onlyjump_p.
871 
872    Note that unlike condjump_p it returns false for unconditional jumps.  */
873 
874 int
any_condjump_p(const_rtx insn)875 any_condjump_p (const_rtx insn)
876 {
877   const_rtx x = pc_set (insn);
878   enum rtx_code a, b;
879 
880   if (!x)
881     return 0;
882   if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
883     return 0;
884 
885   a = GET_CODE (XEXP (SET_SRC (x), 1));
886   b = GET_CODE (XEXP (SET_SRC (x), 2));
887 
888   return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
889 	  || (a == PC
890 	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
891 }
892 
893 /* Return the label of a conditional jump.  */
894 
895 rtx
condjump_label(const_rtx insn)896 condjump_label (const_rtx insn)
897 {
898   rtx x = pc_set (insn);
899 
900   if (!x)
901     return NULL_RTX;
902   x = SET_SRC (x);
903   if (GET_CODE (x) == LABEL_REF)
904     return x;
905   if (GET_CODE (x) != IF_THEN_ELSE)
906     return NULL_RTX;
907   if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
908     return XEXP (x, 1);
909   if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
910     return XEXP (x, 2);
911   return NULL_RTX;
912 }
913 
914 /* Return true if INSN is a (possibly conditional) return insn.  */
915 
916 static int
returnjump_p_1(rtx * loc,void * data ATTRIBUTE_UNUSED)917 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
918 {
919   rtx x = *loc;
920 
921   if (x == NULL)
922     return false;
923 
924   switch (GET_CODE (x))
925     {
926     case RETURN:
927     case SIMPLE_RETURN:
928     case EH_RETURN:
929       return true;
930 
931     case SET:
932       return SET_IS_RETURN_P (x);
933 
934     default:
935       return false;
936     }
937 }
938 
939 /* Return TRUE if INSN is a return jump.  */
940 
941 int
returnjump_p(rtx insn)942 returnjump_p (rtx insn)
943 {
944   if (!JUMP_P (insn))
945     return 0;
946   return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
947 }
948 
949 /* Return true if INSN is a (possibly conditional) return insn.  */
950 
951 static int
eh_returnjump_p_1(rtx * loc,void * data ATTRIBUTE_UNUSED)952 eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
953 {
954   return *loc && GET_CODE (*loc) == EH_RETURN;
955 }
956 
957 int
eh_returnjump_p(rtx insn)958 eh_returnjump_p (rtx insn)
959 {
960   if (!JUMP_P (insn))
961     return 0;
962   return for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL);
963 }
964 
965 /* Return true if INSN is a jump that only transfers control and
966    nothing more.  */
967 
968 int
onlyjump_p(const_rtx insn)969 onlyjump_p (const_rtx insn)
970 {
971   rtx set;
972 
973   if (!JUMP_P (insn))
974     return 0;
975 
976   set = single_set (insn);
977   if (set == NULL)
978     return 0;
979   if (GET_CODE (SET_DEST (set)) != PC)
980     return 0;
981   if (side_effects_p (SET_SRC (set)))
982     return 0;
983 
984   return 1;
985 }
986 
987 /* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
988    NULL or a return.  */
989 bool
jump_to_label_p(rtx insn)990 jump_to_label_p (rtx insn)
991 {
992   return (JUMP_P (insn)
993 	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
994 }
995 
996 #ifdef HAVE_cc0
997 
998 /* Return nonzero if X is an RTX that only sets the condition codes
999    and has no side effects.  */
1000 
1001 int
only_sets_cc0_p(const_rtx x)1002 only_sets_cc0_p (const_rtx x)
1003 {
1004   if (! x)
1005     return 0;
1006 
1007   if (INSN_P (x))
1008     x = PATTERN (x);
1009 
1010   return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1011 }
1012 
1013 /* Return 1 if X is an RTX that does nothing but set the condition codes
1014    and CLOBBER or USE registers.
1015    Return -1 if X does explicitly set the condition codes,
1016    but also does other things.  */
1017 
1018 int
sets_cc0_p(const_rtx x)1019 sets_cc0_p (const_rtx x)
1020 {
1021   if (! x)
1022     return 0;
1023 
1024   if (INSN_P (x))
1025     x = PATTERN (x);
1026 
1027   if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1028     return 1;
1029   if (GET_CODE (x) == PARALLEL)
1030     {
1031       int i;
1032       int sets_cc0 = 0;
1033       int other_things = 0;
1034       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1035 	{
1036 	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
1037 	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1038 	    sets_cc0 = 1;
1039 	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1040 	    other_things = 1;
1041 	}
1042       return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1043     }
1044   return 0;
1045 }
1046 #endif
1047 
1048 /* Find all CODE_LABELs referred to in X, and increment their use
1049    counts.  If INSN is a JUMP_INSN and there is at least one
1050    CODE_LABEL referenced in INSN as a jump target, then store the last
1051    one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
1052    for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
1053    notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
1054    a JUMP_INSN, and there is at least one CODE_LABEL referenced in
1055    INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
1056    For returnjumps, the JUMP_LABEL will also be set as appropriate.
1057 
1058    Note that two labels separated by a loop-beginning note
1059    must be kept distinct if we have not yet done loop-optimization,
1060    because the gap between them is where loop-optimize
1061    will want to move invariant code to.  CROSS_JUMP tells us
1062    that loop-optimization is done with.  */
1063 
1064 void
mark_jump_label(rtx x,rtx insn,int in_mem)1065 mark_jump_label (rtx x, rtx insn, int in_mem)
1066 {
1067   rtx asmop = extract_asm_operands (x);
1068   if (asmop)
1069     mark_jump_label_asm (asmop, insn);
1070   else
1071     mark_jump_label_1 (x, insn, in_mem != 0,
1072 		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1073 }
1074 
/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  Recurses over the whole of X.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes never contain a label reference; nothing to do.  */
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      /* A return rtx used as a jump target is stored directly in
	 JUMP_LABEL.  It must not conflict with a previously recorded
	 different target.  */
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      /* Everything below this MEM counts as occurring in memory; this
	 also enables the constant-pool SYMBOL_REF handling below.  */
      in_mem = true;
      break;

    case SEQUENCE:
      /* Each element of the SEQUENCE is itself an insn; mark its
	 pattern with that element as the containing insn.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
			 XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      /* The condition (operand 0) is never a jump target; the two
	 arms (operands 1 and 2) are.  */
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	/* References from deleted insns do not contribute to the
	   label's use count.  */
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  /* Operand 0 of an ADDR_DIFF_VEC is the base label; the
	     vector of entries is operand 1 there, operand 0 in an
	     ADDR_VEC.  */
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  /* Passing NULL_RTX as the insn leaves JUMP_LABEL alone and
	     only bumps use counts.  */
	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}
1216 
1217 /* Worker function for mark_jump_label.  Handle asm insns specially.
1218    In particular, output operands need not be considered so we can
1219    avoid re-scanning the replicated asm_operand.  Also, the asm_labels
1220    need to be considered targets.  */
1221 
1222 static void
mark_jump_label_asm(rtx asmop,rtx insn)1223 mark_jump_label_asm (rtx asmop, rtx insn)
1224 {
1225   int i;
1226 
1227   for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
1228     mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);
1229 
1230   for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
1231     mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
1232 }
1233 
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Find the first following insn that has not already been deleted;
     this is the candidate return value.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If this is a call, then we have to remove the var tracking note
     for the call arguments.  */

  if (CALL_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
    {
      rtx p;

      /* Scan the following notes (skipping NEXT if it was deleted as
	 a barrier just above) for a CALL_ARG_LOCATION note and remove
	 the first one found.  */
      for (p = next && INSN_DELETED_P (next) ? NEXT_INSN (next) : next;
	   p && NOTE_P (p);
	   p = NEXT_INSN (p))
	if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
	  {
	    remove_insn (p);
	    break;
	  }
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      /* For an ADDR_DIFF_VEC the entries are operand 1; for an
	 ADDR_VEC they are operand 0.  */
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Recursively delete every label in the table whose use count
	 has dropped to zero.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Back up over deleted insns and notes to find the live predecessor
     (used below to detect a preceding BARRIER).  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
  return next;
}
1384 
1385 /* Delete a range of insns from FROM to TO, inclusive.
1386    This is for the sake of peephole optimization, so assume
1387    that whatever these insns do will still be done by a new
1388    peephole insn that will replace them.  */
1389 
1390 void
delete_for_peephole(rtx from,rtx to)1391 delete_for_peephole (rtx from, rtx to)
1392 {
1393   rtx insn = from;
1394 
1395   while (1)
1396     {
1397       rtx next = NEXT_INSN (insn);
1398       rtx prev = PREV_INSN (insn);
1399 
1400       if (!NOTE_P (insn))
1401 	{
1402 	  INSN_DELETED_P (insn) = 1;
1403 
1404 	  /* Patch this insn out of the chain.  */
1405 	  /* We don't do this all at once, because we
1406 	     must preserve all NOTEs.  */
1407 	  if (prev)
1408 	    NEXT_INSN (prev) = next;
1409 
1410 	  if (next)
1411 	    PREV_INSN (next) = prev;
1412 	}
1413 
1414       if (insn == to)
1415 	break;
1416       insn = next;
1417     }
1418 
1419   /* Note that if TO is an unconditional jump
1420      we *do not* delete the BARRIER that follows,
1421      since the peephole that replaces this sequence
1422      is also an unconditional jump in that case.  */
1423 }
1424 
1425 /* A helper function for redirect_exp_1; examines its input X and returns
1426    either a LABEL_REF around a label, or a RETURN if X was NULL.  */
1427 static rtx
redirect_target(rtx x)1428 redirect_target (rtx x)
1429 {
1430   if (x == NULL_RTX)
1431     return ret_rtx;
1432   if (!ANY_RETURN_P (x))
1433     return gen_rtx_LABEL_REF (Pmode, x);
1434   return x;
1435 }
1436 
1437 /* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
1438    NLABEL as a return.  Accrue modifications into the change group.  */
1439 
1440 static void
redirect_exp_1(rtx * loc,rtx olabel,rtx nlabel,rtx insn)1441 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1442 {
1443   rtx x = *loc;
1444   RTX_CODE code = GET_CODE (x);
1445   int i;
1446   const char *fmt;
1447 
1448   if ((code == LABEL_REF && XEXP (x, 0) == olabel)
1449       || x == olabel)
1450     {
1451       x = redirect_target (nlabel);
1452       if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
1453  	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1454       validate_change (insn, loc, x, 1);
1455       return;
1456     }
1457 
1458   if (code == SET && SET_DEST (x) == pc_rtx
1459       && ANY_RETURN_P (nlabel)
1460       && GET_CODE (SET_SRC (x)) == LABEL_REF
1461       && XEXP (SET_SRC (x), 0) == olabel)
1462     {
1463       validate_change (insn, loc, nlabel, 1);
1464       return;
1465     }
1466 
1467   if (code == IF_THEN_ELSE)
1468     {
1469       /* Skip the condition of an IF_THEN_ELSE.  We only want to
1470          change jump destinations, not eventual label comparisons.  */
1471       redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
1472       redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
1473       return;
1474     }
1475 
1476   fmt = GET_RTX_FORMAT (code);
1477   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1478     {
1479       if (fmt[i] == 'e')
1480 	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1481       else if (fmt[i] == 'E')
1482 	{
1483 	  int j;
1484 	  for (j = 0; j < XVECLEN (x, i); j++)
1485 	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1486 	}
1487     }
1488 }
1489 
1490 /* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1491    the modifications into the change group.  Return false if we did
1492    not see how to do that.  */
1493 
1494 int
redirect_jump_1(rtx jump,rtx nlabel)1495 redirect_jump_1 (rtx jump, rtx nlabel)
1496 {
1497   int ochanges = num_validated_changes ();
1498   rtx *loc, asmop;
1499 
1500   gcc_assert (nlabel != NULL_RTX);
1501   asmop = extract_asm_operands (PATTERN (jump));
1502   if (asmop)
1503     {
1504       if (nlabel == NULL)
1505 	return 0;
1506       gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
1507       loc = &ASM_OPERANDS_LABEL (asmop, 0);
1508     }
1509   else if (GET_CODE (PATTERN (jump)) == PARALLEL)
1510     loc = &XVECEXP (PATTERN (jump), 0, 0);
1511   else
1512     loc = &PATTERN (jump);
1513 
1514   redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1515   return num_validated_changes () > ochanges;
1516 }
1517 
1518 /* Make JUMP go to NLABEL instead of where it jumps now.  If the old
1519    jump target label is unused as a result, it and the code following
1520    it may be deleted.
1521 
1522    Normally, NLABEL will be a label, but it may also be a RETURN rtx;
1523    in that case we are to turn the jump into a (possibly conditional)
1524    return insn.
1525 
1526    The return value will be 1 if the change was made, 0 if it wasn't
1527    (this can only occur when trying to produce return insns).  */
1528 
1529 int
redirect_jump(rtx jump,rtx nlabel,int delete_unused)1530 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1531 {
1532   rtx olabel = JUMP_LABEL (jump);
1533 
1534   if (!nlabel)
1535     {
1536       /* If there is no label, we are asked to redirect to the EXIT block.
1537 	 When before the epilogue is emitted, return/simple_return cannot be
1538 	 created so we return 0 immediately.  After the epilogue is emitted,
1539 	 we always expect a label, either a non-null label, or a
1540 	 return/simple_return RTX.  */
1541 
1542       if (!epilogue_completed)
1543 	return 0;
1544       gcc_unreachable ();
1545     }
1546 
1547   if (nlabel == olabel)
1548     return 1;
1549 
1550   if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1551     return 0;
1552 
1553   redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1554   return 1;
1555 }
1556 
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.
   If INVERT is nonzero, the jump's condition was inverted as well; any
   REG_EQUAL note is inverted to match and invert_br_probabilities is
   applied.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to be used to signalize behavior on
     moving FUNCTION_END note.  Just sanity check that no user still worry
     about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  /* RETURN/SIMPLE_RETURN targets carry no use count.  */
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  Drop the note entirely if
     the new target is a return, or if inverting its expression
     fails.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    {
      note = find_reg_note (jump, REG_CROSSING_JUMP, NULL_RTX);
      if (note)
	remove_note (jump, note);
    }

  /* Decrement the old label's use count; delete it and dependent code
     when it becomes unused and the caller asked for that.  */
  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
1608 
1609 /* Invert the jump condition X contained in jump insn INSN.  Accrue the
1610    modifications into the change group.  Return nonzero for success.  */
1611 static int
invert_exp_1(rtx x,rtx insn)1612 invert_exp_1 (rtx x, rtx insn)
1613 {
1614   RTX_CODE code = GET_CODE (x);
1615 
1616   if (code == IF_THEN_ELSE)
1617     {
1618       rtx comp = XEXP (x, 0);
1619       rtx tem;
1620       enum rtx_code reversed_code;
1621 
1622       /* We can do this in two ways:  The preferable way, which can only
1623 	 be done if this is not an integer comparison, is to reverse
1624 	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
1625 	 of the IF_THEN_ELSE.  If we can't do either, fail.  */
1626 
1627       reversed_code = reversed_comparison_code (comp, insn);
1628 
1629       if (reversed_code != UNKNOWN)
1630 	{
1631 	  validate_change (insn, &XEXP (x, 0),
1632 			   gen_rtx_fmt_ee (reversed_code,
1633 					   GET_MODE (comp), XEXP (comp, 0),
1634 					   XEXP (comp, 1)),
1635 			   1);
1636 	  return 1;
1637 	}
1638 
1639       tem = XEXP (x, 1);
1640       validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1641       validate_change (insn, &XEXP (x, 2), tem, 1);
1642       return 1;
1643     }
1644   else
1645     return 0;
1646 }
1647 
1648 /* Invert the condition of the jump JUMP, and make it jump to label
1649    NLABEL instead of where it jumps now.  Accrue changes into the
1650    change group.  Return false if we didn't see how to perform the
1651    inversion and redirection.  */
1652 
1653 int
invert_jump_1(rtx jump,rtx nlabel)1654 invert_jump_1 (rtx jump, rtx nlabel)
1655 {
1656   rtx x = pc_set (jump);
1657   int ochanges;
1658   int ok;
1659 
1660   ochanges = num_validated_changes ();
1661   if (x == NULL)
1662     return 0;
1663   ok = invert_exp_1 (SET_SRC (x), jump);
1664   gcc_assert (ok);
1665 
1666   if (num_validated_changes () == ochanges)
1667     return 0;
1668 
1669   /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1670      in Pmode, so checking this is not merely an optimization.  */
1671   return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1672 }
1673 
1674 /* Invert the condition of the jump JUMP, and make it jump to label
1675    NLABEL instead of where it jumps now.  Return true if successful.  */
1676 
1677 int
invert_jump(rtx jump,rtx nlabel,int delete_unused)1678 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1679 {
1680   rtx olabel = JUMP_LABEL (jump);
1681 
1682   if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1683     {
1684       redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1685       return 1;
1686     }
1687   cancel_changes (0);
1688   return 0;
1689 }
1690 
1691 
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers and subregs of registers: compare the (renumbered) hard
     register numbers and subreg byte offsets.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  /* If the inner reg was renumbered, fold the subreg offset
	     into the hard register number when representable.  */
	  if (reg_renumber[reg_x] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      /* Unique rtxes only match by pointer equality, handled above.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol strings are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      /* Operand 6 of ASM_OPERANDS / operand 1 of ASM_INPUT is
		 exempt from the comparison (presumably location info
		 — see rtl.def; confirm).  */
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
1889 
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      /* Map a renumbered pseudo to its assigned hard register.  During
	 LRA the renumber table is consulted unconditionally, so an
	 unassigned pseudo yields the (negative) table entry rather
	 than the pseudo number.  */
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      /* Only subregs of hard registers can be folded to a register
	 number here.  */
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  /* NOTE(review): during LRA the already-resolved BASE is used
	     for the subreg query instead of the raw inner REGNO —
	     presumably because reg_renumber is live then; confirm.  */
	  subreg_get_info (lra_in_progress
			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}
1924 
1925 /* Return regno of the register REG and handle subregs too.  */
1926 unsigned int
reg_or_subregno(const_rtx reg)1927 reg_or_subregno (const_rtx reg)
1928 {
1929   if (GET_CODE (reg) == SUBREG)
1930     reg = SUBREG_REG (reg);
1931   gcc_assert (REG_P (reg));
1932   return REGNO (reg);
1933 }
1934