/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the vestigial remainder of the compiler's formerly famous
   jump-optimization pass.  It now contains essentially a set of
   utility functions for operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
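
/* For instance, a direct jump whose pattern is

     (set (pc) (label_ref 23))

   has its JUMP_LABEL pointing at (code_label 23 ...), and that label's
   LABEL_NUSES counts this reference among its uses.  (Label numbers
   here are illustrative only.)  */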

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "rtl-iter.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);

/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  rtx_insn_list *insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    for (insn = forced_labels; insn; insn = insn->next ())
      if (LABEL_P (insn->insn ()))
	LABEL_NUSES (insn->insn ())++;
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
	{
	  rtx_insn *prev = prev_nonnote_insn (insn);
	  if (!prev)
	    continue;

	  if (CALL_P (prev))
	    {
	      /* Make sure we do not split a call and its corresponding
		 CALL_ARG_LOCATION note.  */
	      rtx_insn *next = NEXT_INSN (prev);

	      if (NOTE_P (next)
		  && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		prev = next;
	    }

	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    {
	      basic_block bb = BLOCK_FOR_INSN (prev);
	      rtx_insn *end = PREV_INSN (insn);
	      reorder_insns_nobb (insn, insn, prev);
	      if (bb)
		{
		  /* If the backend called compute_bb_for_insn in machine
		     reorg and didn't call free_bb_for_insn again, preserve
		     the basic block boundaries.  Move the end of the basic
		     block to PREV, since it is followed by a barrier now,
		     and clear BLOCK_FOR_INSN on the following notes.
		     ???  Maybe the proper solution for the targets that
		     have a cfg around after machine reorg is not to run
		     the cleanup_barriers pass at all.  */
		  BB_END (bb) = prev;
		  do
		    {
		      prev = NEXT_INSN (prev);
		      if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
			BLOCK_FOR_INSN (prev) = NULL;
		    }
		  while (prev != end);
		}
	    }
	}
    }
  return 0;
}

namespace {

const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}


/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx_insn *f)
{
  rtx_insn *insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
	 sticky and not reset here; that way we won't lose association
	 with a label when e.g. the source for a target register
	 disappears out of reach for targets that may use jump-target
	 registers.  Jump transformations are supposed to transform
	 any REG_LABEL_TARGET notes.  The target label reference in a
	 branch may disappear from the branch (and from the
	 instruction before it) for other reasons, like register
	 allocation.  */

      if (INSN_P (insn))
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */
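
/* For instance (a sketch; register and label names are illustrative):

     (insn (set (reg R) (label_ref L)))   ;; carries a REG_LABEL_OPERAND note
     (jump_insn (set (pc) (reg R)))

   Here the jump's JUMP_LABEL becomes L.  */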

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
	  /* The source must be the direct LABEL_REF, not a
	     PLUS, UNSPEC, IF_THEN_ELSE etc.  */
	  && GET_CODE (SET_SRC (label_set)) == LABEL_REF
	  && (rtx_equal_p (label_dest, pc_src)
	      || (GET_CODE (pc_src) == IF_THEN_ELSE
		  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
		      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
	{
	  /* The CODE_LABEL referred to in the note must be the
	     CODE_LABEL in the LABEL_REF of the "set".  We can
	     conveniently use it for the marker function, which
	     requires a LABEL_REF wrapping.  */
	  gcc_assert (XEXP (label_note, 0) == LABEL_REF_LABEL (SET_SRC (label_set)));

	  mark_jump_label_1 (label_set, jump_insn, false, true);

	  gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
	}
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! insn->deleted ());
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (JUMP_TABLE_DATA_P (insn))
	      mark_jump_label (PATTERN (insn), insn, 0);
	}
    }
  else
    {
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (insn->deleted ())
	    ;
	  else if (LABEL_P (insn))
	    prev_nonjump_insn = NULL;
	  else if (JUMP_TABLE_DATA_P (insn))
	    mark_jump_label (PATTERN (insn), insn, 0);
	  else if (NONDEBUG_INSN_P (insn))
	    {
	      mark_jump_label (PATTERN (insn), insn, 0);
	      if (JUMP_P (insn))
		{
		  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
		    maybe_propagate_label_ref (insn, prev_nonjump_insn);
		}
	      else
		prev_nonjump_insn = insn;
	    }
	}
    }
}

/* Given a comparison (CODE ARG0 ARG1) inside insn INSN, return the code
   of the reversed comparison if it is possible to do so.  Otherwise
   return UNKNOWN.  UNKNOWN may be returned when we have a CC_MODE
   comparison and we don't know whether its source is a floating-point
   or an integer comparison.  The machine description should define the
   REVERSIBLE_CC_MODE and REVERSE_CONDITION macros to help this function
   avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
				const_rtx arg1, const_rtx insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else to
     allow the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    return REVERSE_CONDITION (code, mode);

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure to
	 be dealing with floating point, so we don't need any more
	 tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
	return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
	 returns its argument and we assign it to a const_rtx
	 variable.  */
      for (rtx_insn *prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  const_rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
	         the model of i387 comparisons that first move flag
	         registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we do not
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}

/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */
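
/* For example, reverse_condition (GT) is LE; but under IEEE semantics
   !(x > y) does not imply x <= y, because both comparisons are false
   when either operand is a NaN.  The correct negation of GT is UNLE,
   as produced by reverse_condition_maybe_unordered below.  */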

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */
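
/* E.g. GT becomes UNLE ("unordered or less than or equal"), which is
   true exactly when GT is false, NaN operands included.  */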

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the code when the two operands of a comparison are
   swapped.  This IS safe for IEEE floating-point.  */
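
/* For instance, (lt x y) is equivalent to (gt y x), so
   swap_condition (LT) == GT.  No ordering information is lost, which
   is why this is safe even in the presence of NaNs.  */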

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */
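
/* E.g. unsigned_condition (LT) == LTU, while unsigned_condition (EQ)
   stays EQ, since equality does not depend on signedness.  */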

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */
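
/* For example, comparison_dominates_p (EQ, GE) is 1 because a == b
   implies a >= b, and comparison_dominates_p (LT, NE) is 1 because
   a < b implies a != b.  Conversely, comparison_dominates_p (LE, LT)
   is 0: a <= b does not imply a < b.  */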

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to
     reverse comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}

/* Return 1 if INSN is an unconditional jump and nothing else.  */
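
/* For example, this accepts a plain direct jump whose pattern is

     (set (pc) (label_ref 23))

   but rejects conditional branches, return insns, and jumps wrapped
   in a PARALLEL.  (The label number is illustrative.)  */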

int
simplejump_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
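
/* A typical pattern accepted here looks like

     (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
			     (label_ref 23)
			     (pc)))

   where either arm of the IF_THEN_ELSE may be the label (or a return)
   and the other arm falls through via (pc).  */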

int
condjump_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || ANY_RETURN_P (XEXP (x, 1))))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| ANY_RETURN_P (XEXP (x, 2))))));
}

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

/* Return the set of PC in INSN, otherwise NULL.  */
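
/* The set is found whether the pattern is the SET itself or the first
   element of a PARALLEL, so e.g. a branch bundled with a clobber

     (parallel [(set (pc) (if_then_else ...))
		(clobber (reg:CC 17))])

   still yields its (set (pc) ...).  */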

rtx
pc_set (const rtx_insn *insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when INSN is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when INSN is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */
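
/* For instance, a conditional branch bundled in a PARALLEL with a
   clobber is accepted here but rejected by condjump_p, which insists
   on a bare SET pattern.  */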

int
any_condjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
	  || (a == PC
	      && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const rtx_insn *insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	{
	  const_rtx x = *iter;
	  switch (GET_CODE (x))
	    {
	    case RETURN:
	    case SIMPLE_RETURN:
	    case EH_RETURN:
	      return true;

	    case SET:
	      if (SET_IS_RETURN_P (x))
		return true;
	      break;

	    default:
	      break;
	    }
	}
    }
  return false;
}

/* Return true if INSN is a jump whose pattern contains an EH_RETURN
   rtx, i.e. an eh_return jump.  */

int
eh_returnjump_p (rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
	if (GET_CODE (*iter) == EH_RETURN)
	  return true;
    }
  return false;
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const rtx_insn *insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */
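
/* For example, (set (cc0) (compare (reg A) (reg B))) yields 1, while
   (parallel [(set (cc0) ...) (set (reg C) ...)]) yields -1 because the
   insn has effects beyond setting cc0.  */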

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  (Historically, a CROSS_JUMP
   argument told us that loop optimization was done with.)  */

void
mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
		       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
	{
	  gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
	  JUMP_LABEL (insn) = x;
	}
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	for (i = 0; i < seq->len (); i++)
	  mark_jump_label (PATTERN (seq->insn (i)),
			   seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
	 non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
	break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
	rtx label = LABEL_REF_LABEL (x);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	LABEL_REF_LABEL (x) = label;
	if (! insn || ! insn->deleted ())
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (is_target
		/* Do not change a previous setting of JUMP_LABEL.  If the
		   JUMP_LABEL slot is occupied by a different label,
		   create a note for this label.  */
		&& (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		enum reg_note kind
		  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

		/* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
		   for LABEL unless there already is one.  All uses of
		   a label, except for the primary target of a jump,
		   must have such a note.  */
		if (! find_reg_note (insn, kind, label))
		  add_reg_note (insn, kind, label);
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
			       is_target);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
			       is_target);
	}
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx_insn *insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}

/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this function is deprecated.  Use delete_insn instead, with
   a subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If this is a call, then we have to remove the var tracking note
     for the call arguments.  */

  if (CALL_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
    {
      rtx_insn *p;

      for (p = next && next->deleted () ? NEXT_INSN (next) : next;
	   p && NOTE_P (p);
	   p = NEXT_INSN (p))
	if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
	  {
	    remove_insn (p);
	    break;
	  }
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      if (LABEL_NUSES (lab) == 0)
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
	  delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      while (next && next->deleted ())
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
	   || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && next->deleted ())
	    next = NEXT_INSN (next);
	  /* Keep the (use (insn))s created by dbr_schedule, which needs
	     them in order to track liveness relative to a previous
	     barrier.  */
	  else if (INSN_P (next)
		   && GET_CODE (PATTERN (next)) == USE
		   && INSN_P (XEXP (PATTERN (next), 0)))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn = from;

  while (1)
    {
      rtx_insn *next = NEXT_INSN (insn);
      rtx_insn *prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  insn->set_deleted();

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    SET_NEXT_INSN (prev) = next;

	  if (next)
	    SET_PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && LABEL_REF_LABEL (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
	x = gen_rtx_SET (pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && LABEL_REF_LABEL (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx_insn *jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
	return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */
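
/* A minimal usage sketch (JUMP is an rtx_jump_insn * the caller holds;
   NEW_LABEL is an illustrative CODE_LABEL, not something defined here):

     if (jump_to_label_p (jump) && onlyjump_p (jump))
       redirect_jump (jump, NEW_LABEL, 1);

   Passing 1 for DELETE_UNUSED lets the old target label, and any code
   that thereby becomes unreachable, be deleted once the label's
   reference count drops to zero.  */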

int
redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = jump->jump_label ();

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
	 Before the epilogue is emitted, a return/simple_return cannot be
	 created, so we return 0 immediately.  After the epilogue is
	 emitted, we always expect a label, either a non-null label, or a
	 return/simple_return RTX.  */

      if (!epilogue_completed)
	return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete insns related to OLABEL if its ref
   count has dropped to zero.  */
void
redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to signal behavior regarding the moving
     of the FUNCTION_END note.  Just sanity-check that no user still
     worries about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
	  || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */
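
/* A minimal usage sketch (NEW_LABEL is an illustrative CODE_LABEL):

     if (any_condjump_p (jump))
       invert_jump (jump, NEW_LABEL, 1);

   On success the branch condition is reversed, the jump targets
   NEW_LABEL, and the branch probabilities are updated to match.  */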

int
invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}


/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */
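
/* For instance, after renumbering, (plus:SI (reg 65) (reg 66)) and
   (plus:SI (reg 66) (reg 65)) compare equal here, since PLUS is
   commutative, and two REGs match when reg_renumber maps them to the
   same hard register.  (Register numbers are illustrative.)  */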

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
				  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_x],
			       GET_MODE (SUBREG_REG (x)), byte_x,
			       GET_MODE (x), &info);
	      if (!info.representable_p)
		return 0;
	      reg_x = info.offset;
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      subreg_get_info (reg_renumber[reg_y],
			       GET_MODE (SUBREG_REG (y)), byte_y,
			       GET_MODE (y), &info);
	      if (!info.representable_p)
		return 0;
	      reg_y = info.offset;
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      else
	{
	  rtx_insn *xi = next_nonnote_nondebug_insn (LABEL_REF_LABEL (x));
	  rtx_insn *yi = next_nonnote_nondebug_insn (LABEL_REF_LABEL (y));
	  while (xi && LABEL_P (xi))
	    xi = next_nonnote_nondebug_insn (xi);
	  while (yi && LABEL_P (yi))
	    yi = next_nonnote_nondebug_insn (yi);
	  return xi == yi;
	}

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1)))
		break;
	      return 0;
	    }
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */
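
/* E.g. if pseudo 75 was assigned hard register 4, true_regnum on
   (reg:SI 75) returns 4; if the pseudo got no hard register (outside
   of LRA), the result is 75; for a MEM or a constant, the result
   is -1.  (Register numbers here are illustrative.)  */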

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER)
	{
	  struct subreg_info info;

	  subreg_get_info (lra_in_progress
			   ? (unsigned) base : REGNO (SUBREG_REG (x)),
			   GET_MODE (SUBREG_REG (x)),
			   SUBREG_BYTE (x), GET_MODE (x), &info);

	  if (info.representable_p)
	    return base + info.offset;
	}
    }
  return -1;
}

/* Return the regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}