/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the pathetic remnant of the old fame of the jump-optimization
   pass of the compiler.  Now it contains basically a set of utility
   functions to operate on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
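
/* For example, an unconditional jump to label L is a JUMP_INSN whose
   pattern is (set (pc) (label_ref L)); its JUMP_LABEL field points at
   the CODE_LABEL for L, and LABEL_NUSES (L) counts that reference
   together with any other uses of L.  */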

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "rtl-iter.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx_insn *);
static void mark_all_labels (rtx_insn *);
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
static void mark_jump_label_asm (rtx, rtx_insn *);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *);
static int invert_exp_1 (rtx, rtx_insn *);

/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    {
      rtx_insn *insn;
      unsigned int i;
      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
        if (LABEL_P (insn))
          LABEL_NUSES (insn)++;
    }
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx_insn *f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx_insn *chain)
{
  rebuild_jump_labels_1 (chain, false);
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.
 */
static unsigned int
cleanup_barriers (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
        {
          rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
          if (!prev)
            continue;

          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            {
              basic_block bb = BLOCK_FOR_INSN (prev);
              rtx_insn *end = PREV_INSN (insn);
              reorder_insns_nobb (insn, insn, prev);
              if (bb)
                {
                  /* If the backend called in machine reorg compute_bb_for_insn
                     and didn't free_bb_for_insn again, preserve basic block
                     boundaries.  Move the end of basic block to PREV since
                     it is followed by a barrier now, and clear BLOCK_FOR_INSN
                     on the following notes.
                     ???  Maybe the proper solution for the targets that have
                     cfg around after machine reorg is not to run cleanup_barriers
                     pass at all.  */
                  BB_END (bb) = prev;
                  do
                    {
                      prev = NEXT_INSN (prev);
                      if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
                        BLOCK_FOR_INSN (prev) = NULL;
                    }
                  while (prev != end);
                }
            }
        }
    }
  return 0;
}
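
/* For instance, if one or more NOTEs separate a jump from its BARRIER,
   the loop above moves the barrier up so that it immediately follows
   the jump, and back-to-back barriers collapse into a single one.  */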

namespace {

const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}


/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx_insn *f)
{
  rtx_insn *insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
         sticky and not reset here; that way we won't lose association
         with a label when e.g. the source for a target register
         disappears out of reach for targets that may use jump-target
         registers.  Jump transformations are supposed to transform
         any REG_LABEL_TARGET notes.  The target label reference in a
         branch may disappear from the branch (and from the
         instruction before it) for other reasons, like register
         allocation.  */

      if (INSN_P (insn))
        {
          rtx note, next;

          for (note = REG_NOTES (insn); note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                remove_note (insn, note);
            }
        }
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */

static void
maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
          /* The source must be the direct LABEL_REF, not a
             PLUS, UNSPEC, IF_THEN_ELSE etc.  */
          && GET_CODE (SET_SRC (label_set)) == LABEL_REF
          && (rtx_equal_p (label_dest, pc_src)
              || (GET_CODE (pc_src) == IF_THEN_ELSE
                  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
                      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
        {
          /* The CODE_LABEL referred to in the note must be the
             CODE_LABEL in the LABEL_REF of the "set".  We can
             conveniently use it for the marker function, which
             requires a LABEL_REF wrapping.  */
          gcc_assert (XEXP (label_note, 0) == label_ref_label (SET_SRC (label_set)));

          mark_jump_label_1 (label_set, jump_insn, false, true);

          gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
        }
    }
}
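
/* As an illustrative sketch (register numbers hypothetical), a target
   that jumps through a register might emit:

     (set (reg 100) (label_ref L))   ; has a REG_LABEL_OPERAND note for L
     (set (pc) (reg 100))            ; JUMP_LABEL initially unset

   in which case the propagation above records L as the JUMP_LABEL of
   the second insn.  */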

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx_insn *f)
{
  rtx_insn *insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
        {
          /* In cfglayout mode, we don't bother with trivial next-insn
             propagation of LABEL_REFs into JUMP_LABEL.  This will be
             handled by other optimizers using better algorithms.  */
          FOR_BB_INSNS (bb, insn)
            {
              gcc_assert (! insn->deleted ());
              if (NONDEBUG_INSN_P (insn))
                mark_jump_label (PATTERN (insn), insn, 0);
            }

          /* In cfglayout mode, there may be non-insns between the
             basic blocks.  If those non-insns represent tablejump data,
             they contain label references that we must record.  */
          for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
            if (JUMP_TABLE_DATA_P (insn))
              mark_jump_label (PATTERN (insn), insn, 0);
          for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
            if (JUMP_TABLE_DATA_P (insn))
              mark_jump_label (PATTERN (insn), insn, 0);
        }
    }
  else
    {
      rtx_insn *prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
        {
          if (insn->deleted ())
            ;
          else if (LABEL_P (insn))
            prev_nonjump_insn = NULL;
          else if (JUMP_TABLE_DATA_P (insn))
            mark_jump_label (PATTERN (insn), insn, 0);
          else if (NONDEBUG_INSN_P (insn))
            {
              mark_jump_label (PATTERN (insn), insn, 0);
              if (JUMP_P (insn))
                {
                  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
                    maybe_propagate_label_ref (insn, prev_nonjump_insn);
                }
              else
                prev_nonjump_insn = insn;
            }
        }
    }
}
/* Given a comparison (CODE ARG0 ARG1) inside insn INSN, return the code
   of the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may be returned for a CC_MODE comparison when we don't
   know whether its source is a floating point or an integer comparison.
   The machine description should define the REVERSIBLE_CC_MODE and
   REVERSE_CONDITION macros to help this function avoid overhead in these
   cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
                                const_rtx arg1, const rtx_insn *insn)
{
  machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else to
     allow the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    return REVERSE_CONDITION (code, mode);

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
         returns its argument and we assign it to a const_rtx
         variable.  */
      for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          const_rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 models like the i387, where comparisons first move the
                 flag registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If the register is clobbered in some way we don't
             understand, give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
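
/* A minimal usage sketch (the operand rtxes are assumed to come from
   the caller's comparison):

     enum rtx_code rev
       = reversed_comparison_code_parts (code, op0, op1, insn);
     if (rev != UNKNOWN)
       ... use REV in place of CODE ...

   Callers must always be prepared to handle UNKNOWN.  */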

/* A wrapper around the previous function that takes COMPARISON as an
   rtx expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}

/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
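
/* For example, reverse_condition (GT) is LE and reverse_condition (LTU)
   is GEU.  reverse_condition (UNLT) is UNKNOWN even though GE is its
   logical negation, because GE may signal on NaN operands where UNLT
   would not; reverse_condition_maybe_unordered below does return GE.  */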

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
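
/* E.g. reverse_condition_maybe_unordered (GT) is UNLE: when a NaN makes
   the operands unordered, GT is false, so its negation must be true,
   and UNLE (unordered or less-or-equal) is.  */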

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
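
/* E.g. swap_condition (GT) is LT, since (GT x y) and (LT y x) test the
   same thing; symmetric codes such as EQ, NE, UNEQ and LTGT map to
   themselves.  This holds even when NaNs are involved, hence it is
   IEEE-safe.  */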

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}
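
/* E.g. unsigned_condition (LT) is LTU and signed_condition (GEU) is GE;
   EQ and NE, which do not depend on signedness, pass through both
   functions unchanged.  */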

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
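
/* E.g. comparison_dominates_p (EQ, GE) is 1, since x == y implies
   x >= y, whereas comparison_dominates_p (GE, EQ) is 0: knowing x >= y
   tells us nothing about equality.  */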

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || ANY_RETURN_P (XEXP (x, 1))))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || ANY_RETURN_P (XEXP (x, 2))))));
}
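
/* A pattern accepted by condjump_p looks like, e.g.,

     (set (pc) (if_then_else (eq (reg 100) (const_int 0))
                             (label_ref L)
                             (pc)))

   with the label or return allowed in either arm, or simply
   (set (pc) (label_ref L)) for the unconditional case; the register
   number here is purely illustrative.  */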

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const rtx_insn *insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

/* Return set of PC, otherwise NULL.  */

rtx
pc_set (const rtx_insn *insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL, UNSPEC or UNSPEC_VOLATILE.  */
  switch (GET_CODE (pat))
    {
    case PARALLEL:
    case UNSPEC:
    case UNSPEC_VOLATILE:
      pat = XVECEXP (pat, 0, 0);
      break;
    default:
      break;
    }
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
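
/* E.g. given a PARALLEL such as

     (parallel [(set (pc) (label_ref L))
                (clobber (reg:CC 17))])

   pc_set returns the inner (set (pc) (label_ref L)); the hard register
   number 17 here is purely illustrative.  */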

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL, UNSPEC or UNSPEC_VOLATILE.
   The instruction may have various other effects so before removing the jump
   you must verify onlyjump_p.  */

int
any_uncondjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs, UNSPECs or UNSPEC_VOLATILEs.
   The instruction may have various other effects so before removing the jump
   you must verify onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const rtx_insn *insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
          || (a == PC
              && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const rtx_insn *insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (const rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
        {
          const_rtx x = *iter;
          switch (GET_CODE (x))
            {
            case RETURN:
            case SIMPLE_RETURN:
            case EH_RETURN:
              return true;

            case SET:
              if (SET_IS_RETURN_P (x))
                return true;
              break;

            default:
              break;
            }
        }
    }
  return false;
}

/* Return true if INSN is an EH return jump, i.e. contains an
   EH_RETURN rtx.  */

int
eh_returnjump_p (rtx_insn *insn)
{
  if (JUMP_P (insn))
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
        if (GET_CODE (*iter) == EH_RETURN)
          return true;
    }
  return false;
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const rtx_insn *insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
          && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
                       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
        {
          gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
          JUMP_LABEL (insn) = x;
        }
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      {
        rtx_sequence *seq = as_a <rtx_sequence *> (x);
        for (i = 0; i < seq->len (); i++)
          mark_jump_label (PATTERN (seq->insn (i)),
                           seq->insn (i), 0);
      }
      return;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
         non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
        break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
        rtx_insn *label = label_ref_label (x);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        set_label_ref_label (x, label);
        if (! insn || ! insn->deleted ())
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (is_target
                /* Do not change a previous setting of JUMP_LABEL.  If the
                   JUMP_LABEL slot is occupied by a different label,
                   create a note for this label.  */
                && (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
              JUMP_LABEL (insn) = label;
            else
              {
                enum reg_note kind
                  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

                /* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
                   for LABEL unless there already is one.  All uses of
                   a label, except for the primary target of a jump,
                   must have such a note.  */
                if (! find_reg_note (insn, kind, label))
                  add_reg_note (insn, kind, label);
              }
          }
        return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! insn->deleted ())
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
                               is_target);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
                               is_target);
        }
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx_insn *insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}

/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx_insn *
delete_related_insns (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);

  while (next && next->deleted ())
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (insn->deleted ())
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn);
      rtx_jump_table_data *lab_next;

      if (LABEL_NUSES (lab) == 0)
        /* This can delete NEXT or PREV,
           either directly if NEXT is JUMP_LABEL (INSN),
           or indirectly through more levels of jumps.  */
        delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec labels = table->get_labels ();
      int i;
      int len = GET_NUM_ELEM (labels);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
          delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
      while (next && next->deleted ())
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
           || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (prev->deleted () || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && next->deleted ())
            next = NEXT_INSN (next);
          /* Keep the (use (insn))s created by dbr_schedule, which needs
             them in order to track liveness relative to a previous
             barrier.  */
          else if (INSN_P (next)
                   && GET_CODE (PATTERN (next)) == USE
                   && INSN_P (XEXP (PATTERN (next), 0)))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && next->deleted ())
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn = from;

  while (1)
    {
      rtx_insn *next = NEXT_INSN (insn);
      rtx_insn *prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          insn->set_deleted ();

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            SET_NEXT_INSN (prev) = next;

          if (next)
            SET_PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF around a label, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx_insn *insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && label_ref_label (x) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
        x = gen_rtx_SET (pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && label_ref_label (SET_SRC (x)) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx_insn *jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
        return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
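
/* Note that redirect_jump_1 only queues replacements in the change
   group; a caller such as redirect_jump below must still call
   apply_change_group to commit them and then redirect_jump_2 to fix up
   JUMP_LABEL and the label use counts.  */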

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = jump->jump_label ();

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
         Before the epilogue is emitted, return/simple_return cannot be
         created, so we return 0 immediately.  After the epilogue is emitted,
         we always expect a label, either a non-null label, or a
         return/simple_return RTX.  */

      if (!epilogue_completed)
        return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
   ref count has dropped to zero.  */
void
redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* A negative DELETE_UNUSED used to signal behavior when moving the
     FUNCTION_END note.  Just sanity-check that no caller still relies
     on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
          || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx_insn *insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}


/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                                  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      poly_int64 byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              subreg_get_info (reg_renumber[reg_x],
                               GET_MODE (SUBREG_REG (x)), byte_x,
                               GET_MODE (x), &info);
              if (!info.representable_p)
                return 0;
              reg_x = info.offset;
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              subreg_get_info (reg_renumber[reg_y],
                               GET_MODE (SUBREG_REG (y)), byte_y,
                               GET_MODE (y), &info);
              if (!info.representable_p)
                return 0;
              reg_y = info.offset;
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && known_eq (byte_x, byte_y);
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
        return false;
      break;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return label_ref_label (x) == label_ref_label (y);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      else
        {
          rtx_insn *xi = next_nonnote_nondebug_insn (label_ref_label (x));
          rtx_insn *yi = next_nonnote_nondebug_insn (label_ref_label (y));
          while (xi && LABEL_P (xi))
            xi = next_nonnote_nondebug_insn (xi);
          while (yi && LABEL_P (yi))
            yi = next_nonnote_nondebug_insn (yi);
          return xi == yi;
        }

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            {
              if (((code == ASM_OPERANDS && i == 6)
                   || (code == ASM_INPUT && i == 1)))
                break;
              return 0;
            }
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
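
/* For example, after register allocation (reg 130) and (reg 131), in
   the same mode, compare equal here whenever reg_renumber assigned both
   pseudos to the same hard register; plain rtx_equal_p would say they
   differ.  The pseudo numbers are illustrative.  */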

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
          && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER)
        {
          struct subreg_info info;

          subreg_get_info (lra_in_progress
                           ? (unsigned) base : REGNO (SUBREG_REG (x)),
                           GET_MODE (SUBREG_REG (x)),
                           SUBREG_BYTE (x), GET_MODE (x), &info);

          if (info.representable_p)
            return base + info.offset;
        }
    }
  return -1;
}
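
/* E.g. if pseudo 200 was assigned hard register 3, true_regnum on
   (reg 200) returns 3; if it received no hard register (and LRA is not
   in progress), it returns 200 itself.  The numbers are illustrative.  */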

/* Return regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}