1 /* If-conversion support.
2    Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 2, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING.  If not, write to the Free
18    Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19    02111-1307, USA.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43 
44 
45 #ifndef HAVE_conditional_execution
46 #define HAVE_conditional_execution 0
47 #endif
48 #ifndef HAVE_conditional_move
49 #define HAVE_conditional_move 0
50 #endif
51 #ifndef HAVE_incscc
52 #define HAVE_incscc 0
53 #endif
54 #ifndef HAVE_decscc
55 #define HAVE_decscc 0
56 #endif
57 #ifndef HAVE_trap
58 #define HAVE_trap 0
59 #endif
60 #ifndef HAVE_conditional_trap
61 #define HAVE_conditional_trap 0
62 #endif
63 
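/* The default maximum number of insns we consider converting to conditional
   execution, used when the target does not define its own
   MAX_CONDITIONAL_EXECUTE: one more than the cost of a taken branch.  */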
64 #ifndef MAX_CONDITIONAL_EXECUTE
65 #define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
66 #endif
67 
68 #define NULL_EDGE	((struct edge_def *)NULL)
69 #define NULL_BLOCK	((struct basic_block_def *)NULL)
70 
71 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
72 static int num_possible_if_blocks;
73 
74 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
75    execution.  */
76 static int num_updated_if_blocks;
77 
78 /* # of changes made which require life information to be updated.  */
79 static int num_true_changes;
80 
81 /* Whether conditional execution changes were made.  */
82 static int cond_exec_changed_p;
83 
84 /* True if life data is ok at present.  */
85 static bool life_data_ok;
86 
87 /* Forward references.  */
88 static int count_bb_insns (basic_block);
89 static rtx first_active_insn (basic_block);
90 static rtx last_active_insn (basic_block, int);
91 static int seq_contains_jump (rtx);
92 static basic_block block_fallthru (basic_block);
93 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
94 static rtx cond_exec_get_condition (rtx);
95 static int cond_exec_process_if_block (ce_if_block_t *, int);
96 static rtx noce_get_condition (rtx, rtx *);
97 static int noce_operand_ok (rtx);
98 static int noce_process_if_block (ce_if_block_t *);
99 static int process_if_block (ce_if_block_t *);
100 static void merge_if_block (ce_if_block_t *);
101 static int find_cond_trap (basic_block, edge, edge);
102 static basic_block find_if_header (basic_block, int);
103 static int block_jumps_and_fallthru_p (basic_block, basic_block);
104 static int find_if_block (ce_if_block_t *);
105 static int find_if_case_1 (basic_block, edge, edge);
106 static int find_if_case_2 (basic_block, edge, edge);
107 static int find_memory (rtx *, void *);
108 static int dead_or_predicable (basic_block, basic_block, basic_block,
109 			       basic_block, int);
110 static void noce_emit_move_insn (rtx, rtx);
111 static rtx block_has_only_trap (basic_block);
112 static void mark_loop_exit_edges (void);
113 
114 /* Sets EDGE_LOOP_EXIT flag for all loop exits.  */
115 static void
116 mark_loop_exit_edges (void)
117 {
118   struct loops loops;
119   basic_block bb;
120   edge e;
121 
122   flow_loops_find (&loops, LOOP_TREE);
123   free_dominance_info (CDI_DOMINATORS);
124 
125   if (loops.num > 1)
126     {
127       FOR_EACH_BB (bb)
128 	{
129 	  for (e = bb->succ; e; e = e->succ_next)
130 	    {
131 	      if (find_common_loop (bb->loop_father, e->dest->loop_father)
132 		  != bb->loop_father)
133 		e->flags |= EDGE_LOOP_EXIT;
134 	      else
135 		e->flags &= ~EDGE_LOOP_EXIT;
136 	    }
137 	}
138     }
139 
140   flow_loops_free (&loops);
141 }
142 
143 /* Count the number of non-jump active insns in BB.  */
144 
145 static int
146 count_bb_insns (basic_block bb)
147 {
148   int count = 0;
149   rtx insn = BB_HEAD (bb);
150 
151   while (1)
152     {
153       if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
154 	count++;
155 
156       if (insn == BB_END (bb))
157 	break;
158       insn = NEXT_INSN (insn);
159     }
160 
161   return count;
162 }
163 
164 /* Return the first non-jump active insn in the basic block.  */
165 
166 static rtx
167 first_active_insn (basic_block bb)
168 {
169   rtx insn = BB_HEAD (bb);
170 
171   if (GET_CODE (insn) == CODE_LABEL)
172     {
173       if (insn == BB_END (bb))
174 	return NULL_RTX;
175       insn = NEXT_INSN (insn);
176     }
177 
178   while (GET_CODE (insn) == NOTE)
179     {
180       if (insn == BB_END (bb))
181 	return NULL_RTX;
182       insn = NEXT_INSN (insn);
183     }
184 
185   if (GET_CODE (insn) == JUMP_INSN)
186     return NULL_RTX;
187 
188   return insn;
189 }
190 
191 /* Return the last active (non-jump) insn in the basic block.  */
192 
193 static rtx
194 last_active_insn (basic_block bb, int skip_use_p)
195 {
196   rtx insn = BB_END (bb);
197   rtx head = BB_HEAD (bb);
198 
199   while (GET_CODE (insn) == NOTE
200 	 || GET_CODE (insn) == JUMP_INSN
201 	 || (skip_use_p
202 	     && GET_CODE (insn) == INSN
203 	     && GET_CODE (PATTERN (insn)) == USE))
204     {
205       if (insn == head)
206 	return NULL_RTX;
207       insn = PREV_INSN (insn);
208     }
209 
210   if (GET_CODE (insn) == CODE_LABEL)
211     return NULL_RTX;
212 
213   return insn;
214 }
215 
216 /* It is possible, especially when expanding multi-word arithmetic,
217    for the expanders to have emitted jumps.  Search through the
218    sequence and return TRUE if a jump exists so that we can abort
219    the conversion.  */
220 
221 static int
222 seq_contains_jump (rtx insn)
223 {
224   while (insn)
225     {
226       if (GET_CODE (insn) == JUMP_INSN)
227 	return 1;
228       insn = NEXT_INSN (insn);
229     }
230   return 0;
231 }
232 
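/* Return the basic block reached by falling through the basic block BB,
   or NULL_BLOCK if BB has no fallthru successor.  */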
233 static basic_block
234 block_fallthru (basic_block bb)
235 {
236   edge e;
237 
238   for (e = bb->succ;
239        e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
240        e = e->succ_next)
241     ;
242 
243   return (e) ? e->dest : NULL_BLOCK;
244 }
245 
246 /* Go through a bunch of insns, converting them to conditional
247    execution format if possible.  Return TRUE if all of the non-note
248    insns were processed.  */
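/* For example, given the test (ne (reg 100) (const_int 0)), an insn whose
   pattern is (set (reg 101) (const_int 1)) is rewritten as

     (cond_exec (ne (reg 100) (const_int 0))
                (set (reg 101) (const_int 1)))

   The register numbers here are purely illustrative.  */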
249 
250 static int
251 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
252 			 /* if block information */rtx start,
253 			 /* first insn to look at */rtx end,
254 			 /* last insn to look at */rtx test,
255 			 /* conditional execution test */rtx prob_val,
256 			 /* probability of branch taken. */int mod_ok)
257 {
258   int must_be_last = FALSE;
259   rtx insn;
260   rtx xtest;
261   rtx pattern;
262 
263   if (!start || !end)
264     return FALSE;
265 
266   for (insn = start; ; insn = NEXT_INSN (insn))
267     {
268       if (GET_CODE (insn) == NOTE)
269 	goto insn_done;
270 
271       if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
272 	abort ();
273 
274       /* Remove USE insns that get in the way.  */
275       if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
276 	{
277 	  /* ??? Ug.  Actually unlinking the thing is problematic,
278 	     given what we'd have to coordinate with our callers.  */
279 	  PUT_CODE (insn, NOTE);
280 	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
281 	  NOTE_SOURCE_FILE (insn) = 0;
282 	  goto insn_done;
283 	}
284 
285       /* Last insn wasn't last?  */
286       if (must_be_last)
287 	return FALSE;
288 
289       if (modified_in_p (test, insn))
290 	{
291 	  if (!mod_ok)
292 	    return FALSE;
293 	  must_be_last = TRUE;
294 	}
295 
296       /* Now build the conditional form of the instruction.  */
297       pattern = PATTERN (insn);
298       xtest = copy_rtx (test);
299 
300       /* If this is already a COND_EXEC, rewrite the test to be an AND of the
301          two conditions.  */
302       if (GET_CODE (pattern) == COND_EXEC)
303 	{
304 	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
305 	    return FALSE;
306 
307 	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
308 			       COND_EXEC_TEST (pattern));
309 	  pattern = COND_EXEC_CODE (pattern);
310 	}
311 
312       pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
313 
314       /* If the machine needs to modify the insn being conditionally executed,
315          say for example to force a constant integer operand into a temp
316          register, do so here.  */
317 #ifdef IFCVT_MODIFY_INSN
318       IFCVT_MODIFY_INSN (ce_info, pattern, insn);
319       if (! pattern)
320 	return FALSE;
321 #endif
322 
323       validate_change (insn, &PATTERN (insn), pattern, 1);
324 
325       if (GET_CODE (insn) == CALL_INSN && prob_val)
326 	validate_change (insn, &REG_NOTES (insn),
327 			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
328 					  REG_NOTES (insn)), 1);
329 
330     insn_done:
331       if (insn == end)
332 	break;
333     }
334 
335   return TRUE;
336 }
337 
338 /* Return the condition for a jump.  Do not do any special processing.  */
339 
340 static rtx
341 cond_exec_get_condition (rtx jump)
342 {
343   rtx test_if, cond;
344 
345   if (any_condjump_p (jump))
346     test_if = SET_SRC (pc_set (jump));
347   else
348     return NULL_RTX;
349   cond = XEXP (test_if, 0);
350 
351   /* If this branches to JUMP_LABEL when the condition is false,
352      reverse the condition.  */
353   if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
354       && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
355     {
356       enum rtx_code rev = reversed_comparison_code (cond, jump);
357       if (rev == UNKNOWN)
358 	return NULL_RTX;
359 
360       cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
361 			     XEXP (cond, 1));
362     }
363 
364   return cond;
365 }
366 
367 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
368    to conditional execution.  Return TRUE if we were successful at
369    converting the block.  */
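/* For example, a source construct such as

     if (test) x = 1; else y = 2;

   becomes straight-line code in which the former THEN insn is wrapped in a
   COND_EXEC executed only when TEST holds and the former ELSE insn in a
   COND_EXEC executed only when it does not; the blocks are then merged and
   the conditional branch goes away.  */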
370 
371 static int
372 cond_exec_process_if_block (ce_if_block_t * ce_info,
373 			    /* if block information */int do_multiple_p)
374 {
375   basic_block test_bb = ce_info->test_bb;	/* last test block */
376   basic_block then_bb = ce_info->then_bb;	/* THEN */
377   basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
378   rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
379   rtx then_start;		/* first insn in THEN block */
380   rtx then_end;			/* last insn + 1 in THEN block */
381   rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
382   rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
383   int max;			/* max # of insns to convert.  */
384   int then_mod_ok;		/* whether conditional mods are ok in THEN */
385   rtx true_expr;		/* test for else block insns */
386   rtx false_expr;		/* test for then block insns */
387   rtx true_prob_val;		/* probability of else block */
388   rtx false_prob_val;		/* probability of then block */
389   int n_insns;
390   enum rtx_code false_code;
391 
392   /* If test is comprised of && or || elements, and we've failed at handling
393      all of them together, just use the last test if it is the special case of
394      && elements without an ELSE block.  */
395   if (!do_multiple_p && ce_info->num_multiple_test_blocks)
396     {
397       if (else_bb || ! ce_info->and_and_p)
398 	return FALSE;
399 
400       ce_info->test_bb = test_bb = ce_info->last_test_bb;
401       ce_info->num_multiple_test_blocks = 0;
402       ce_info->num_and_and_blocks = 0;
403       ce_info->num_or_or_blocks = 0;
404     }
405 
406   /* Find the conditional jump to the ELSE or JOIN part, and isolate
407      the test.  */
408   test_expr = cond_exec_get_condition (BB_END (test_bb));
409   if (! test_expr)
410     return FALSE;
411 
412   /* If the conditional jump is more than just a conditional jump,
413      then we can not do conditional execution conversion on this block.  */
414   if (! onlyjump_p (BB_END (test_bb)))
415     return FALSE;
416 
417   /* Collect the bounds of where we're to search, skipping any labels, jumps
418      and notes at the beginning and end of the block.  Then count the total
419      number of insns and see if it is small enough to convert.  */
420   then_start = first_active_insn (then_bb);
421   then_end = last_active_insn (then_bb, TRUE);
422   n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
423   max = MAX_CONDITIONAL_EXECUTE;
424 
425   if (else_bb)
426     {
427       max *= 2;
428       else_start = first_active_insn (else_bb);
429       else_end = last_active_insn (else_bb, TRUE);
430       n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
431     }
432 
433   if (n_insns > max)
434     return FALSE;
435 
436   /* Map test_expr/test_jump into the appropriate MD tests to use on
437      the conditionally executed code.  */
438 
439   true_expr = test_expr;
440 
441   false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
442   if (false_code != UNKNOWN)
443     false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
444 				 XEXP (true_expr, 0), XEXP (true_expr, 1));
445   else
446     false_expr = NULL_RTX;
447 
448 #ifdef IFCVT_MODIFY_TESTS
449   /* If the machine description needs to modify the tests, such as setting a
450      conditional execution register from a comparison, it can do so here.  */
451   IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
452 
453   /* See if the conversion failed.  */
454   if (!true_expr || !false_expr)
455     goto fail;
456 #endif
457 
458   true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
459   if (true_prob_val)
460     {
461       true_prob_val = XEXP (true_prob_val, 0);
462       false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
463     }
464   else
465     false_prob_val = NULL_RTX;
466 
467   /* If we have && or || tests, do them here.  These tests are in the adjacent
468      blocks after the first block containing the test.  */
469   if (ce_info->num_multiple_test_blocks > 0)
470     {
471       basic_block bb = test_bb;
472       basic_block last_test_bb = ce_info->last_test_bb;
473 
474       if (! false_expr)
475 	goto fail;
476 
477       do
478 	{
479 	  rtx start, end;
480 	  rtx t, f;
481 
482 	  bb = block_fallthru (bb);
483 	  start = first_active_insn (bb);
484 	  end = last_active_insn (bb, TRUE);
485 	  if (start
486 	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
487 					    false_prob_val, FALSE))
488 	    goto fail;
489 
490 	  /* If the conditional jump is more than just a conditional jump, then
491 	     we can not do conditional execution conversion on this block.  */
492 	  if (! onlyjump_p (BB_END (bb)))
493 	    goto fail;
494 
495 	  /* Find the conditional jump and isolate the test.  */
496 	  t = cond_exec_get_condition (BB_END (bb));
497 	  if (! t)
498 	    goto fail;
499 
500 	  f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
501 			      GET_MODE (t),
502 			      XEXP (t, 0),
503 			      XEXP (t, 1));
504 
505 	  if (ce_info->and_and_p)
506 	    {
507 	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
508 	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
509 	    }
510 	  else
511 	    {
512 	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
513 	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
514 	    }
515 
516 	  /* If the machine description needs to modify the tests, such as
517 	     setting a conditional execution register from a comparison, it can
518 	     do so here.  */
519 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
520 	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
521 
522 	  /* See if the conversion failed.  */
523 	  if (!t || !f)
524 	    goto fail;
525 #endif
526 
527 	  true_expr = t;
528 	  false_expr = f;
529 	}
530       while (bb != last_test_bb);
531     }
532 
533   /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
534      in the THEN block.  */
535   then_mod_ok = (else_bb == NULL_BLOCK);
536 
537   /* Go through the THEN and ELSE blocks converting the insns if possible
538      to conditional execution.  */
539 
540   if (then_end
541       && (! false_expr
542 	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
543 					false_expr, false_prob_val,
544 					then_mod_ok)))
545     goto fail;
546 
547   if (else_bb && else_end
548       && ! cond_exec_process_insns (ce_info, else_start, else_end,
549 				    true_expr, true_prob_val, TRUE))
550     goto fail;
551 
552   /* If we cannot apply the changes, fail.  Do not go through the normal fail
553      processing, since apply_change_group will call cancel_changes.  */
554   if (! apply_change_group ())
555     {
556 #ifdef IFCVT_MODIFY_CANCEL
557       /* Cancel any machine dependent changes.  */
558       IFCVT_MODIFY_CANCEL (ce_info);
559 #endif
560       return FALSE;
561     }
562 
563 #ifdef IFCVT_MODIFY_FINAL
564   /* Do any machine dependent final modifications.  */
565   IFCVT_MODIFY_FINAL (ce_info);
566 #endif
567 
568   /* Conversion succeeded.  */
569   if (rtl_dump_file)
570     fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
571 	     n_insns, (n_insns == 1) ? " was" : "s were");
572 
573   /* Merge the blocks!  */
574   merge_if_block (ce_info);
575   cond_exec_changed_p = TRUE;
576   return TRUE;
577 
578  fail:
579 #ifdef IFCVT_MODIFY_CANCEL
580   /* Cancel any machine dependent changes.  */
581   IFCVT_MODIFY_CANCEL (ce_info);
582 #endif
583 
584   cancel_changes (0);
585   return FALSE;
586 }
587 
588 /* Used by noce_process_if_block to communicate with its subroutines.
589 
590    The subroutines know that A and B may be evaluated freely.  They
591    know that X is a register.  They should insert new instructions
592    before cond_earliest.  */
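/* TEST_BB is the block ending in the conditional jump JUMP; COND is the
   condition extracted from JUMP, and COND_EARLIEST is the earliest insn at
   which that condition is known to be valid.  INSN_A is the lone insn
   setting X in the THEN block; INSN_B, when there is an ELSE block, is the
   corresponding insn there.  */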
593 
594 struct noce_if_info
595 {
596   basic_block test_bb;
597   rtx insn_a, insn_b;
598   rtx x, a, b;
599   rtx jump, cond, cond_earliest;
600 };
601 
602 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
603 static int noce_try_move (struct noce_if_info *);
604 static int noce_try_store_flag (struct noce_if_info *);
605 static int noce_try_addcc (struct noce_if_info *);
606 static int noce_try_store_flag_constants (struct noce_if_info *);
607 static int noce_try_store_flag_mask (struct noce_if_info *);
608 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
609 			    rtx, rtx, rtx);
610 static int noce_try_cmove (struct noce_if_info *);
611 static int noce_try_cmove_arith (struct noce_if_info *);
612 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
613 static int noce_try_minmax (struct noce_if_info *);
614 static int noce_try_abs (struct noce_if_info *);
615 
616 /* Helper function for noce_try_store_flag*.  */
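/* Emit a sequence that stores the value of the condition (reversed if
   REVERSEP) into X, normalized as requested by NORMALIZE, and return an
   rtx holding the result, or NULL_RTX if no usable sequence could be
   produced.  */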
617 
618 static rtx
619 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
620 		      int normalize)
621 {
622   rtx cond = if_info->cond;
623   int cond_complex;
624   enum rtx_code code;
625 
626   cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
627 		  || ! general_operand (XEXP (cond, 1), VOIDmode));
628 
629   /* If earliest == jump, or when the condition is complex, try to
630      build the store_flag insn directly.  */
631 
632   if (cond_complex)
633     cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
634 
635   if (reversep)
636     code = reversed_comparison_code (cond, if_info->jump);
637   else
638     code = GET_CODE (cond);
639 
640   if ((if_info->cond_earliest == if_info->jump || cond_complex)
641       && (normalize == 0 || STORE_FLAG_VALUE == normalize))
642     {
643       rtx tmp;
644 
645       tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
646 			    XEXP (cond, 1));
647       tmp = gen_rtx_SET (VOIDmode, x, tmp);
648 
649       start_sequence ();
650       tmp = emit_insn (tmp);
651 
652       if (recog_memoized (tmp) >= 0)
653 	{
654 	  tmp = get_insns ();
655 	  end_sequence ();
656 	  emit_insn (tmp);
657 
658 	  if_info->cond_earliest = if_info->jump;
659 
660 	  return x;
661 	}
662 
663       end_sequence ();
664     }
665 
666   /* Don't even try if the comparison operands or the mode of X are weird.  */
667   if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
668     return NULL_RTX;
669 
670   return emit_store_flag (x, code, XEXP (cond, 0),
671 			  XEXP (cond, 1), VOIDmode,
672 			  (code == LTU || code == LEU
673 			   || code == GEU || code == GTU), normalize);
674 }
675 
676 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
677    X is the destination/target and Y is the value to copy.  */
678 
679 static void
680 noce_emit_move_insn (rtx x, rtx y)
681 {
682   enum machine_mode outmode, inmode;
683   rtx outer, inner;
684   int bitpos;
685 
686   if (GET_CODE (x) != STRICT_LOW_PART)
687     {
688       emit_move_insn (x, y);
689       return;
690     }
691 
692   outer = XEXP (x, 0);
693   inner = XEXP (outer, 0);
694   outmode = GET_MODE (outer);
695   inmode = GET_MODE (inner);
696   bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
697   store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
698 		   GET_MODE_BITSIZE (inmode));
699 }
700 
701 /* Unshare sequence SEQ produced by if conversion.  We take care to mark
702    all arguments that may be shared with the outer instruction stream.  */
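/* X and COND are reused from the existing instruction stream, so any
   copies of them appearing in SEQ must be unshared; later RTL passes
   assume rtl is unshared except where sharing is explicitly permitted.  */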
703 static void
704 unshare_ifcvt_sequence (struct noce_if_info *if_info, rtx seq)
705 {
706   set_used_flags (if_info->x);
707   set_used_flags (if_info->cond);
708   unshare_all_rtl_in_chain (seq);
709 }
710 
711 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
712    "if (a == b) x = a; else x = b" into "x = b".  */
713 
714 static int
715 noce_try_move (struct noce_if_info *if_info)
716 {
717   rtx cond = if_info->cond;
718   enum rtx_code code = GET_CODE (cond);
719   rtx y, seq;
720 
721   if (code != NE && code != EQ)
722     return FALSE;
723 
724   /* This optimization isn't valid if either A or B could be a NaN
725      or a signed zero.  */
726   if (HONOR_NANS (GET_MODE (if_info->x))
727       || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
728     return FALSE;
729 
730   /* Check whether the operands of the comparison are A and B in
731      either order.  */
732   if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
733        && rtx_equal_p (if_info->b, XEXP (cond, 1)))
734       || (rtx_equal_p (if_info->a, XEXP (cond, 1))
735 	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
736     {
737       y = (code == EQ) ? if_info->a : if_info->b;
738 
739       /* Avoid generating the move if the source is the destination.  */
740       if (! rtx_equal_p (if_info->x, y))
741 	{
742 	  start_sequence ();
743 	  noce_emit_move_insn (if_info->x, y);
744 	  seq = get_insns ();
745 	  unshare_ifcvt_sequence (if_info, seq);
746 	  end_sequence ();
747 
748 	  /* Make sure that all of the instructions emitted are
749 	     recognizable.  As an exercise for the reader, build
750 	     a general mechanism that allows proper placement of
751 	     required clobbers.  */
752 	  for (y = seq; y ; y = NEXT_INSN (y))
753 	    if (recog_memoized (y) == -1)
754 	      return FALSE;
755 
756 	  emit_insn_before_setloc (seq, if_info->jump,
757 				   INSN_LOCATOR (if_info->insn_a));
758 	}
759       return TRUE;
760     }
761   return FALSE;
762 }
763 
764 /* Convert "if (test) x = 1; else x = 0".
765 
766    Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
767    tried in noce_try_store_flag_constants after noce_try_cmove has had
768    a go at the conversion.  */
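/* For example, on a target where STORE_FLAG_VALUE is 1, selecting between
   the constants 1 and 0 collapses to a single store-flag insn,
   "x = (condition)", with the condition reversed if the 1 is on the other
   arm; the conditional branch disappears.  */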
769 
770 static int
771 noce_try_store_flag (struct noce_if_info *if_info)
772 {
773   int reversep;
774   rtx target, seq;
775 
776   if (GET_CODE (if_info->b) == CONST_INT
777       && INTVAL (if_info->b) == STORE_FLAG_VALUE
778       && if_info->a == const0_rtx)
779     reversep = 0;
780   else if (if_info->b == const0_rtx
781 	   && GET_CODE (if_info->a) == CONST_INT
782 	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
783 	   && (reversed_comparison_code (if_info->cond, if_info->jump)
784 	       != UNKNOWN))
785     reversep = 1;
786   else
787     return FALSE;
788 
789   start_sequence ();
790 
791   target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
792   if (target)
793     {
794       if (target != if_info->x)
795 	noce_emit_move_insn (if_info->x, target);
796 
797       seq = get_insns ();
798       unshare_ifcvt_sequence (if_info, seq);
799       end_sequence ();
800       emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
801 
802       return TRUE;
803     }
804   else
805     {
806       end_sequence ();
807       return FALSE;
808     }
809 }
810 
811 /* Convert "if (test) x = a; else x = b", for A and B constant.  */
812 
813 static int
814 noce_try_store_flag_constants (struct noce_if_info *if_info)
815 {
816   rtx target, seq;
817   int reversep;
818   HOST_WIDE_INT itrue, ifalse, diff, tmp;
819   int normalize, can_reverse;
820   enum machine_mode mode;
821 
822   if (! no_new_pseudos
823       && GET_CODE (if_info->a) == CONST_INT
824       && GET_CODE (if_info->b) == CONST_INT)
825     {
826       mode = GET_MODE (if_info->x);
827       ifalse = INTVAL (if_info->a);
828       itrue = INTVAL (if_info->b);
829 
830       /* Make sure we can represent the difference between the two values.  */
831       if ((itrue - ifalse > 0)
832 	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
833 	return FALSE;
834 
835       diff = trunc_int_for_mode (itrue - ifalse, mode);
836 
837       can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
838 		     != UNKNOWN);
839 
840       reversep = 0;
841       if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
842 	normalize = 0;
843       else if (ifalse == 0 && exact_log2 (itrue) >= 0
844 	       && (STORE_FLAG_VALUE == 1
845 		   || BRANCH_COST >= 2))
846 	normalize = 1;
847       else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
848 	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
849 	normalize = 1, reversep = 1;
850       else if (itrue == -1
851 	       && (STORE_FLAG_VALUE == -1
852 		   || BRANCH_COST >= 2))
853 	normalize = -1;
854       else if (ifalse == -1 && can_reverse
855 	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
856 	normalize = -1, reversep = 1;
857       else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
858 	       || BRANCH_COST >= 3)
859 	normalize = -1;
860       else
861 	return FALSE;
862 
863       if (reversep)
864 	{
865 	  tmp = itrue; itrue = ifalse; ifalse = tmp;
866 	  diff = trunc_int_for_mode (-diff, mode);
867 	}
868 
869       start_sequence ();
870       target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
871       if (! target)
872 	{
873 	  end_sequence ();
874 	  return FALSE;
875 	}
876 
877       /* if (test) x = 3; else x = 4;
878 	 =>   x = 3 + (test == 0);  */
879       if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
880 	{
881 	  target = expand_simple_binop (mode,
882 					(diff == STORE_FLAG_VALUE
883 					 ? PLUS : MINUS),
884 					GEN_INT (ifalse), target, if_info->x, 0,
885 					OPTAB_WIDEN);
886 	}
887 
888       /* if (test) x = 8; else x = 0;
889 	 =>   x = (test != 0) << 3;  */
890       else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
891 	{
892 	  target = expand_simple_binop (mode, ASHIFT,
893 					target, GEN_INT (tmp), if_info->x, 0,
894 					OPTAB_WIDEN);
895 	}
896 
897       /* if (test) x = -1; else x = b;
898 	 =>   x = -(test != 0) | b;  */
899       else if (itrue == -1)
900 	{
901 	  target = expand_simple_binop (mode, IOR,
902 					target, GEN_INT (ifalse), if_info->x, 0,
903 					OPTAB_WIDEN);
904 	}
905 
906       /* if (test) x = a; else x = b;
907 	 =>   x = (-(test != 0) & (b - a)) + a;  */
908       else
909 	{
910 	  target = expand_simple_binop (mode, AND,
911 					target, GEN_INT (diff), if_info->x, 0,
912 					OPTAB_WIDEN);
913 	  if (target)
914 	    target = expand_simple_binop (mode, PLUS,
915 					  target, GEN_INT (ifalse),
916 					  if_info->x, 0, OPTAB_WIDEN);
917 	}
918 
919       if (! target)
920 	{
921 	  end_sequence ();
922 	  return FALSE;
923 	}
924 
925       if (target != if_info->x)
926 	noce_emit_move_insn (if_info->x, target);
927 
928       seq = get_insns ();
929       unshare_ifcvt_sequence (if_info, seq);
930       end_sequence ();
931 
932       if (seq_contains_jump (seq))
933 	return FALSE;
934 
935       emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
936 
937       return TRUE;
938     }
939 
940   return FALSE;
941 }
942 
943 /* Convert "if (test) foo++" into "foo += (test != 0)", and
944    similarly for "foo--".  */
945 
946 static int
947 noce_try_addcc (struct noce_if_info *if_info)
948 {
949   rtx target, seq;
950   int subtract, normalize;
951 
952   if (! no_new_pseudos
953       && GET_CODE (if_info->a) == PLUS
954       && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
955       && (reversed_comparison_code (if_info->cond, if_info->jump)
956 	  != UNKNOWN))
957     {
958       rtx cond = if_info->cond;
959       enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
960 
961       /* First try to use addcc pattern.  */
962       if (general_operand (XEXP (cond, 0), VOIDmode)
963 	  && general_operand (XEXP (cond, 1), VOIDmode))
964 	{
965 	  start_sequence ();
966 	  target = emit_conditional_add (if_info->x, code,
967 					 XEXP (cond, 0),
968 					 XEXP (cond, 1),
969 					 VOIDmode,
970 					 if_info->b,
971 					 XEXP (if_info->a, 1),
972 					 GET_MODE (if_info->x),
973 					 (code == LTU || code == GEU
974 					  || code == LEU || code == GTU));
975 	  if (target)
976 	    {
977 	      if (target != if_info->x)
978 		noce_emit_move_insn (if_info->x, target);
979 
980 	      seq = get_insns ();
981 	      unshare_ifcvt_sequence (if_info, seq);
982 	      end_sequence ();
983 	      emit_insn_before_setloc (seq, if_info->jump,
984 				      INSN_LOCATOR (if_info->insn_a));
985 	      return TRUE;
986 	    }
987 	  end_sequence ();
988 	}
989 
990       /* If that fails, construct conditional increment or decrement using
991 	 setcc.  */
992       if (BRANCH_COST >= 2
993 	  && (XEXP (if_info->a, 1) == const1_rtx
994 	      || XEXP (if_info->a, 1) == constm1_rtx))
995         {
996 	  start_sequence ();
997 	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
998 	    subtract = 0, normalize = 0;
999 	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1000 	    subtract = 1, normalize = 0;
1001 	  else
1002 	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1003 
1004 
1005 	  target = noce_emit_store_flag (if_info,
1006 					 gen_reg_rtx (GET_MODE (if_info->x)),
1007 					 1, normalize);
1008 
1009 	  if (target)
1010 	    target = expand_simple_binop (GET_MODE (if_info->x),
1011 					  subtract ? MINUS : PLUS,
1012 					  if_info->b, target, if_info->x,
1013 					  0, OPTAB_WIDEN);
1014 	  if (target)
1015 	    {
1016 	      if (target != if_info->x)
1017 		noce_emit_move_insn (if_info->x, target);
1018 
1019 	      seq = get_insns ();
1020 	      unshare_ifcvt_sequence (if_info, seq);
1021 	      end_sequence ();
1022 
1023 	      if (seq_contains_jump (seq))
1024 		return FALSE;
1025 
1026 	      emit_insn_before_setloc (seq, if_info->jump,
1027 				      INSN_LOCATOR (if_info->insn_a));
1028 
1029 	      return TRUE;
1030 	    }
1031 	  end_sequence ();
1032 	}
1033     }
1034 
1035   return FALSE;
1036 }
1037 
1038 /* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */
1039 
1040 static int
1041 noce_try_store_flag_mask (struct noce_if_info *if_info)
1042 {
1043   rtx target, seq;
1044   int reversep;
1045 
1046   reversep = 0;
1047   if (! no_new_pseudos
1048       && (BRANCH_COST >= 2
1049 	  || STORE_FLAG_VALUE == -1)
1050       && ((if_info->a == const0_rtx
1051 	   && rtx_equal_p (if_info->b, if_info->x))
1052 	  || ((reversep = (reversed_comparison_code (if_info->cond,
1053 						     if_info->jump)
1054 			   != UNKNOWN))
1055 	      && if_info->b == const0_rtx
1056 	      && rtx_equal_p (if_info->a, if_info->x))))
1057     {
1058       start_sequence ();
1059       target = noce_emit_store_flag (if_info,
1060 				     gen_reg_rtx (GET_MODE (if_info->x)),
1061 				     reversep, -1);
1062       if (target)
1063         target = expand_simple_binop (GET_MODE (if_info->x), AND,
1064 				      if_info->x,
1065 				      target, if_info->x, 0,
1066 				      OPTAB_WIDEN);
1067 
1068       if (target)
1069 	{
1070 	  if (target != if_info->x)
1071 	    noce_emit_move_insn (if_info->x, target);
1072 
1073 	  seq = get_insns ();
1074 	  unshare_ifcvt_sequence (if_info, seq);
1075 	  end_sequence ();
1076 
1077 	  if (seq_contains_jump (seq))
1078 	    return FALSE;
1079 
1080 	  emit_insn_before_setloc (seq, if_info->jump,
1081 				  INSN_LOCATOR (if_info->insn_a));
1082 
1083 	  return TRUE;
1084 	}
1085 
1086       end_sequence ();
1087     }
1088 
1089   return FALSE;
1090 }
1091 
1092 /* Helper function for noce_try_cmove and noce_try_cmove_arith.  */
1093 
1094 static rtx
1095 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1096 		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1097 {
1098   /* If earliest == jump, try to build the cmove insn directly.
1099      This is helpful when combine has created some complex condition
1100      (like for alpha's cmovlbs) that we can't hope to regenerate
1101      through the normal interface.  */
1102 
1103   if (if_info->cond_earliest == if_info->jump)
1104     {
1105       rtx tmp;
1106 
1107       tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1108       tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1109       tmp = gen_rtx_SET (VOIDmode, x, tmp);
1110 
1111       start_sequence ();
1112       tmp = emit_insn (tmp);
1113 
1114       if (recog_memoized (tmp) >= 0)
1115 	{
1116 	  tmp = get_insns ();
1117 	  end_sequence ();
1118 	  emit_insn (tmp);
1119 
1120 	  return x;
1121 	}
1122 
1123       end_sequence ();
1124     }
1125 
1126   /* Don't even try if the comparison operands are weird.  */
1127   if (! general_operand (cmp_a, GET_MODE (cmp_a))
1128       || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1129     return NULL_RTX;
1130 
1131 #if HAVE_conditional_move
1132   return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1133 				vtrue, vfalse, GET_MODE (x),
1134 			        (code == LTU || code == GEU
1135 				 || code == LEU || code == GTU));
1136 #else
1137   /* We'll never get here, as noce_process_if_block doesn't call the
1138      functions involved.  Ifdef code, however, should be discouraged
1139      because it leads to typos in the code not selected.  However,
1140      emit_conditional_move won't exist either.  */
1141   return NULL_RTX;
1142 #endif
1143 }
1144 
1145 /* Try only simple constants and registers here.  More complex cases
1146    are handled in noce_try_cmove_arith after the noce_try_store_flag
1147    variants have had a go at it.  */
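/* For example, an IF-THEN-ELSE that merely selects between two registers
   or constants maps onto a single conditional-move insn on targets that
   provide one; anything more involved is left to noce_try_cmove_arith.  */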
1148 
1149 static int
1150 noce_try_cmove (struct noce_if_info *if_info)
1151 {
1152   enum rtx_code code;
1153   rtx target, seq;
1154 
1155   if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1156       && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1157     {
1158       start_sequence ();
1159 
1160       code = GET_CODE (if_info->cond);
1161       target = noce_emit_cmove (if_info, if_info->x, code,
1162 				XEXP (if_info->cond, 0),
1163 				XEXP (if_info->cond, 1),
1164 				if_info->a, if_info->b);
1165 
1166       if (target)
1167 	{
1168 	  if (target != if_info->x)
1169 	    noce_emit_move_insn (if_info->x, target);
1170 
1171 	  seq = get_insns ();
1172 	  unshare_ifcvt_sequence (if_info, seq);
1173 	  end_sequence ();
1174 	  emit_insn_before_setloc (seq, if_info->jump,
1175 				  INSN_LOCATOR (if_info->insn_a));
1176 	  return TRUE;
1177 	}
1178       else
1179 	{
1180 	  end_sequence ();
1181 	  return FALSE;
1182 	}
1183     }
1184 
1185   return FALSE;
1186 }
1187 
1188 /* Try more complex cases involving conditional_move.  */
1189 
1190 static int
1191 noce_try_cmove_arith (struct noce_if_info *if_info)
1192 {
1193   rtx a = if_info->a;
1194   rtx b = if_info->b;
1195   rtx x = if_info->x;
1196   rtx insn_a, insn_b;
1197   rtx tmp, target;
1198   int is_mem = 0;
1199   enum rtx_code code;
1200 
1201   /* A conditional move from two memory sources is equivalent to a
1202      conditional on their addresses followed by a load.  Don't do this
1203      early because it'll screw alias analysis.  Note that we've
1204      already checked for no side effects.  */
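  /* That is, "x = test ? *p : *q" can become "tmp = test ? p : q"
     followed by "x = *tmp".  */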
1205   if (! no_new_pseudos && cse_not_expected
1206       && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1207       && BRANCH_COST >= 5)
1208     {
1209       a = XEXP (a, 0);
1210       b = XEXP (b, 0);
1211       x = gen_reg_rtx (Pmode);
1212       is_mem = 1;
1213     }
1214 
1215   /* ??? We could handle this if we knew that a load from A or B could
1216      not fault.  This is also true if we've already loaded
1217      from the address along the path from ENTRY.  */
1218   else if (may_trap_p (a) || may_trap_p (b))
1219     return FALSE;
1220 
1221   /* if (test) x = a + b; else x = c - d;
1222      => y = a + b;
1223         x = c - d;
1224 	if (test)
1225 	  x = y;
1226   */
1227 
1228   code = GET_CODE (if_info->cond);
1229   insn_a = if_info->insn_a;
1230   insn_b = if_info->insn_b;
1231 
1232   /* Possibly rearrange operands to make things come out more natural.  */
1233   if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1234     {
1235       int reversep = 0;
1236       if (rtx_equal_p (b, x))
1237 	reversep = 1;
1238       else if (general_operand (b, GET_MODE (b)))
1239 	reversep = 1;
1240 
1241       if (reversep)
1242 	{
1243 	  code = reversed_comparison_code (if_info->cond, if_info->jump);
1244 	  tmp = a, a = b, b = tmp;
1245 	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1246 	}
1247     }
1248 
1249   start_sequence ();
1250 
1251   /* If either operand is complex, load it into a register first.
1252      The best way to do this is to copy the original insn.  In this
1253      way we preserve any clobbers etc that the insn may have had.
1254      This is of course not possible in the IS_MEM case.  */
1255   if (! general_operand (a, GET_MODE (a)))
1256     {
1257       rtx set;
1258 
1259       if (no_new_pseudos)
1260 	goto end_seq_and_fail;
1261 
1262       if (is_mem)
1263 	{
1264 	  tmp = gen_reg_rtx (GET_MODE (a));
1265 	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1266 	}
1267       else if (! insn_a)
1268 	goto end_seq_and_fail;
1269       else
1270 	{
1271 	  a = gen_reg_rtx (GET_MODE (a));
1272 	  tmp = copy_rtx (insn_a);
1273 	  set = single_set (tmp);
1274 	  SET_DEST (set) = a;
1275 	  tmp = emit_insn (PATTERN (tmp));
1276 	}
1277       if (recog_memoized (tmp) < 0)
1278 	goto end_seq_and_fail;
1279     }
1280   if (! general_operand (b, GET_MODE (b)))
1281     {
1282       rtx set;
1283 
1284       if (no_new_pseudos)
1285 	goto end_seq_and_fail;
1286 
1287       if (is_mem)
1288 	{
1289           tmp = gen_reg_rtx (GET_MODE (b));
1290 	  tmp = emit_insn (gen_rtx_SET (VOIDmode,
1291 				  	tmp,
1292 					b));
1293 	}
1294       else if (! insn_b)
1295 	goto end_seq_and_fail;
1296       else
1297 	{
1298           b = gen_reg_rtx (GET_MODE (b));
1299 	  tmp = copy_rtx (insn_b);
1300 	  set = single_set (tmp);
1301 	  SET_DEST (set) = b;
1302 	  tmp = emit_insn (PATTERN (tmp));
1303 	}
1304       if (recog_memoized (tmp) < 0)
1305 	goto end_seq_and_fail;
1306     }
1307 
1308   target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1309 			    XEXP (if_info->cond, 1), a, b);
1310 
1311   if (! target)
1312     goto end_seq_and_fail;
1313 
1314   /* If we're handling memories (IS_MEM above), emit the load now.  */
1315   if (is_mem)
1316     {
1317       tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1318 
1319       /* Copy over flags as appropriate.  */
1320       if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1321 	MEM_VOLATILE_P (tmp) = 1;
1322       if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1323 	MEM_IN_STRUCT_P (tmp) = 1;
1324       if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1325 	MEM_SCALAR_P (tmp) = 1;
1326       if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1327 	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1328       set_mem_align (tmp,
1329 		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1330 
1331       noce_emit_move_insn (if_info->x, tmp);
1332     }
1333   else if (target != x)
1334     noce_emit_move_insn (x, target);
1335 
1336   tmp = get_insns ();
1337   unshare_ifcvt_sequence (if_info, tmp);
1338   end_sequence ();
1339   emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1340   return TRUE;
1341 
1342  end_seq_and_fail:
1343   end_sequence ();
1344   return FALSE;
1345 }
1346 
1347 /* For most cases, the simplified condition we found is the best
1348    choice, but this is not the case for the min/max/abs transforms.
1349    For these we wish to know that it is A or B in the condition.  */
1350 
1351 static rtx
1352 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1353 			rtx *earliest)
1354 {
1355   rtx cond, set, insn;
1356   int reverse;
1357 
1358   /* If target is already mentioned in the known condition, return it.  */
1359   if (reg_mentioned_p (target, if_info->cond))
1360     {
1361       *earliest = if_info->cond_earliest;
1362       return if_info->cond;
1363     }
1364 
1365   set = pc_set (if_info->jump);
1366   cond = XEXP (SET_SRC (set), 0);
1367   reverse
1368     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1369       && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1370 
1371   /* If we're looking for a constant, try to make the conditional
1372      have that constant in it.  There are two reasons why it may
1373      not have the constant we want:
1374 
1375      1. GCC may have needed to put the constant in a register, because
1376         the target can't compare directly against that constant.  For
1377         this case, we look for a SET immediately before the comparison
1378         that puts a constant in that register.
1379 
1380      2. GCC may have canonicalized the conditional, for example
1381 	replacing "if x < 4" with "if x <= 3".  We can undo that (or
1382 	make equivalent types of changes) to get the constants we need
1383 	if they're off by one in the right direction.  */
1384 
1385   if (GET_CODE (target) == CONST_INT)
1386     {
1387       enum rtx_code code = GET_CODE (if_info->cond);
1388       rtx op_a = XEXP (if_info->cond, 0);
1389       rtx op_b = XEXP (if_info->cond, 1);
1390       rtx prev_insn;
1391 
1392       /* First, look to see if we put a constant in a register.  */
1393       prev_insn = PREV_INSN (if_info->cond_earliest);
1394       if (prev_insn
1395 	  && INSN_P (prev_insn)
1396 	  && GET_CODE (PATTERN (prev_insn)) == SET)
1397 	{
1398 	  rtx src = find_reg_equal_equiv_note (prev_insn);
1399 	  if (!src)
1400 	    src = SET_SRC (PATTERN (prev_insn));
1401 	  if (GET_CODE (src) == CONST_INT)
1402 	    {
1403 	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1404 		op_a = src;
1405 	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1406 		op_b = src;
1407 
1408 	      if (GET_CODE (op_a) == CONST_INT)
1409 		{
1410 		  rtx tmp = op_a;
1411 		  op_a = op_b;
1412 		  op_b = tmp;
1413 		  code = swap_condition (code);
1414 		}
1415 	    }
1416 	}
1417 
1418       /* Now, look to see if we can get the right constant by
1419 	 adjusting the conditional.  */
1420       if (GET_CODE (op_b) == CONST_INT)
1421 	{
1422 	  HOST_WIDE_INT desired_val = INTVAL (target);
1423 	  HOST_WIDE_INT actual_val = INTVAL (op_b);
1424 
1425 	  switch (code)
1426 	    {
1427 	    case LT:
1428 	      if (actual_val == desired_val + 1)
1429 		{
1430 		  code = LE;
1431 		  op_b = GEN_INT (desired_val);
1432 		}
1433 	      break;
1434 	    case LE:
1435 	      if (actual_val == desired_val - 1)
1436 		{
1437 		  code = LT;
1438 		  op_b = GEN_INT (desired_val);
1439 		}
1440 	      break;
1441 	    case GT:
1442 	      if (actual_val == desired_val - 1)
1443 		{
1444 		  code = GE;
1445 		  op_b = GEN_INT (desired_val);
1446 		}
1447 	      break;
1448 	    case GE:
1449 	      if (actual_val == desired_val + 1)
1450 		{
1451 		  code = GT;
1452 		  op_b = GEN_INT (desired_val);
1453 		}
1454 	      break;
1455 	    default:
1456 	      break;
1457 	    }
1458 	}
1459 
1460       /* If we made any changes, generate a new conditional that is
1461 	 equivalent to what we started with, but has the right
1462 	 constants in it.  */
1463       if (code != GET_CODE (if_info->cond)
1464 	  || op_a != XEXP (if_info->cond, 0)
1465 	  || op_b != XEXP (if_info->cond, 1))
1466 	{
1467 	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1468 	  *earliest = if_info->cond_earliest;
1469 	  return cond;
1470 	}
1471     }
1472 
1473   cond = canonicalize_condition (if_info->jump, cond, reverse,
1474 				 earliest, target, false);
1475   if (! cond || ! reg_mentioned_p (target, cond))
1476     return NULL;
1477 
1478   /* We almost certainly searched back to a different place.
1479      Need to re-verify correct lifetimes.  */
1480 
1481   /* X may not be mentioned in the range (cond_earliest, jump].  */
1482   for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1483     if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1484       return NULL;
1485 
1486   /* A and B may not be modified in the range [cond_earliest, jump).  */
1487   for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1488     if (INSN_P (insn)
1489 	&& (modified_in_p (if_info->a, insn)
1490 	    || modified_in_p (if_info->b, insn)))
1491       return NULL;
1492 
1493   return cond;
1494 }
1495 
1496 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */
1497 
1498 static int
1499 noce_try_minmax (struct noce_if_info *if_info)
1500 {
1501   rtx cond, earliest, target, seq;
1502   enum rtx_code code, op;
1503   int unsignedp;
1504 
1505   /* ??? Can't guarantee that expand_binop won't create pseudos.  */
1506   if (no_new_pseudos)
1507     return FALSE;
1508 
1509   /* ??? Reject modes with NaNs or signed zeros since we don't know how
1510      they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
1511      to get the target to tell us...  */
1512   if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1513       || HONOR_NANS (GET_MODE (if_info->x)))
1514     return FALSE;
1515 
1516   cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1517   if (!cond)
1518     return FALSE;
1519 
1520   /* Verify the condition is of the form we expect, and canonicalize
1521      the comparison code.  */
1522   code = GET_CODE (cond);
1523   if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1524     {
1525       if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1526 	return FALSE;
1527     }
1528   else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1529     {
1530       if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1531 	return FALSE;
1532       code = swap_condition (code);
1533     }
1534   else
1535     return FALSE;
1536 
1537   /* Determine what sort of operation this is.  Note that the code is for
1538      a taken branch, so the code->operation mapping appears backwards.  */
1539   switch (code)
1540     {
1541     case LT:
1542     case LE:
1543     case UNLT:
1544     case UNLE:
1545       op = SMAX;
1546       unsignedp = 0;
1547       break;
1548     case GT:
1549     case GE:
1550     case UNGT:
1551     case UNGE:
1552       op = SMIN;
1553       unsignedp = 0;
1554       break;
1555     case LTU:
1556     case LEU:
1557       op = UMAX;
1558       unsignedp = 1;
1559       break;
1560     case GTU:
1561     case GEU:
1562       op = UMIN;
1563       unsignedp = 1;
1564       break;
1565     default:
1566       return FALSE;
1567     }
1568 
1569   start_sequence ();
1570 
1571   target = expand_simple_binop (GET_MODE (if_info->x), op,
1572 				if_info->a, if_info->b,
1573 				if_info->x, unsignedp, OPTAB_WIDEN);
1574   if (! target)
1575     {
1576       end_sequence ();
1577       return FALSE;
1578     }
1579   if (target != if_info->x)
1580     noce_emit_move_insn (if_info->x, target);
1581 
1582   seq = get_insns ();
1583   unshare_ifcvt_sequence (if_info, seq);
1584   end_sequence ();
1585 
1586   if (seq_contains_jump (seq))
1587     return FALSE;
1588 
1589   emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1590   if_info->cond = cond;
1591   if_info->cond_earliest = earliest;
1592 
1593   return TRUE;
1594 }
1595 
1596 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.  */
1597 
1598 static int
1599 noce_try_abs (struct noce_if_info *if_info)
1600 {
1601   rtx cond, earliest, target, seq, a, b, c;
1602   int negate;
1603 
1604   /* ??? Can't guarantee that expand_binop won't create pseudos.  */
1605   if (no_new_pseudos)
1606     return FALSE;
1607 
1608   /* Recognize A and B as constituting an ABS or NABS.  */
1609   a = if_info->a;
1610   b = if_info->b;
1611   if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1612     negate = 0;
1613   else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1614     {
1615       c = a; a = b; b = c;
1616       negate = 1;
1617     }
1618   else
1619     return FALSE;
1620 
1621   cond = noce_get_alt_condition (if_info, b, &earliest);
1622   if (!cond)
1623     return FALSE;
1624 
1625   /* Verify the condition is of the form we expect.  */
1626   if (rtx_equal_p (XEXP (cond, 0), b))
1627     c = XEXP (cond, 1);
1628   else if (rtx_equal_p (XEXP (cond, 1), b))
1629     c = XEXP (cond, 0);
1630   else
1631     return FALSE;
1632 
1633   /* Verify that C is zero.  Search backward through the block for
1634      a REG_EQUAL note if necessary.  */
1635   if (REG_P (c))
1636     {
1637       rtx insn, note = NULL;
1638       for (insn = earliest;
1639 	   insn != BB_HEAD (if_info->test_bb);
1640 	   insn = PREV_INSN (insn))
1641 	if (INSN_P (insn)
1642 	    && ((note = find_reg_note (insn, REG_EQUAL, c))
1643 		|| (note = find_reg_note (insn, REG_EQUIV, c))))
1644 	  break;
1645       if (! note)
1646 	return FALSE;
1647       c = XEXP (note, 0);
1648     }
1649   if (GET_CODE (c) == MEM
1650       && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1651       && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1652     c = get_pool_constant (XEXP (c, 0));
1653 
1654   /* Work around funny ideas get_condition has wrt canonicalization.
1655      Note that these rtx constants are known to be CONST_INT, and
1656      therefore imply integer comparisons.  */
1657   if (c == constm1_rtx && GET_CODE (cond) == GT)
1658     ;
1659   else if (c == const1_rtx && GET_CODE (cond) == LT)
1660     ;
1661   else if (c != CONST0_RTX (GET_MODE (b)))
1662     return FALSE;
1663 
1664   /* Determine what sort of operation this is.  */
1665   switch (GET_CODE (cond))
1666     {
1667     case LT:
1668     case LE:
1669     case UNLT:
1670     case UNLE:
1671       negate = !negate;
1672       break;
1673     case GT:
1674     case GE:
1675     case UNGT:
1676     case UNGE:
1677       break;
1678     default:
1679       return FALSE;
1680     }
1681 
1682   start_sequence ();
1683 
1684   target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1685 
1686   /* ??? It's a quandary whether cmove would be better here, especially
1687      for integers.  Perhaps combine will clean things up.  */
1688   if (target && negate)
1689     target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1690 
1691   if (! target)
1692     {
1693       end_sequence ();
1694       return FALSE;
1695     }
1696 
1697   if (target != if_info->x)
1698     noce_emit_move_insn (if_info->x, target);
1699 
1700   seq = get_insns ();
1701   unshare_ifcvt_sequence (if_info, seq);
1702   end_sequence ();
1703 
1704   if (seq_contains_jump (seq))
1705     return FALSE;
1706 
1707   emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1708   if_info->cond = cond;
1709   if_info->cond_earliest = earliest;
1710 
1711   return TRUE;
1712 }
1713 
1714 /* Similar to get_condition, only the resulting condition must be
1715    valid at JUMP, instead of at EARLIEST.  */
1716 
1717 static rtx
1718 noce_get_condition (rtx jump, rtx *earliest)
1719 {
1720   rtx cond, set, tmp, insn;
1721   bool reverse;
1722 
1723   if (! any_condjump_p (jump))
1724     return NULL_RTX;
1725 
1726   set = pc_set (jump);
1727 
1728   /* If this branches to JUMP_LABEL when the condition is false,
1729      reverse the condition.  */
1730   reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1731 	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1732 
1733   /* If the condition variable is a register and is MODE_INT, accept it.  */
1734 
1735   cond = XEXP (SET_SRC (set), 0);
1736   tmp = XEXP (cond, 0);
1737   if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1738     {
1739       *earliest = jump;
1740 
1741       if (reverse)
1742 	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1743 			       GET_MODE (cond), tmp, XEXP (cond, 1));
1744       return cond;
1745     }
1746 
1747   /* Otherwise, fall back on canonicalize_condition to do the dirty
1748      work of manipulating MODE_CC values and COMPARE rtx codes.  */
1749 
1750   tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
1751 				false);
1752   if (!tmp)
1753     return NULL_RTX;
1754 
1755   /* We are going to insert code before JUMP, not before EARLIEST.
1756      We must therefore be certain that the given condition is valid
1757      at JUMP by virtue of not having been modified since.  */
1758   for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1759     if (INSN_P (insn) && modified_in_p (tmp, insn))
1760       break;
1761   if (insn == jump)
1762     return tmp;
1763 
1764   /* The condition was modified.  See if we can get a partial result
1765      that doesn't follow all the reversals.  Perhaps combine can fold
1766      them together later.  */
1767   tmp = XEXP (tmp, 0);
1768   if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1769     return NULL_RTX;
1770   tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
1771 				false);
1772   if (!tmp)
1773     return NULL_RTX;
1774 
1775   /* For sanity's sake, re-validate the new result.  */
1776   for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1777     if (INSN_P (insn) && modified_in_p (tmp, insn))
1778       return NULL_RTX;
1779 
1780   return tmp;
1781 }
1782 
1783 /* Return true if OP is ok for if-then-else processing.  */
1784 
1785 static int
1786 noce_operand_ok (rtx op)
1787 {
1788   /* We special-case memories, so accept any memory whose
1789      address has no side effects.  */
1790   if (GET_CODE (op) == MEM)
1791     return ! side_effects_p (XEXP (op, 0));
1792 
1793   if (side_effects_p (op))
1794     return FALSE;
1795 
1796   return ! may_trap_p (op);
1797 }
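
/* Illustrative examples of the above checks: a memory operand is accepted as
   long as its address has no side effects (a plain stack slot or pointer
   dereference is fine, one using an auto-increment address is not), while any
   other operand is rejected if it has side effects or may trap, e.g. a
   division whose divisor might be zero.  */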
1798 
1799 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1800    without using conditional execution.  Return TRUE if we were
1801    successful at converting the block.  */
1802 
1803 static int
1804 noce_process_if_block (struct ce_if_block * ce_info)
1805 {
1806   basic_block test_bb = ce_info->test_bb;	/* test block */
1807   basic_block then_bb = ce_info->then_bb;	/* THEN */
1808   basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
1809   struct noce_if_info if_info;
1810   rtx insn_a, insn_b;
1811   rtx set_a, set_b;
1812   rtx orig_x, x, a, b;
1813   rtx jump, cond;
1814 
1815   /* We're looking for patterns of the form
1816 
1817      (1) if (...) x = a; else x = b;
1818      (2) x = b; if (...) x = a;
1819      (3) if (...) x = a;   // as if with an initial x = x.
1820 
1821      The latter patterns require jumps to be more expensive.
1822 
1823      ??? For future expansion, look for multiple X in such patterns.  */
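  /* For example, form (3) above is treated as if it had been written
     "if (...) x = a; else x = x;", so a successful conversion produces the
     moral equivalent of "x = (...) ? a : x;" with no branch.  */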
1824 
1825   /* If test is comprised of && or || elements, don't handle it unless it is
1826      the special case of && elements without an ELSE block.  */
1827   if (ce_info->num_multiple_test_blocks)
1828     {
1829       if (else_bb || ! ce_info->and_and_p)
1830 	return FALSE;
1831 
1832       ce_info->test_bb = test_bb = ce_info->last_test_bb;
1833       ce_info->num_multiple_test_blocks = 0;
1834       ce_info->num_and_and_blocks = 0;
1835       ce_info->num_or_or_blocks = 0;
1836     }
1837 
1838   /* If this is not a standard conditional jump, we can't parse it.  */
1839   jump = BB_END (test_bb);
1840   cond = noce_get_condition (jump, &if_info.cond_earliest);
1841   if (! cond)
1842     return FALSE;
1843 
1844   /* If the conditional jump is more than just a conditional
1845      jump, then we cannot do if-conversion on this block.  */
1846   if (! onlyjump_p (jump))
1847     return FALSE;
1848 
1849   /* We must be comparing objects whose modes imply the size.  */
1850   if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1851     return FALSE;
1852 
1853   /* Look for one of the potential sets.  */
1854   insn_a = first_active_insn (then_bb);
1855   if (! insn_a
1856       || insn_a != last_active_insn (then_bb, FALSE)
1857       || (set_a = single_set (insn_a)) == NULL_RTX)
1858     return FALSE;
1859 
1860   x = SET_DEST (set_a);
1861   a = SET_SRC (set_a);
1862 
1863   /* Look for the other potential set.  Make sure we've got equivalent
1864      destinations.  */
1865   /* ??? This is overconservative.  Storing to two different mems is
1866      as easy as conditionally computing the address.  Storing to a
1867      single mem merely requires a scratch memory to use as one of the
1868      destination addresses; often the memory immediately below the
1869      stack pointer is available for this.  */
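  /* E.g. "if (c) *p = v; else *q = v;" is currently rejected because the two
     destinations differ, even though the address could in principle be
     computed conditionally as described above.  */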
1870   set_b = NULL_RTX;
1871   if (else_bb)
1872     {
1873       insn_b = first_active_insn (else_bb);
1874       if (! insn_b
1875 	  || insn_b != last_active_insn (else_bb, FALSE)
1876 	  || (set_b = single_set (insn_b)) == NULL_RTX
1877 	  || ! rtx_equal_p (x, SET_DEST (set_b)))
1878 	return FALSE;
1879     }
1880   else
1881     {
1882       insn_b = prev_nonnote_insn (if_info.cond_earliest);
1883       /* We're going to be moving the evaluation of B down from above
1884 	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
1885 	 intact.  */
1886       if (! insn_b
1887 	  || GET_CODE (insn_b) != INSN
1888 	  || (set_b = single_set (insn_b)) == NULL_RTX
1889 	  || ! rtx_equal_p (x, SET_DEST (set_b))
1890 	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1891 	  || modified_between_p (SET_SRC (set_b),
1892 				 PREV_INSN (if_info.cond_earliest), jump)
1893 	  /* Likewise with X.  In particular this can happen when
1894 	     noce_get_condition looks farther back in the instruction
1895 	     stream than one might expect.  */
1896 	  || reg_overlap_mentioned_p (x, cond)
1897 	  || reg_overlap_mentioned_p (x, a)
1898 	  || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1899 	insn_b = set_b = NULL_RTX;
1900     }
1901 
1902   /* If x has side effects then only the if-then-else form is safe to
1903      convert.  But even in that case we would need to restore any notes
1904      (such as REG_INC) at the end.  That can be tricky if
1905      noce_emit_move_insn expands to more than one insn, so disable the
1906      optimization entirely for now if there are side effects.  */
1907   if (side_effects_p (x))
1908     return FALSE;
1909 
1910   b = (set_b ? SET_SRC (set_b) : x);
1911 
1912   /* Only operate on register destinations, and even then avoid extending
1913      the lifetime of hard registers on small register class machines.  */
1914   orig_x = x;
1915   if (GET_CODE (x) != REG
1916       || (SMALL_REGISTER_CLASSES
1917 	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
1918     {
1919       if (no_new_pseudos || GET_MODE (x) == BLKmode)
1920 	return FALSE;
1921       x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1922 				 ? XEXP (x, 0) : x));
1923     }
1924 
1925   /* Don't operate on sources that may trap or are volatile.  */
1926   if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1927     return FALSE;
1928 
1929   /* Set up the info block for our subroutines.  */
1930   if_info.test_bb = test_bb;
1931   if_info.cond = cond;
1932   if_info.jump = jump;
1933   if_info.insn_a = insn_a;
1934   if_info.insn_b = insn_b;
1935   if_info.x = x;
1936   if_info.a = a;
1937   if_info.b = b;
1938 
1939   /* Try optimizations in some approximation of a useful order.  */
1940   /* ??? Should first look to see if X is live incoming at all.  If it
1941      isn't, we don't need anything but an unconditional set.  */
1942 
1943   /* Look and see if A and B are really the same.  Avoid creating silly
1944      cmove constructs that no one will fix up later.  */
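  /* E.g. "if (c) x = y + 1; else x = y + 1;" needs no conditional move at
     all; a single unconditional "x = y + 1;" suffices.  */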
1945   if (rtx_equal_p (a, b))
1946     {
1947       /* If we have an INSN_B, we don't have to create any new rtl.  Just
1948 	 move the instruction that we already have.  If we don't have an
1949 	 INSN_B, that means that A == X, and we've got a noop move.  In
1950 	 that case don't do anything and let the code below delete INSN_A.  */
1951       if (insn_b && else_bb)
1952 	{
1953 	  rtx note;
1954 
1955 	  if (else_bb && insn_b == BB_END (else_bb))
1956 	    BB_END (else_bb) = PREV_INSN (insn_b);
1957 	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1958 
1959 	  /* If there was a REG_EQUAL note, delete it since it may have been
1960 	     true due to this insn being after a jump.  */
1961 	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1962 	    remove_note (insn_b, note);
1963 
1964 	  insn_b = NULL_RTX;
1965 	}
1966       /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1967 	 x must be executed twice.  */
1968       else if (insn_b && side_effects_p (orig_x))
1969 	return FALSE;
1970 
1971       x = orig_x;
1972       goto success;
1973     }
1974 
1975   /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
1976      for most optimizations if writing to x may trap, i.e. it is a memory
1977      other than a static var or a stack slot.  */
1978   if (! set_b
1979       && GET_CODE (orig_x) == MEM
1980       && ! MEM_NOTRAP_P (orig_x)
1981       && rtx_addr_can_trap_p (XEXP (orig_x, 0)))
1982     {
1983       if (HAVE_conditional_move)
1984 	{
1985 	  if (noce_try_cmove (&if_info))
1986 	    goto success;
1987 	  if (! HAVE_conditional_execution
1988 	      && noce_try_cmove_arith (&if_info))
1989 	    goto success;
1990 	}
1991       return FALSE;
1992     }
1993 
1994   if (noce_try_move (&if_info))
1995     goto success;
1996   if (noce_try_store_flag (&if_info))
1997     goto success;
1998   if (noce_try_minmax (&if_info))
1999     goto success;
2000   if (noce_try_abs (&if_info))
2001     goto success;
2002   if (HAVE_conditional_move
2003       && noce_try_cmove (&if_info))
2004     goto success;
2005   if (! HAVE_conditional_execution)
2006     {
2007       if (noce_try_store_flag_constants (&if_info))
2008 	goto success;
2009       if (noce_try_addcc (&if_info))
2010 	goto success;
2011       if (noce_try_store_flag_mask (&if_info))
2012 	goto success;
2013       if (HAVE_conditional_move
2014 	  && noce_try_cmove_arith (&if_info))
2015 	goto success;
2016     }
2017 
2018   return FALSE;
2019 
2020  success:
2021   /* The original sets may now be killed.  */
2022   delete_insn (insn_a);
2023 
2024   /* Several special cases here: First, we may have reused insn_b above,
2025      in which case insn_b is now NULL.  Second, we want to delete insn_b
2026      if it came from the ELSE block, because it follows the now correct
2027      write that appears in the TEST block.  However, if we got insn_b from
2028      the TEST block, it may in fact be loading data needed for the comparison.
2029      We'll let life_analysis remove the insn if it's really dead.  */
2030   if (insn_b && else_bb)
2031     delete_insn (insn_b);
2032 
2033   /* The new insns will have been inserted immediately before the jump.  We
2034      should be able to remove the jump with impunity, but the condition itself
2035      may have been modified by gcse to be shared across basic blocks.  */
2036   delete_insn (jump);
2037 
2038   /* If we used a temporary, fix it up now.  */
2039   if (orig_x != x)
2040     {
2041       start_sequence ();
2042       noce_emit_move_insn (orig_x, x);
2043       insn_b = get_insns ();
2044       set_used_flags (orig_x);
2045       unshare_all_rtl_in_chain (insn_b);
2046       end_sequence ();
2047 
2048       emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
2049     }
2050 
2051   /* Merge the blocks!  */
2052   merge_if_block (ce_info);
2053 
2054   return TRUE;
2055 }
2056 
2057 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2058    straight line code.  Return true if successful.  */
2059 
2060 static int
2061 process_if_block (struct ce_if_block * ce_info)
2062 {
2063   if (! reload_completed
2064       && noce_process_if_block (ce_info))
2065     return TRUE;
2066 
2067   if (HAVE_conditional_execution && reload_completed)
2068     {
2069       /* If we have && and || tests, try to first handle combining the && and
2070          || tests into the conditional code, and if that fails, go back and
2071          handle it without the && and ||, which at present handles the && case
2072          if there was no ELSE block.  */
2073       if (cond_exec_process_if_block (ce_info, TRUE))
2074 	return TRUE;
2075 
2076       if (ce_info->num_multiple_test_blocks)
2077 	{
2078 	  cancel_changes (0);
2079 
2080 	  if (cond_exec_process_if_block (ce_info, FALSE))
2081 	    return TRUE;
2082 	}
2083     }
2084 
2085   return FALSE;
2086 }
2087 
2088 /* Merge the blocks and mark for local life update.  */
2089 
2090 static void
2091 merge_if_block (struct ce_if_block * ce_info)
2092 {
2093   basic_block test_bb = ce_info->test_bb;	/* last test block */
2094   basic_block then_bb = ce_info->then_bb;	/* THEN */
2095   basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
2096   basic_block join_bb = ce_info->join_bb;	/* join block */
2097   basic_block combo_bb;
2098 
2099   /* All block merging is done into the lower block numbers.  */
2100 
2101   combo_bb = test_bb;
2102 
2103   /* Merge any basic blocks to handle && and || subtests.  Each of
2104      the blocks is on the fallthru path from the predecessor block.  */
2105   if (ce_info->num_multiple_test_blocks > 0)
2106     {
2107       basic_block bb = test_bb;
2108       basic_block last_test_bb = ce_info->last_test_bb;
2109       basic_block fallthru = block_fallthru (bb);
2110 
2111       do
2112 	{
2113 	  bb = fallthru;
2114 	  fallthru = block_fallthru (bb);
2115 	  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2116 	    delete_from_dominance_info (CDI_POST_DOMINATORS, bb);
2117 	  merge_blocks (combo_bb, bb);
2118 	  num_true_changes++;
2119 	}
2120       while (bb != last_test_bb);
2121     }
2122 
2123   /* Merge TEST block into THEN block.  Normally the THEN block won't have a
2124      label, but it might if there were || tests.  That label's count should be
2125      zero, and it normally should be removed.  */
2126 
2127   if (then_bb)
2128     {
2129       if (combo_bb->global_live_at_end)
2130 	COPY_REG_SET (combo_bb->global_live_at_end,
2131 		      then_bb->global_live_at_end);
2132       if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2133 	delete_from_dominance_info (CDI_POST_DOMINATORS, then_bb);
2134       merge_blocks (combo_bb, then_bb);
2135       num_true_changes++;
2136     }
2137 
2138   /* The ELSE block, if it existed, had a label.  That label's count
2139      will almost always be zero, but odd things can happen when labels
2140      get their addresses taken.  */
2141   if (else_bb)
2142     {
2143       if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2144        	delete_from_dominance_info (CDI_POST_DOMINATORS, else_bb);
2145       merge_blocks (combo_bb, else_bb);
2146       num_true_changes++;
2147     }
2148 
2149   /* If there was no join block reported, that means it was not adjacent
2150      to the others, and so we cannot merge them.  */
2151 
2152   if (! join_bb)
2153     {
2154       rtx last = BB_END (combo_bb);
2155 
2156       /* The outgoing edge for the current COMBO block should already
2157 	 be correct.  Verify this.  */
2158       if (combo_bb->succ == NULL_EDGE)
2159 	{
2160 	  if (find_reg_note (last, REG_NORETURN, NULL))
2161 	    ;
2162 	  else if (GET_CODE (last) == INSN
2163 		   && GET_CODE (PATTERN (last)) == TRAP_IF
2164 		   && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2165 	    ;
2166 	  else
2167 	    abort ();
2168 	}
2169 
2170       /* There should still be something at the end of the THEN or ELSE
2171          blocks taking us to our final destination.  */
2172       else if (GET_CODE (last) == JUMP_INSN)
2173 	;
2174       else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2175 	       && GET_CODE (last) == CALL_INSN
2176 	       && SIBLING_CALL_P (last))
2177 	;
2178       else if ((combo_bb->succ->flags & EDGE_EH)
2179 	       && can_throw_internal (last))
2180 	;
2181       else
2182 	abort ();
2183     }
2184 
2185   /* The JOIN block may have had quite a number of other predecessors too.
2186      Since we've already merged the TEST, THEN and ELSE blocks, we should
2187      have only one remaining edge from our if-then-else diamond.  If there
2188      is more than one remaining edge, it must come from elsewhere.  There
2189      may be zero incoming edges if the THEN block didn't actually join
2190      back up (as with a call to abort).  */
2191   else if ((join_bb->pred == NULL
2192 	    || join_bb->pred->pred_next == NULL)
2193 	   && join_bb != EXIT_BLOCK_PTR)
2194     {
2195       /* We can merge the JOIN.  */
2196       if (combo_bb->global_live_at_end)
2197 	COPY_REG_SET (combo_bb->global_live_at_end,
2198 		      join_bb->global_live_at_end);
2199 
2200       if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2201 	delete_from_dominance_info (CDI_POST_DOMINATORS, join_bb);
2202       merge_blocks (combo_bb, join_bb);
2203       num_true_changes++;
2204     }
2205   else
2206     {
2207       /* We cannot merge the JOIN.  */
2208 
2209       /* The outgoing edge for the current COMBO block should already
2210 	 be correct.  Verify this.  */
2211       if (combo_bb->succ->succ_next != NULL_EDGE
2212 	  || combo_bb->succ->dest != join_bb)
2213 	abort ();
2214 
2215       /* Remove the jump and cruft from the end of the COMBO block.  */
2216       if (join_bb != EXIT_BLOCK_PTR)
2217 	tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2218     }
2219 
2220   num_updated_if_blocks++;
2221 }
2222 
2223 /* Find a block ending in a simple IF condition and try to transform it
2224    in some way.  When converting a multi-block condition, put the new code
2225    in the first such block and delete the rest.  Return a pointer to this
2226    first block if some transformation was done.  Return NULL otherwise.  */
2227 
2228 static basic_block
2229 find_if_header (basic_block test_bb, int pass)
2230 {
2231   ce_if_block_t ce_info;
2232   edge then_edge;
2233   edge else_edge;
2234 
2235   /* The kind of block we're looking for has exactly two successors.  */
2236   if ((then_edge = test_bb->succ) == NULL_EDGE
2237       || (else_edge = then_edge->succ_next) == NULL_EDGE
2238       || else_edge->succ_next != NULL_EDGE)
2239     return NULL;
2240 
2241   /* Neither edge should be abnormal.  */
2242   if ((then_edge->flags & EDGE_COMPLEX)
2243       || (else_edge->flags & EDGE_COMPLEX))
2244     return NULL;
2245 
2246   /* Nor exit the loop.  */
2247   if ((then_edge->flags & EDGE_LOOP_EXIT)
2248       || (else_edge->flags & EDGE_LOOP_EXIT))
2249     return NULL;
2250 
2251   /* The THEN edge is canonically the one that falls through.  */
2252   if (then_edge->flags & EDGE_FALLTHRU)
2253     ;
2254   else if (else_edge->flags & EDGE_FALLTHRU)
2255     {
2256       edge e = else_edge;
2257       else_edge = then_edge;
2258       then_edge = e;
2259     }
2260   else
2261     /* Otherwise this must be a multiway branch of some sort.  */
2262     return NULL;
2263 
2264   memset (&ce_info, '\0', sizeof (ce_info));
2265   ce_info.test_bb = test_bb;
2266   ce_info.then_bb = then_edge->dest;
2267   ce_info.else_bb = else_edge->dest;
2268   ce_info.pass = pass;
2269 
2270 #ifdef IFCVT_INIT_EXTRA_FIELDS
2271   IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2272 #endif
2273 
2274   if (find_if_block (&ce_info))
2275     goto success;
2276 
2277   if (HAVE_trap && HAVE_conditional_trap
2278       && find_cond_trap (test_bb, then_edge, else_edge))
2279     goto success;
2280 
2281   if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
2282       && (! HAVE_conditional_execution || reload_completed))
2283     {
2284       if (find_if_case_1 (test_bb, then_edge, else_edge))
2285 	goto success;
2286       if (find_if_case_2 (test_bb, then_edge, else_edge))
2287 	goto success;
2288     }
2289 
2290   return NULL;
2291 
2292  success:
2293   if (rtl_dump_file)
2294     fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2295   return ce_info.test_bb;
2296 }
2297 
2298 /* Determine whether a block has two edges, one of which falls through to the
2299    next block and the other jumps to a specific block, so that we can tell if
2300    the block is part of an && test or an || test.  Return -1 if the block does
2301    not have that shape, otherwise the number of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
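
/* For instance, a condition such as "if (a && b) x = y;" normally expands
   into one test block per term: the block testing A jumps to the join/else
   code when A is false and falls through to the block testing B, which is
   exactly the jump-and-fallthru shape recognized here.  */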
2302 
2303 static int
2304 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2305 {
2306   edge cur_edge;
2307   int fallthru_p = FALSE;
2308   int jump_p = FALSE;
2309   rtx insn;
2310   rtx end;
2311   int n_insns = 0;
2312 
2313   if (!cur_bb || !target_bb)
2314     return -1;
2315 
2316   /* If no edges, obviously it doesn't jump or fallthru.  */
2317   if (cur_bb->succ == NULL_EDGE)
2318     return FALSE;
2319 
2320   for (cur_edge = cur_bb->succ;
2321        cur_edge != NULL_EDGE;
2322        cur_edge = cur_edge->succ_next)
2323     {
2324       if (cur_edge->flags & EDGE_COMPLEX)
2325 	/* Anything complex isn't what we want.  */
2326 	return -1;
2327 
2328       else if (cur_edge->flags & EDGE_FALLTHRU)
2329 	fallthru_p = TRUE;
2330 
2331       else if (cur_edge->dest == target_bb)
2332 	jump_p = TRUE;
2333 
2334       else
2335 	return -1;
2336     }
2337 
2338   if ((jump_p & fallthru_p) == 0)
2339     return -1;
2340 
2341   /* Don't allow calls in the block, since this is used to group && and ||
2342      together for conditional execution support.  ??? we should support
2343      conditional execution across calls for IA-64 some day, but
2344      for now it makes the code simpler.  */
2345   end = BB_END (cur_bb);
2346   insn = BB_HEAD (cur_bb);
2347 
2348   while (insn != NULL_RTX)
2349     {
2350       if (GET_CODE (insn) == CALL_INSN)
2351 	return -1;
2352 
2353       if (INSN_P (insn)
2354 	  && GET_CODE (insn) != JUMP_INSN
2355 	  && GET_CODE (PATTERN (insn)) != USE
2356 	  && GET_CODE (PATTERN (insn)) != CLOBBER)
2357 	n_insns++;
2358 
2359       if (insn == end)
2360 	break;
2361 
2362       insn = NEXT_INSN (insn);
2363     }
2364 
2365   return n_insns;
2366 }
2367 
2368 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2369    block.  If so, we'll try to convert the insns to not require the branch.
2370    Return TRUE if we were successful at converting the block.  */
2371 
2372 static int
2373 find_if_block (struct ce_if_block * ce_info)
2374 {
2375   basic_block test_bb = ce_info->test_bb;
2376   basic_block then_bb = ce_info->then_bb;
2377   basic_block else_bb = ce_info->else_bb;
2378   basic_block join_bb = NULL_BLOCK;
2379   edge then_succ = then_bb->succ;
2380   edge else_succ = else_bb->succ;
2381   int then_predecessors;
2382   int else_predecessors;
2383   edge cur_edge;
2384   basic_block next;
2385 
2386   ce_info->last_test_bb = test_bb;
2387 
2388   /* Discover if any fall through predecessors of the current test basic block
2389      were && tests (which jump to the else block) or || tests (which jump to
2390      the then block).  */
2391   if (HAVE_conditional_execution && reload_completed
2392       && test_bb->pred != NULL_EDGE
2393       && test_bb->pred->pred_next == NULL_EDGE
2394       && test_bb->pred->flags == EDGE_FALLTHRU)
2395     {
2396       basic_block bb = test_bb->pred->src;
2397       basic_block target_bb;
2398       int max_insns = MAX_CONDITIONAL_EXECUTE;
2399       int n_insns;
2400 
2401       /* Determine if the preceding block is an && or || block.  */
2402       if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2403 	{
2404 	  ce_info->and_and_p = TRUE;
2405 	  target_bb = else_bb;
2406 	}
2407       else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2408 	{
2409 	  ce_info->and_and_p = FALSE;
2410 	  target_bb = then_bb;
2411 	}
2412       else
2413 	target_bb = NULL_BLOCK;
2414 
2415       if (target_bb && n_insns <= max_insns)
2416 	{
2417 	  int total_insns = 0;
2418 	  int blocks = 0;
2419 
2420 	  ce_info->last_test_bb = test_bb;
2421 
2422 	  /* Found at least one && or || block, look for more.  */
2423 	  do
2424 	    {
2425 	      ce_info->test_bb = test_bb = bb;
2426 	      total_insns += n_insns;
2427 	      blocks++;
2428 
2429 	      if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2430 		break;
2431 
2432 	      bb = bb->pred->src;
2433 	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2434 	    }
2435 	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2436 
2437 	  ce_info->num_multiple_test_blocks = blocks;
2438 	  ce_info->num_multiple_test_insns = total_insns;
2439 
2440 	  if (ce_info->and_and_p)
2441 	    ce_info->num_and_and_blocks = blocks;
2442 	  else
2443 	    ce_info->num_or_or_blocks = blocks;
2444 	}
2445     }
2446 
2447   /* Count the number of edges the THEN and ELSE blocks have.  */
2448   then_predecessors = 0;
2449   for (cur_edge = then_bb->pred;
2450        cur_edge != NULL_EDGE;
2451        cur_edge = cur_edge->pred_next)
2452     {
2453       then_predecessors++;
2454       if (cur_edge->flags & EDGE_COMPLEX)
2455 	return FALSE;
2456     }
2457 
2458   else_predecessors = 0;
2459   for (cur_edge = else_bb->pred;
2460        cur_edge != NULL_EDGE;
2461        cur_edge = cur_edge->pred_next)
2462     {
2463       else_predecessors++;
2464       if (cur_edge->flags & EDGE_COMPLEX)
2465 	return FALSE;
2466     }
2467 
2468   /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2469      other than any || blocks which jump to the THEN block.  */
2470   if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2471     return FALSE;
2472 
2473   /* The THEN block of an IF-THEN combo must have zero or one successors.  */
2474   if (then_succ != NULL_EDGE
2475       && (then_succ->succ_next != NULL_EDGE
2476           || (then_succ->flags & EDGE_COMPLEX)
2477 	  || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
2478     return FALSE;
2479 
2480   /* If the THEN block has no successors, conditional execution can still
2481      make a conditional call.  Don't do this unless the ELSE block has
2482      only one incoming edge -- the CFG manipulation is too ugly otherwise.
2483      Check for the last insn of the THEN block being an indirect jump, which
2484      is listed as not having any successors, but confuses the rest of the CE
2485      code processing.  ??? we should fix this in the future.  */
2486   if (then_succ == NULL)
2487     {
2488       if (else_bb->pred->pred_next == NULL_EDGE)
2489 	{
2490 	  rtx last_insn = BB_END (then_bb);
2491 
2492 	  while (last_insn
2493 		 && GET_CODE (last_insn) == NOTE
2494 		 && last_insn != BB_HEAD (then_bb))
2495 	    last_insn = PREV_INSN (last_insn);
2496 
2497 	  if (last_insn
2498 	      && GET_CODE (last_insn) == JUMP_INSN
2499 	      && ! simplejump_p (last_insn))
2500 	    return FALSE;
2501 
2502 	  join_bb = else_bb;
2503 	  else_bb = NULL_BLOCK;
2504 	}
2505       else
2506 	return FALSE;
2507     }
2508 
2509   /* If the THEN block's successor is the other edge out of the TEST block,
2510      then we have an IF-THEN combo without an ELSE.  */
2511   else if (then_succ->dest == else_bb)
2512     {
2513       join_bb = else_bb;
2514       else_bb = NULL_BLOCK;
2515     }
2516 
2517   /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2518      has exactly one predecessor and one successor, and the outgoing edge
2519      is not complex, then we have an IF-THEN-ELSE combo.  */
2520   else if (else_succ != NULL_EDGE
2521 	   && then_succ->dest == else_succ->dest
2522 	   && else_bb->pred->pred_next == NULL_EDGE
2523 	   && else_succ->succ_next == NULL_EDGE
2524 	   && ! (else_succ->flags & EDGE_COMPLEX)
2525 	   && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
2526     join_bb = else_succ->dest;
2527 
2528   /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
2529   else
2530     return FALSE;
2531 
2532   num_possible_if_blocks++;
2533 
2534   if (rtl_dump_file)
2535     {
2536       fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2537 	       (else_bb) ? "-ELSE" : "",
2538 	       ce_info->pass,
2539 	       test_bb->index, (BB_HEAD (test_bb)) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
2540 	       then_bb->index, (BB_HEAD (then_bb)) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
2541 
2542       if (else_bb)
2543 	fprintf (rtl_dump_file, ", else %d [%d]",
2544 		 else_bb->index, (BB_HEAD (else_bb)) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
2545 
2546       fprintf (rtl_dump_file, ", join %d [%d]",
2547 	       join_bb->index, (BB_HEAD (join_bb)) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
2548 
2549       if (ce_info->num_multiple_test_blocks > 0)
2550 	fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2551 		 ce_info->num_multiple_test_blocks,
2552 		 (ce_info->and_and_p) ? "&&" : "||",
2553 		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2554 		 ce_info->last_test_bb->index,
2555 		 ((BB_HEAD (ce_info->last_test_bb))
2556 		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
2557 		  : -1));
2558 
2559       fputc ('\n', rtl_dump_file);
2560     }
2561 
2562   /* Make sure IF, THEN, and ELSE blocks are adjacent.  Actually, we get the
2563      first condition for free, since we've already asserted that there's a
2564      fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
2565      we checked the FALLTHRU flag, those are already adjacent to the last IF
2566      block.  */
2567   /* ??? As an enhancement, move the ELSE block.  Have to deal with
2568      BLOCK notes, if by no other means than aborting the merge if they
2569      exist.  Sticky enough I don't want to think about it now.  */
2570   next = then_bb;
2571   if (else_bb && (next = next->next_bb) != else_bb)
2572     return FALSE;
2573   if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2574     {
2575       if (else_bb)
2576 	join_bb = NULL;
2577       else
2578 	return FALSE;
2579     }
2580 
2581   /* Do the real work.  */
2582   ce_info->else_bb = else_bb;
2583   ce_info->join_bb = join_bb;
2584 
2585   return process_if_block (ce_info);
2586 }
2587 
2588 /* Convert a branch over a trap, or a branch
2589    to a trap, into a conditional trap.  */
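
/* As an illustrative sketch, a sequence such as

	if (x == 0) goto trap_label;
	...
	trap_label:
	trap;

   (or the converse branch-over-a-trap form) can be rewritten as a single
   conditional trap on the (possibly reversed) comparison, eliminating the
   branch entirely.  */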
2590 
2591 static int
2592 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
2593 {
2594   basic_block then_bb = then_edge->dest;
2595   basic_block else_bb = else_edge->dest;
2596   basic_block other_bb, trap_bb;
2597   rtx trap, jump, cond, cond_earliest, seq;
2598   enum rtx_code code;
2599 
2600   /* Locate the block with the trap instruction.  */
2601   /* ??? While we look for no successors, we really ought to allow
2602      EH successors.  Need to fix merge_if_block for that to work.  */
2603   if ((trap = block_has_only_trap (then_bb)) != NULL)
2604     trap_bb = then_bb, other_bb = else_bb;
2605   else if ((trap = block_has_only_trap (else_bb)) != NULL)
2606     trap_bb = else_bb, other_bb = then_bb;
2607   else
2608     return FALSE;
2609 
2610   if (rtl_dump_file)
2611     {
2612       fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2613 	       test_bb->index, trap_bb->index);
2614     }
2615 
2616   /* If this is not a standard conditional jump, we can't parse it.  */
2617   jump = BB_END (test_bb);
2618   cond = noce_get_condition (jump, &cond_earliest);
2619   if (! cond)
2620     return FALSE;
2621 
2622   /* If the conditional jump is more than just a conditional jump, then
2623      we cannot do if-conversion on this block.  */
2624   if (! onlyjump_p (jump))
2625     return FALSE;
2626 
2627   /* We must be comparing objects whose modes imply the size.  */
2628   if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2629     return FALSE;
2630 
2631   /* Reverse the comparison code, if necessary.  */
2632   code = GET_CODE (cond);
2633   if (then_bb == trap_bb)
2634     {
2635       code = reversed_comparison_code (cond, jump);
2636       if (code == UNKNOWN)
2637 	return FALSE;
2638     }
2639 
2640   /* Attempt to generate the conditional trap.  */
2641   seq = gen_cond_trap (code, XEXP (cond, 0),
2642 		       XEXP (cond, 1),
2643 		       TRAP_CODE (PATTERN (trap)));
2644   if (seq == NULL)
2645     return FALSE;
2646 
2647   num_true_changes++;
2648 
2649   /* Emit the new insns before cond_earliest.  */
2650   emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
2651 
2652   /* Delete the trap block if possible.  */
2653   remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2654   if (trap_bb->pred == NULL)
2655     {
2656       if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2657 	delete_from_dominance_info (CDI_POST_DOMINATORS, trap_bb);
2658       delete_block (trap_bb);
2659     }
2660 
2661   /* If the non-trap block and the test are now adjacent, merge them.
2662      Otherwise we must insert a direct branch.  */
2663   if (test_bb->next_bb == other_bb)
2664     {
2665       struct ce_if_block new_ce_info;
2666       delete_insn (jump);
2667       memset (&new_ce_info, '\0', sizeof (new_ce_info));
2668       new_ce_info.test_bb = test_bb;
2669       new_ce_info.then_bb = NULL;
2670       new_ce_info.else_bb = NULL;
2671       new_ce_info.join_bb = other_bb;
2672       merge_if_block (&new_ce_info);
2673     }
2674   else
2675     {
2676       rtx lab, newjump;
2677 
2678       lab = JUMP_LABEL (jump);
2679       newjump = emit_jump_insn_after (gen_jump (lab), jump);
2680       LABEL_NUSES (lab) += 1;
2681       JUMP_LABEL (newjump) = lab;
2682       emit_barrier_after (newjump);
2683 
2684       delete_insn (jump);
2685     }
2686 
2687   return TRUE;
2688 }
2689 
2690 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2691    return it.  */
2692 
2693 static rtx
2694 block_has_only_trap (basic_block bb)
2695 {
2696   rtx trap;
2697 
2698   /* We're not the exit block.  */
2699   if (bb == EXIT_BLOCK_PTR)
2700     return NULL_RTX;
2701 
2702   /* The block must have no successors.  */
2703   if (bb->succ)
2704     return NULL_RTX;
2705 
2706   /* The only instruction in the THEN block must be the trap.  */
2707   trap = first_active_insn (bb);
2708   if (! (trap == BB_END (bb)
2709 	 && GET_CODE (PATTERN (trap)) == TRAP_IF
2710          && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2711     return NULL_RTX;
2712 
2713   return trap;
2714 }
2715 
2716 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2717    transformable, but not necessarily the other.  There need be no
2718    JOIN block.
2719 
2720    Return TRUE if we were successful at converting the block.
2721 
2722    Cases we'd like to look at:
2723 
2724    (1)
2725 	if (test) goto over; // x not live
2726 	x = a;
2727 	goto label;
2728 	over:
2729 
2730    becomes
2731 
2732 	x = a;
2733 	if (! test) goto label;
2734 
2735    (2)
2736 	if (test) goto E; // x not live
2737 	x = big();
2738 	goto L;
2739 	E:
2740 	x = b;
2741 	goto M;
2742 
2743    becomes
2744 
2745 	x = b;
2746 	if (test) goto M;
2747 	x = big();
2748 	goto L;
2749 
2750    (3) // This one's really only interesting for targets that can do
2751        // multiway branching, e.g. IA-64 BBB bundles.  For other targets
2752        // it results in multiple branches on a cache line, which often
2753        // does not sit well with predictors.
2754 
2755 	if (test1) goto E; // predicted not taken
2756 	x = a;
2757 	if (test2) goto F;
2758 	...
2759 	E:
2760 	x = b;
2761 	J:
2762 
2763    becomes
2764 
2765 	x = a;
2766 	if (test1) goto E;
2767 	if (test2) goto F;
2768 
2769    Notes:
2770 
2771    (A) Don't do (2) if the branch is predicted against the block we're
2772    eliminating.  Do it anyway if we can eliminate a branch; this requires
2773    that the sole successor of the eliminated block postdominate the other
2774    side of the if.
2775 
2776    (B) With CE, on (3) we can steal from both sides of the if, creating
2777 
2778 	if (test1) x = a;
2779 	if (!test1) x = b;
2780 	if (test1) goto J;
2781 	if (test2) goto F;
2782 	...
2783 	J:
2784 
2785    Again, this is most useful if J postdominates.
2786 
2787    (C) CE substitutes for helpful life information.
2788 
2789    (D) These heuristics need a lot of work.  */
2790 
2791 /* Tests for case 1 above.  */
2792 
2793 static int
2794 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2795 {
2796   basic_block then_bb = then_edge->dest;
2797   basic_block else_bb = else_edge->dest, new_bb;
2798   edge then_succ = then_bb->succ;
2799   int then_bb_index;
2800 
2801   /* THEN has one successor.  */
2802   if (!then_succ || then_succ->succ_next != NULL)
2803     return FALSE;
2804 
2805   /* THEN does not fall through, but is not strange either.  */
2806   if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2807     return FALSE;
2808 
2809   /* THEN has one predecessor.  */
2810   if (then_bb->pred->pred_next != NULL)
2811     return FALSE;
2812 
2813   /* THEN must do something.  */
2814   if (forwarder_block_p (then_bb))
2815     return FALSE;
2816 
2817   num_possible_if_blocks++;
2818   if (rtl_dump_file)
2819     fprintf (rtl_dump_file,
2820 	     "\nIF-CASE-1 found, start %d, then %d\n",
2821 	     test_bb->index, then_bb->index);
2822 
2823   /* THEN is small.  */
2824   if (count_bb_insns (then_bb) > BRANCH_COST)
2825     return FALSE;
2826 
2827   /* Registers set are dead, or are predicable.  */
2828   if (! dead_or_predicable (test_bb, then_bb, else_bb,
2829 			    then_bb->succ->dest, 1))
2830     return FALSE;
2831 
2832   /* Conversion went ok, including moving the insns and fixing up the
2833      jump.  Adjust the CFG to match.  */
2834 
2835   bitmap_operation (test_bb->global_live_at_end,
2836 		    else_bb->global_live_at_start,
2837 		    then_bb->global_live_at_end, BITMAP_IOR);
2838 
2839   new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2840   then_bb_index = then_bb->index;
2841   if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2842     delete_from_dominance_info (CDI_POST_DOMINATORS, then_bb);
2843   delete_block (then_bb);
2844 
2845   /* Make rest of code believe that the newly created block is the THEN_BB
2846      block we removed.  */
2847   if (new_bb)
2848     {
2849       new_bb->index = then_bb_index;
2850       BASIC_BLOCK (then_bb_index) = new_bb;
2851       if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2852 	add_to_dominance_info (CDI_POST_DOMINATORS, new_bb);
2853     }
2854   /* We've possibly created a jump to the next insn; cleanup_cfg will solve that
2855      later.  */
2856 
2857   num_true_changes++;
2858   num_updated_if_blocks++;
2859 
2860   return TRUE;
2861 }
2862 
2863 /* Test for case 2 above.  */
2864 
2865 static int
2866 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2867 {
2868   basic_block then_bb = then_edge->dest;
2869   basic_block else_bb = else_edge->dest;
2870   edge else_succ = else_bb->succ;
2871   rtx note;
2872 
2873   /* ELSE has one successor.  */
2874   if (!else_succ || else_succ->succ_next != NULL)
2875     return FALSE;
2876 
2877   /* ELSE outgoing edge is not complex.  */
2878   if (else_succ->flags & EDGE_COMPLEX)
2879     return FALSE;
2880 
2881   /* ELSE has one predecessor.  */
2882   if (else_bb->pred->pred_next != NULL)
2883     return FALSE;
2884 
2885   /* THEN is not EXIT.  */
2886   if (then_bb->index < 0)
2887     return FALSE;
2888 
2889   /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
2890   note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
2891   if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2892     ;
2893   else if (else_succ->dest->index < 0
2894 	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
2895 			      else_succ->dest))
2896     ;
2897   else
2898     return FALSE;
2899 
2900   num_possible_if_blocks++;
2901   if (rtl_dump_file)
2902     fprintf (rtl_dump_file,
2903 	     "\nIF-CASE-2 found, start %d, else %d\n",
2904 	     test_bb->index, else_bb->index);
2905 
2906   /* ELSE is small.  */
2907   if (count_bb_insns (else_bb) > BRANCH_COST)
2908     return FALSE;
2909 
2910   /* Registers set are dead, or are predicable.  */
2911   if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2912     return FALSE;
2913 
2914   /* Conversion went ok, including moving the insns and fixing up the
2915      jump.  Adjust the CFG to match.  */
2916 
2917   bitmap_operation (test_bb->global_live_at_end,
2918 		    then_bb->global_live_at_start,
2919 		    else_bb->global_live_at_end, BITMAP_IOR);
2920 
2921   if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
2922     delete_from_dominance_info (CDI_POST_DOMINATORS, else_bb);
2923   delete_block (else_bb);
2924 
2925   num_true_changes++;
2926   num_updated_if_blocks++;
2927 
2928   /* ??? We may now fallthru from one of THEN's successors into a join
2929      block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */
2930 
2931   return TRUE;
2932 }
2933 
2934 /* A subroutine of dead_or_predicable called through for_each_rtx.
2935    Return 1 if a memory is found.  */
2936 
2937 static int
2938 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
2939 {
2940   return GET_CODE (*px) == MEM;
2941 }
2942 
2943 /* Used by the code above to perform the actual rtl transformations.
2944    Return TRUE if successful.
2945 
2946    TEST_BB is the block containing the conditional branch.  MERGE_BB
2947    is the block containing the code to manipulate.  NEW_DEST is the
2948    label TEST_BB should be branching to after the conversion.
2949    REVERSEP is true if the sense of the branch should be reversed.  */
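
/* For instance, find_if_case_1 above calls this with MERGE_BB being the THEN
   block and REVERSEP nonzero (the branch is inverted so that it skips the
   hoisted code), while find_if_case_2 passes the ELSE block with REVERSEP
   zero.  */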
2950 
2951 static int
2952 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
2953 		    basic_block other_bb, basic_block new_dest, int reversep)
2954 {
2955   rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2956 
2957   jump = BB_END (test_bb);
2958 
2959   /* Find the extent of the real code in the merge block.  */
2960   head = BB_HEAD (merge_bb);
2961   end = BB_END (merge_bb);
2962 
2963   if (GET_CODE (head) == CODE_LABEL)
2964     head = NEXT_INSN (head);
2965   if (GET_CODE (head) == NOTE)
2966     {
2967       if (head == end)
2968 	{
2969 	  head = end = NULL_RTX;
2970 	  goto no_body;
2971 	}
2972       head = NEXT_INSN (head);
2973     }
2974 
2975   if (GET_CODE (end) == JUMP_INSN)
2976     {
2977       if (head == end)
2978 	{
2979 	  head = end = NULL_RTX;
2980 	  goto no_body;
2981 	}
2982       end = PREV_INSN (end);
2983     }
2984 
2985   /* Disable handling dead code by conditional execution if the machine needs
2986      to do anything funny with the tests, etc.  */
2987 #ifndef IFCVT_MODIFY_TESTS
2988   if (HAVE_conditional_execution)
2989     {
2990       /* In the conditional execution case, we have things easy.  We know
2991 	 the condition is reversible.  We don't have to check life info
2992 	 because we're going to conditionally execute the code anyway.
2993 	 All that's left is making sure the insns involved can actually
2994 	 be predicated.  */
2995 
2996       rtx cond, prob_val;
2997 
2998       cond = cond_exec_get_condition (jump);
2999       if (! cond)
3000 	return FALSE;
3001 
3002       prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3003       if (prob_val)
3004 	prob_val = XEXP (prob_val, 0);
3005 
3006       if (reversep)
3007 	{
3008 	  enum rtx_code rev = reversed_comparison_code (cond, jump);
3009 	  if (rev == UNKNOWN)
3010 	    return FALSE;
3011 	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
3012 			         XEXP (cond, 1));
3013 	  if (prob_val)
3014 	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
3015 	}
3016 
3017       if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
3018 				     prob_val, 0))
3019 	goto cancel;
3020 
3021       earliest = jump;
3022     }
3023   else
3024 #endif
3025     {
3026       /* In the non-conditional execution case, we have to verify that there
3027 	 are no trapping operations, no calls, no references to memory, and
3028 	 that any registers modified are dead at the branch site.  */
3029 
3030       rtx insn, cond, prev;
3031       regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
3032       regset merge_set, tmp, test_live, test_set;
3033       struct propagate_block_info *pbi;
3034       int i, fail = 0;
3035 
3036       /* Check for no calls or trapping operations.  */
3037       for (insn = head; ; insn = NEXT_INSN (insn))
3038 	{
3039 	  if (GET_CODE (insn) == CALL_INSN)
3040 	    return FALSE;
3041 	  if (INSN_P (insn))
3042 	    {
3043 	      if (may_trap_p (PATTERN (insn)))
3044 		return FALSE;
3045 
3046 	      /* ??? Even non-trapping memories such as stack frame
3047 		 references must be avoided.  For stores, we collect
3048 		 no lifetime info; for reads, we'd have to assert
3049 		 true_dependence false against every store in the
3050 		 TEST range.  */
3051 	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
3052 		return FALSE;
3053 	    }
3054 	  if (insn == end)
3055 	    break;
3056 	}
3057 
3058       if (! any_condjump_p (jump))
3059 	return FALSE;
3060 
3061       /* Find the extent of the conditional.  */
3062       cond = noce_get_condition (jump, &earliest);
3063       if (! cond)
3064 	return FALSE;
3065 
3066       /* Collect:
3067 	   MERGE_SET = set of registers set in MERGE_BB
3068 	   TEST_LIVE = set of registers live at EARLIEST
3069 	   TEST_SET  = set of registers set between EARLIEST and the
3070 		       end of the block.  */
3071 
3072       tmp = INITIALIZE_REG_SET (tmp_head);
3073       merge_set = INITIALIZE_REG_SET (merge_set_head);
3074       test_live = INITIALIZE_REG_SET (test_live_head);
3075       test_set = INITIALIZE_REG_SET (test_set_head);
3076 
3077       /* ??? bb->local_set is only valid during calculate_global_regs_live,
3078 	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
3079          since we've already asserted that MERGE_BB is small.  */
3080       propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
3081 
3082       /* For small register class machines, don't lengthen lifetimes of
3083 	 hard registers before reload.  */
3084       if (SMALL_REGISTER_CLASSES && ! reload_completed)
3085 	{
3086           EXECUTE_IF_SET_IN_BITMAP
3087 	    (merge_set, 0, i,
3088 	     {
3089 	       if (i < FIRST_PSEUDO_REGISTER
3090 		   && ! fixed_regs[i]
3091 		   && ! global_regs[i])
3092 		fail = 1;
3093 	     });
3094 	}
3095 
3096       /* For TEST, we're interested in a range of insns, not a whole block.
3097 	 Moreover, we're interested in the insns live from OTHER_BB.  */
3098 
3099       COPY_REG_SET (test_live, other_bb->global_live_at_start);
3100       pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3101 				       0);
3102 
3103       for (insn = jump; ; insn = prev)
3104 	{
3105 	  prev = propagate_one_insn (pbi, insn);
3106 	  if (insn == earliest)
3107 	    break;
3108 	}
3109 
3110       free_propagate_block_info (pbi);
3111 
3112       /* We can perform the transformation if
3113 	   MERGE_SET & (TEST_SET | TEST_LIVE)
3114 	 and
3115 	   TEST_SET & merge_bb->global_live_at_start
3116 	 are empty.  */
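
      /* For instance (purely illustrative): if MERGE_BB sets a register that
	 is also set by, or live within, the compare-and-branch sequence in
	 TEST_BB, hoisting MERGE_BB's insns above that sequence would corrupt
	 the comparison; likewise if the test sequence clobbers something that
	 MERGE_BB still expects to be live on entry.  */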
3117 
3118       bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3119       bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3120       EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3121 
3122       bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3123 			BITMAP_AND);
3124       EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3125 
3126       FREE_REG_SET (tmp);
3127       FREE_REG_SET (merge_set);
3128       FREE_REG_SET (test_live);
3129       FREE_REG_SET (test_set);
3130 
3131       if (fail)
3132 	return FALSE;
3133     }
3134 
3135  no_body:
3136   /* We don't want to use normal invert_jump or redirect_jump because
3137      we don't want delete_insn called.  Also, we want to do our own
3138      change group management.  */
3139 
3140   old_dest = JUMP_LABEL (jump);
3141   if (other_bb != new_dest)
3142     {
3143       new_label = block_label (new_dest);
3144       if (reversep
3145 	  ? ! invert_jump_1 (jump, new_label)
3146 	  : ! redirect_jump_1 (jump, new_label))
3147 	goto cancel;
3148     }
3149 
3150   if (! apply_change_group ())
3151     return FALSE;
3152 
3153   if (other_bb != new_dest)
3154     {
3155       if (old_dest)
3156 	LABEL_NUSES (old_dest) -= 1;
3157       if (new_label)
3158 	LABEL_NUSES (new_label) += 1;
3159       JUMP_LABEL (jump) = new_label;
3160       if (reversep)
3161 	invert_br_probabilities (jump);
3162 
3163       redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3164       if (reversep)
3165 	{
3166 	  gcov_type count, probability;
3167 	  count = BRANCH_EDGE (test_bb)->count;
3168 	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3169 	  FALLTHRU_EDGE (test_bb)->count = count;
3170 	  probability = BRANCH_EDGE (test_bb)->probability;
3171 	  BRANCH_EDGE (test_bb)->probability
3172 	    = FALLTHRU_EDGE (test_bb)->probability;
3173 	  FALLTHRU_EDGE (test_bb)->probability = probability;
3174 	  update_br_prob_note (test_bb);
3175 	}
3176     }
3177 
3178   /* Move the insns out of MERGE_BB to before the branch.  */
3179   if (head != NULL)
3180     {
3181       if (end == BB_END (merge_bb))
3182 	BB_END (merge_bb) = PREV_INSN (head);
3183 
3184       if (squeeze_notes (&head, &end))
3185 	return TRUE;
3186 
3187       reorder_insns (head, end, PREV_INSN (earliest));
3188     }
3189 
3190   /* Remove the jump and edge if we can.  */
3191   if (other_bb == new_dest)
3192     {
3193       delete_insn (jump);
3194       remove_edge (BRANCH_EDGE (test_bb));
3195       /* ??? Can't merge blocks here, as then_bb is still in use.
3196 	 At minimum, the merge will get done just before bb-reorder.  */
3197     }
3198 
3199   return TRUE;
3200 
3201  cancel:
3202   cancel_changes (0);
3203   return FALSE;
3204 }
3205 
3206 /* Main entry point for all if-conversion.  */
3207 
3208 void
3209 if_convert (int x_life_data_ok)
3210 {
3211   basic_block bb;
3212   int pass;
3213 
3214   num_possible_if_blocks = 0;
3215   num_updated_if_blocks = 0;
3216   num_true_changes = 0;
3217   life_data_ok = (x_life_data_ok != 0);
3218 
3219   if (! (* targetm.cannot_modify_jumps_p) ())
3220     mark_loop_exit_edges ();
3221 
3222   /* Free up basic_block_for_insn so that we don't have to keep it
3223      up to date, either here or in merge_blocks.  */
3224   free_basic_block_vars (1);
3225 
3226   /* Compute postdominators if we think we'll use them.  */
3227   if (HAVE_conditional_execution || life_data_ok)
3228     calculate_dominance_info (CDI_POST_DOMINATORS);
3229 
3230   if (life_data_ok)
3231     clear_bb_flags ();
3232 
3233   /* Go through each of the basic blocks looking for things to convert.  If we
3234      have conditional execution, we make multiple passes to allow us to handle
3235      IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
3236   pass = 0;
3237   do
3238     {
3239       cond_exec_changed_p = FALSE;
3240       pass++;
3241 
3242 #ifdef IFCVT_MULTIPLE_DUMPS
3243       if (rtl_dump_file && pass > 1)
3244 	fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3245 #endif
3246 
3247       FOR_EACH_BB (bb)
3248 	{
3249 	  basic_block new_bb;
3250 	  while ((new_bb = find_if_header (bb, pass)))
3251 	    bb = new_bb;
3252 	}
3253 
3254 #ifdef IFCVT_MULTIPLE_DUMPS
3255       if (rtl_dump_file && cond_exec_changed_p)
3256 	print_rtl_with_bb (rtl_dump_file, get_insns ());
3257 #endif
3258     }
3259   while (cond_exec_changed_p);
3260 
3261 #ifdef IFCVT_MULTIPLE_DUMPS
3262   if (rtl_dump_file)
3263     fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3264 #endif
3265 
3266   free_dominance_info (CDI_POST_DOMINATORS);
3267 
3268   if (rtl_dump_file)
3269     fflush (rtl_dump_file);
3270 
3271   clear_aux_for_blocks ();
3272 
3273   /* Rebuild life info for basic blocks that require it.  */
3274   if (num_true_changes && life_data_ok)
3275     {
3276       /* If we allocated new pseudos, we must resize the array for sched1.  */
3277       if (max_regno < max_reg_num ())
3278 	{
3279 	  max_regno = max_reg_num ();
3280 	  allocate_reg_info (max_regno, FALSE, FALSE);
3281 	}
3282       update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3283 					PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3284 					| PROP_KILL_DEAD_CODE);
3285     }
3286 
3287   /* Write the final stats.  */
3288   if (rtl_dump_file && num_possible_if_blocks > 0)
3289     {
3290       fprintf (rtl_dump_file,
3291 	       "\n%d possible IF blocks searched.\n",
3292 	       num_possible_if_blocks);
3293       fprintf (rtl_dump_file,
3294 	       "%d IF blocks converted.\n",
3295 	       num_updated_if_blocks);
3296       fprintf (rtl_dump_file,
3297 	       "%d true changes made.\n\n\n",
3298 	       num_true_changes);
3299     }
3300 
3301 #ifdef ENABLE_CHECKING
3302   verify_flow_info ();
3303 #endif
3304 }
3305