xref: /dragonfly/contrib/gcc-4.7/gcc/ifcvt.c (revision 51871435)
1 /* If-conversion support.
2    Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010,
3    2011
4    Free Software Foundation, Inc.
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published by
10    the Free Software Foundation; either version 3, or (at your option)
11    any later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16    License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 
27 #include "rtl.h"
28 #include "regs.h"
29 #include "function.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "except.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "expr.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "diagnostic-core.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43 #include "timevar.h"
44 #include "tree-pass.h"
45 #include "df.h"
46 #include "vec.h"
47 #include "vecprim.h"
48 #include "dbgcnt.h"
49 
50 #ifndef HAVE_conditional_move
51 #define HAVE_conditional_move 0
52 #endif
53 #ifndef HAVE_incscc
54 #define HAVE_incscc 0
55 #endif
56 #ifndef HAVE_decscc
57 #define HAVE_decscc 0
58 #endif
59 #ifndef HAVE_trap
60 #define HAVE_trap 0
61 #endif
62 
63 #ifndef MAX_CONDITIONAL_EXECUTE
64 #define MAX_CONDITIONAL_EXECUTE \
65   (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
66    + 1)
67 #endif
68 
69 #define IFCVT_MULTIPLE_DUMPS 1
70 
71 #define NULL_BLOCK	((basic_block) NULL)
72 
73 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at  */
74 static int num_possible_if_blocks;
75 
76 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
77    execution.  */
78 static int num_updated_if_blocks;
79 
80 /* # of changes made.  */
81 static int num_true_changes;
82 
83 /* Whether conditional execution changes were made.  */
84 static int cond_exec_changed_p;
85 
86 /* Forward references.  */
87 static int count_bb_insns (const_basic_block);
88 static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
89 static rtx first_active_insn (basic_block);
90 static rtx last_active_insn (basic_block, int);
91 static rtx find_active_insn_before (basic_block, rtx);
92 static rtx find_active_insn_after (basic_block, rtx);
93 static basic_block block_fallthru (basic_block);
94 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
95 static rtx cond_exec_get_condition (rtx);
96 static rtx noce_get_condition (rtx, rtx *, bool);
97 static int noce_operand_ok (const_rtx);
98 static void merge_if_block (ce_if_block_t *);
99 static int find_cond_trap (basic_block, edge, edge);
100 static basic_block find_if_header (basic_block, int);
101 static int block_jumps_and_fallthru_p (basic_block, basic_block);
102 static int noce_find_if_block (basic_block, edge, edge, int);
103 static int cond_exec_find_if_block (ce_if_block_t *);
104 static int find_if_case_1 (basic_block, edge, edge);
105 static int find_if_case_2 (basic_block, edge, edge);
106 static int dead_or_predicable (basic_block, basic_block, basic_block,
107 			       edge, int);
108 static void noce_emit_move_insn (rtx, rtx);
109 static rtx block_has_only_trap (basic_block);
110 
111 /* Count the number of non-jump active insns in BB.  */
112 
113 static int
114 count_bb_insns (const_basic_block bb)
115 {
116   int count = 0;
117   rtx insn = BB_HEAD (bb);
118 
119   while (1)
120     {
121       if (CALL_P (insn) || NONJUMP_INSN_P (insn))
122 	count++;
123 
124       if (insn == BB_END (bb))
125 	break;
126       insn = NEXT_INSN (insn);
127     }
128 
129   return count;
130 }
131 
/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;	/* Accumulated REG_BR_PROB_BASE-scaled cost.  */
  rtx insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable.  */
  scale += REG_BR_PROB_BASE / 8;
  max_cost *= scale;

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
	{
	  int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
	  /* insn_rtx_cost returning 0 means the cost could not be
	     estimated; treat the block as too expensive.  */
	  if (cost == 0)
	    return false;

	  /* If this instruction is the load or set of a "stack" register,
	     such as a floating point register on x87, then the cost of
	     speculatively executing this insn may need to include
	     the additional cost of popping its result off of the
	     register stack.  Unfortunately, correctly recognizing and
	     accounting for this additional overhead is tricky, so for
	     now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
	  {
	    rtx set = single_set (insn);
	    if (set && STACK_REG_P (SET_DEST (set)))
	      return false;
	  }
#endif

	  count += cost;
	  if (count >= max_cost)
	    return false;
	}
      /* Calls are never considered cheap to speculate.  */
      else if (CALL_P (insn))
	return false;

      /* BB_END is inclusive; advance only after examining it.  */
      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return true;
}
191 
192 /* Return the first non-jump active insn in the basic block.  */
193 
194 static rtx
195 first_active_insn (basic_block bb)
196 {
197   rtx insn = BB_HEAD (bb);
198 
199   if (LABEL_P (insn))
200     {
201       if (insn == BB_END (bb))
202 	return NULL_RTX;
203       insn = NEXT_INSN (insn);
204     }
205 
206   while (NOTE_P (insn) || DEBUG_INSN_P (insn))
207     {
208       if (insn == BB_END (bb))
209 	return NULL_RTX;
210       insn = NEXT_INSN (insn);
211     }
212 
213   if (JUMP_P (insn))
214     return NULL_RTX;
215 
216   return insn;
217 }
218 
219 /* Return the last non-jump active (non-jump) insn in the basic block.  */
220 
221 static rtx
222 last_active_insn (basic_block bb, int skip_use_p)
223 {
224   rtx insn = BB_END (bb);
225   rtx head = BB_HEAD (bb);
226 
227   while (NOTE_P (insn)
228 	 || JUMP_P (insn)
229 	 || DEBUG_INSN_P (insn)
230 	 || (skip_use_p
231 	     && NONJUMP_INSN_P (insn)
232 	     && GET_CODE (PATTERN (insn)) == USE))
233     {
234       if (insn == head)
235 	return NULL_RTX;
236       insn = PREV_INSN (insn);
237     }
238 
239   if (LABEL_P (insn))
240     return NULL_RTX;
241 
242   return insn;
243 }
244 
245 /* Return the active insn before INSN inside basic block CURR_BB. */
246 
247 static rtx
248 find_active_insn_before (basic_block curr_bb, rtx insn)
249 {
250   if (!insn || insn == BB_HEAD (curr_bb))
251     return NULL_RTX;
252 
253   while ((insn = PREV_INSN (insn)) != NULL_RTX)
254     {
255       if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
256         break;
257 
258       /* No other active insn all the way to the start of the basic block. */
259       if (insn == BB_HEAD (curr_bb))
260         return NULL_RTX;
261     }
262 
263   return insn;
264 }
265 
266 /* Return the active insn after INSN inside basic block CURR_BB. */
267 
268 static rtx
269 find_active_insn_after (basic_block curr_bb, rtx insn)
270 {
271   if (!insn || insn == BB_END (curr_bb))
272     return NULL_RTX;
273 
274   while ((insn = NEXT_INSN (insn)) != NULL_RTX)
275     {
276       if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
277         break;
278 
279       /* No other active insn all the way to the end of the basic block. */
280       if (insn == BB_END (curr_bb))
281         return NULL_RTX;
282     }
283 
284   return insn;
285 }
286 
287 /* Return the basic block reached by falling though the basic block BB.  */
288 
289 static basic_block
290 block_fallthru (basic_block bb)
291 {
292   edge e = find_fallthru_edge (bb->succs);
293 
294   return (e) ? e->dest : NULL_BLOCK;
295 }
296 
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   All pattern changes are queued with validate_change in group mode;
   the caller is responsible for committing them with
   apply_change_group or discarding them with cancel_changes.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
	return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  An earlier insn clobbered TEST, so no
	 further insn may be converted after it.  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  /* Only the final insn of the block may modify the test,
	     and only when the caller says that is OK.  */
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the change; committed or cancelled as a group by the
	 caller.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* A converted call keeps the branch probability as a REG_BR_PROB
	 note, since the call is now executed conditionally.  */
      if (CALL_P (insn) && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
389 
390 /* Return the condition for a jump.  Do not do any special processing.  */
391 
392 static rtx
393 cond_exec_get_condition (rtx jump)
394 {
395   rtx test_if, cond;
396 
397   if (any_condjump_p (jump))
398     test_if = SET_SRC (pc_set (jump));
399   else
400     return NULL_RTX;
401   cond = XEXP (test_if, 0);
402 
403   /* If this branches to JUMP_LABEL when the condition is false,
404      reverse the condition.  */
405   if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
406       && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
407     {
408       enum rtx_code rev = reversed_comparison_code (cond, jump);
409       if (rev == UNKNOWN)
410 	return NULL_RTX;
411 
412       cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
413 			     XEXP (cond, 1));
414     }
415 
416   return cond;
417 }
418 
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  rtx then_last_head = NULL_RTX;	/* Last match at the head of THEN */
  rtx else_last_head = NULL_RTX;	/* Last match at the head of ELSE */
  rtx then_first_tail = NULL_RTX;	/* First match at the tail of THEN */
  rtx else_first_tail = NULL_RTX;	/* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      /* With both a THEN and an ELSE arm, twice as many insns may be
	 converted.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
	 and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
					 &then_first_tail, &else_first_tail,
					 NULL);
      /* A tail match covering a whole arm leaves nothing to convert in
	 that arm.  */
      if (then_first_tail == BB_HEAD (then_bb))
	then_start = then_end = NULL_RTX;
      if (else_first_tail == BB_HEAD (else_bb))
	else_start = else_end = NULL_RTX;

      if (n_matching > 0)
	{
	  if (then_end)
	    then_end = find_active_insn_before (then_bb, then_first_tail);
	  if (else_end)
	    else_end = find_active_insn_before (else_bb, else_first_tail);
	  /* The matching tail insns need not be predicated, so they do
	     not count against the conversion budget.  */
	  n_insns -= 2 * n_matching;
	}

      if (then_start && else_start)
	{
	  int longest_match = MIN (then_n_insns - n_matching,
				   else_n_insns - n_matching);
	  n_matching
	    = flow_find_head_matching_sequence (then_bb, else_bb,
						&then_last_head,
						&else_last_head,
						longest_match);

	  if (n_matching > 0)
	    {
	      rtx insn;

	      /* We won't pass the insns in the head sequence to
		 cond_exec_process_insns, so we need to test them here
		 to make sure that they don't clobber the condition.  */
	      for (insn = BB_HEAD (then_bb);
		   insn != NEXT_INSN (then_last_head);
		   insn = NEXT_INSN (insn))
		if (!LABEL_P (insn) && !NOTE_P (insn)
		    && !DEBUG_INSN_P (insn)
		    && modified_in_p (test_expr, insn))
		  return FALSE;
	    }

	  if (then_last_head == then_end)
	    then_start = then_end = NULL_RTX;
	  if (else_last_head == else_end)
	    else_start = else_end = NULL_RTX;

	  if (n_matching > 0)
	    {
	      if (then_start)
		then_start = find_active_insn_after (then_bb, then_last_head);
	      if (else_start)
		else_start = find_active_insn_after (else_bb, else_last_head);
	      /* Matching head insns are likewise exempt from predication.  */
	      n_insns -= 2 * n_matching;
	    }
	}
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive the branch-not-taken probability from the REG_BR_PROB note,
     if present.  */
  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  /* Combine the current test with the accumulated tests: for &&
	     chains the true test is the AND of all true tests (and the
	     false test the IOR of the false ones); for || chains it is
	     the reverse.  */
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx from = then_first_tail;
      if (!INSN_P (from))
	from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
722 
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx insn_a, insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
766 
767 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
768 static int noce_try_move (struct noce_if_info *);
769 static int noce_try_store_flag (struct noce_if_info *);
770 static int noce_try_addcc (struct noce_if_info *);
771 static int noce_try_store_flag_constants (struct noce_if_info *);
772 static int noce_try_store_flag_mask (struct noce_if_info *);
773 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
774 			    rtx, rtx, rtx);
775 static int noce_try_cmove (struct noce_if_info *);
776 static int noce_try_cmove_arith (struct noce_if_info *);
777 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
778 static int noce_try_minmax (struct noce_if_info *);
779 static int noce_try_abs (struct noce_if_info *);
780 static int noce_try_sign_mask (struct noce_if_info *);
781 
/* Helper function for noce_try_store_flag*.

   Emit insns that store the value of IF_INFO's condition into X,
   inverting the condition first if REVERSEP is nonzero.  NORMALIZE is
   forwarded to emit_store_flag as its NORMALIZEP argument.  Returns
   the rtx holding the result, or NULL_RTX on failure.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The condition is "complex" if either operand is not a general
     operand; such a condition cannot be handed to emit_store_flag.  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      /* Re-derive the condition from the jump itself, tracking whether
	 the branch sense (and the caller's requested reversal) flips.  */
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump))
	reversep = !reversep;
      if (if_info->then_else_reversed)
	reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Build a (set X (code op0 op1)) pattern directly and see whether
	 the target recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  /* The (LTU/LEU/GEU/GTU) argument tells emit_store_flag the
     comparison is unsigned.  */
  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
849 
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.

   For a non-STRICT_LOW_PART destination, first try a plain move; if it
   is not recognized, fall back to store_bit_field (for ZERO_EXTRACT
   destinations) or to expanding Y's operation through the optab
   machinery.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx seq, insn, target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
	 otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
	     ? emit_move_insn (x, y)
	     : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
	{
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      rtx op = XEXP (x, 0);
	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

	      /* store_bit_field expects START to be relative to
		 BYTES_BIG_ENDIAN and adjusts this value for machines with
		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
		 invoke store_bit_field again it is necessary to have the START
		 value from the first call.  */
	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
		{
		  if (MEM_P (op))
		    start = BITS_PER_UNIT - start - size;
		  else
		    {
		      gcc_assert (REG_P (op));
		      start = BITS_PER_WORD - start - size;
		    }
		}

	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
	      store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
	      return;
	    }

	  /* The plain SET was not recognized; re-expand Y's outermost
	     operation via the corresponding optab, targeting X.  */
	  switch (GET_RTX_CLASS (GET_CODE (y)))
	    {
	    case RTX_UNARY:
	      ot = code_to_optab[GET_CODE (y)];
	      if (ot)
		{
		  start_sequence ();
		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      /* Replace the unrecognized sequence with this one.  */
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    case RTX_BIN_ARITH:
	    case RTX_COMM_ARITH:
	      ot = code_to_optab[GET_CODE (y)];
	      if (ot)
		{
		  start_sequence ();
		  target = expand_binop (GET_MODE (y), ot,
					 XEXP (y, 0), XEXP (y, 1),
					 x, 0, OPTAB_DIRECT);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			  emit_move_insn (x, target);
		      /* Replace the unrecognized sequence with this one.  */
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    default:
	      break;
	    }
	}

      emit_insn (seq);
      return;
    }

  /* STRICT_LOW_PART destination: write only the low part of the inner
     register through store_bit_field.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
		   0, 0, outmode, y);
}
956 
957 /* Return sequence of instructions generated by if conversion.  This
958    function calls end_sequence() to end the current stream, ensures
959    that are instructions are unshared, recognizable non-jump insns.
960    On failure, this function returns a NULL_RTX.  */
961 
962 static rtx
963 end_ifcvt_sequence (struct noce_if_info *if_info)
964 {
965   rtx insn;
966   rtx seq = get_insns ();
967 
968   set_used_flags (if_info->x);
969   set_used_flags (if_info->cond);
970   unshare_all_rtl_in_chain (seq);
971   end_sequence ();
972 
973   /* Make sure that all of the instructions emitted are recognizable,
974      and that we haven't introduced a new jump instruction.
975      As an exercise for the reader, build a general mechanism that
976      allows proper placement of required clobbers.  */
977   for (insn = seq; insn; insn = NEXT_INSN (insn))
978     if (JUMP_P (insn)
979 	|| recog_memoized (insn) == -1)
980       return NULL_RTX;
981 
982   return seq;
983 }
984 
985 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
986    "if (a == b) x = a; else x = b" into "x = b".  */
987 
988 static int
989 noce_try_move (struct noce_if_info *if_info)
990 {
991   rtx cond = if_info->cond;
992   enum rtx_code code = GET_CODE (cond);
993   rtx y, seq;
994 
995   if (code != NE && code != EQ)
996     return FALSE;
997 
998   /* This optimization isn't valid if either A or B could be a NaN
999      or a signed zero.  */
1000   if (HONOR_NANS (GET_MODE (if_info->x))
1001       || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
1002     return FALSE;
1003 
1004   /* Check whether the operands of the comparison are A and in
1005      either order.  */
1006   if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
1007        && rtx_equal_p (if_info->b, XEXP (cond, 1)))
1008       || (rtx_equal_p (if_info->a, XEXP (cond, 1))
1009 	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
1010     {
1011       y = (code == EQ) ? if_info->a : if_info->b;
1012 
1013       /* Avoid generating the move if the source is the destination.  */
1014       if (! rtx_equal_p (if_info->x, y))
1015 	{
1016 	  start_sequence ();
1017 	  noce_emit_move_insn (if_info->x, y);
1018 	  seq = end_ifcvt_sequence (if_info);
1019 	  if (!seq)
1020 	    return FALSE;
1021 
1022 	  emit_insn_before_setloc (seq, if_info->jump,
1023 				   INSN_LOCATOR (if_info->insn_a));
1024 	}
1025       return TRUE;
1026     }
1027   return FALSE;
1028 }
1029 
1030 /* Convert "if (test) x = 1; else x = 0".
1031 
1032    Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
1033    tried in noce_try_store_flag_constants after noce_try_cmove has had
1034    a go at the conversion.  */
1035 
1036 static int
1037 noce_try_store_flag (struct noce_if_info *if_info)
1038 {
1039   int reversep;
1040   rtx target, seq;
1041 
1042   if (CONST_INT_P (if_info->b)
1043       && INTVAL (if_info->b) == STORE_FLAG_VALUE
1044       && if_info->a == const0_rtx)
1045     reversep = 0;
1046   else if (if_info->b == const0_rtx
1047 	   && CONST_INT_P (if_info->a)
1048 	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
1049 	   && (reversed_comparison_code (if_info->cond, if_info->jump)
1050 	       != UNKNOWN))
1051     reversep = 1;
1052   else
1053     return FALSE;
1054 
1055   start_sequence ();
1056 
1057   target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
1058   if (target)
1059     {
1060       if (target != if_info->x)
1061 	noce_emit_move_insn (if_info->x, target);
1062 
1063       seq = end_ifcvt_sequence (if_info);
1064       if (! seq)
1065 	return FALSE;
1066 
1067       emit_insn_before_setloc (seq, if_info->jump,
1068 			       INSN_LOCATOR (if_info->insn_a));
1069       return TRUE;
1070     }
1071   else
1072     {
1073       end_sequence ();
1074       return FALSE;
1075     }
1076 }
1077 
/* Convert "if (test) x = a; else x = b", for A and B constant.

   The store-flag result (0 or STORE_FLAG_VALUE) is massaged with
   shifts, adds and bitwise operations into the required pair of
   constants, choosing the cheapest sequence the branch cost allows.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Pick the normalization for the store-flag result (0, 1, -1, or
	 the raw STORE_FLAG_VALUE) and whether the comparison must be
	 reversed so that the convenient constant lands on the "true"
	 side.  Each case is gated on the branch cost being high enough
	 to pay for the extra arithmetic.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || if_info->branch_cost >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || if_info->branch_cost >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
	normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
	       || if_info->branch_cost >= 3)
	normalize = -1;
      else
	return FALSE;

      /* When reversing, swap the constants and negate the difference so
	 the expansion code below can ignore the reversal.  */
      if (reversep)
	{
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      /* end_ifcvt_sequence closes the stream and validates the emitted
	 insns; on rejection nothing more needs to be undone.  */
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
1205 
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* A must have the form B + C, and the comparison must be reversible:
     the conditional add is performed when the branch around the
     increment is NOT taken, i.e. on the reversed condition.  */
  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  /* Unsignedness flag is derived from the comparison code.  */
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (if_info->branch_cost >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
        {
	  start_sequence ();
	  /* Map the +/-1 addend onto the store-flag result: either the
	     flag value itself can be added or subtracted directly, or
	     the store flag must be normalized to the addend value.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  /* Emit the flag into a fresh pseudo, then add it to B.  */
	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq)
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
1296 
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;

  /* Applies when one arm stores zero and the other leaves X unchanged.
     When the zero is in the "then" arm the comparison must be
     reversed; note the embedded assignment to REVERSEP inside the
     condition below, which only takes effect on the second
     alternative.  */
  reversep = 0;
  if ((if_info->branch_cost >= 2
       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      /* Emit the mask -(test != 0) (normalization -1) into a fresh
	 pseudo, then AND it into X.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATOR (if_info->insn_a));
	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
1345 
/* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit a conditional move selecting VTRUE/VFALSE into X based on the
   comparison CODE of CMP_A against CMP_B.  Return the rtx holding the
   result (possibly X itself), or NULL_RTX on failure.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Construct (set x (if_then_else cond vtrue vfalse)) by hand and
	 probe whether the backend recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  /* Recognized: replay the probe sequence into the enclosing
	     stream.  */
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  unsignedp = (code == LTU || code == GEU
	       || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				  vtrue, vfalse, GET_MODE (x),
				  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      /* The inner modes, subreg offsets and promotion flags of the two
	 subregs must all agree, or a single conditional move in the
	 inner mode cannot represent both arms.  */
      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
	  || byte_vtrue != byte_vfalse
	  || (SUBREG_PROMOTED_VAR_P (vtrue)
	      != SUBREG_PROMOTED_VAR_P (vfalse))
	  || (SUBREG_PROMOTED_UNSIGNED_P (vtrue)
	      != SUBREG_PROMOTED_UNSIGNED_P (vfalse)))
	return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
				      VOIDmode, reg_vtrue, reg_vfalse,
				      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
	return NULL_RTX;

      /* Take a subreg of the inner-mode result, copying the promotion
	 flags from VTRUE, and move it into X.  */
      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_UNSIGNED_SET (target, SUBREG_PROMOTED_UNSIGNED_P (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1453 
1454 /* Try only simple constants and registers here.  More complex cases
1455    are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1456    has had a go at it.  */
1457 
1458 static int
1459 noce_try_cmove (struct noce_if_info *if_info)
1460 {
1461   enum rtx_code code;
1462   rtx target, seq;
1463 
1464   if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1465       && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1466     {
1467       start_sequence ();
1468 
1469       code = GET_CODE (if_info->cond);
1470       target = noce_emit_cmove (if_info, if_info->x, code,
1471 				XEXP (if_info->cond, 0),
1472 				XEXP (if_info->cond, 1),
1473 				if_info->a, if_info->b);
1474 
1475       if (target)
1476 	{
1477 	  if (target != if_info->x)
1478 	    noce_emit_move_insn (if_info->x, target);
1479 
1480 	  seq = end_ifcvt_sequence (if_info);
1481 	  if (!seq)
1482 	    return FALSE;
1483 
1484 	  emit_insn_before_setloc (seq, if_info->jump,
1485 				   INSN_LOCATOR (if_info->insn_a));
1486 	  return TRUE;
1487 	}
1488       else
1489 	{
1490 	  end_sequence ();
1491 	  return FALSE;
1492 	}
1493     }
1494 
1495   return FALSE;
1496 }
1497 
/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      /* Switch to conditionally selecting the addresses; the actual
	 load from the chosen address is emitted near the end.  */
      enum machine_mode address_mode
	= targetm.addr_space.address_mode (MEM_ADDR_SPACE (a));

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
	= insn_rtx_cost (PATTERN (insn_a),
      			 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
	return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
	+= insn_rtx_cost (PATTERN (insn_b),
      			  optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      /* Swap the arms (and their defining insns) and reverse the
	 comparison accordingly.  */
      if (reversep)
	{
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  /* Re-emit a copy of INSN_A retargeted at a fresh pseudo.  */
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set, last;

      /* Unlike the A case above, the set for B is built first and only
	 emitted below, since its placement may have to change.  */
      if (is_mem)
	{
          tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = gen_rtx_SET (VOIDmode, tmp, b);
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
          b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  tmp = PATTERN (tmp);
	}

      /* If insn to set up A clobbers any registers B depends on, try to
	 swap insn that sets up A with the one that sets up B.  If even
	 that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
	{
	  tmp = emit_insn_before (tmp, get_insns ());
	  if (modified_in_p (orig_a, tmp))
	    goto end_seq_and_fail;
	}
      else
	tmp = emit_insn (tmp);

      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (tmp, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = end_ifcvt_sequence (if_info);
  if (!tmp)
    return FALSE;

  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1694 
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.

   Return a condition that mentions TARGET, setting *EARLIEST to the
   insn where that condition is computed, or NULL on failure.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  /* Determine whether the branch condition must be viewed reversed:
     the jump may branch on the false edge, and the caller may have
     already swapped then/else.  */
  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
	  && BLOCK_FOR_INSN (prev_insn)
	     == BLOCK_FOR_INSN (if_info->cond_earliest)
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (CONST_INT_P (src))
	    {
	      /* Substitute the constant for whichever operand the
		 previous insn set.  */
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Keep the constant on the right-hand side.  */
	      if (CONST_INT_P (op_a))
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (CONST_INT_P (op_b))
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  /* Undo the canonicalization: each case nudges the comparison
	     code and constant by one in a direction that preserves the
	     predicate.  */
	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1847 
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
    return FALSE;

  /* Find a form of the condition that mentions A directly.  */
  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code so that A appears as the first operand.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the alternate condition actually used so later passes see
     the correct earliest insn.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1940 
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
   "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
   etc.  Returns TRUE if the conversion succeeded and the replacement
   sequence was emitted, FALSE otherwise.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;
  bool one_cmpl = false;

  /* Reject modes with signed zeros.  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.
     After this chain, B is the plain (un-negated) operand, NEGATE is
     nonzero when the final result must be negated (NABS), and ONE_CMPL
     selects the one's-complement variant (NOT instead of NEG).  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
    {
      negate = 0;
      one_cmpl = true;
    }
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
      one_cmpl = true;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  If B appears as
     the second operand of the comparison, the sense of the test is
     reversed, so flip NEGATE.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    {
      c = XEXP (cond, 0);
      negate = !negate;
    }
  else
    return FALSE;

  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
  if (REG_P (c))
    {
      rtx set, insn = prev_nonnote_insn (earliest);
      if (insn
	  && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
	  && (set = single_set (insn))
	  && rtx_equal_p (SET_DEST (set), c))
	{
	  rtx note = find_reg_equal_equiv_note (insn);
	  if (note)
	    c = XEXP (note, 0);
	  else
	    c = SET_SRC (set);
	}
      else
	return FALSE;
    }
  /* The zero may also be hidden behind a constant-pool load.  */
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  A "less than" style
     test selects the negated arm for negative values, which is the
     opposite sense from the canonical ABS form, so flip NEGATE.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();
  if (one_cmpl)
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
                                         if_info->x);
  else
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    {
      if (one_cmpl)
        target = expand_simple_unop (GET_MODE (target), NOT, target,
                                     if_info->x, 0);
      else
        target = expand_simple_unop (GET_MODE (target), NEG, target,
                                     if_info->x, 0);
    }

  /* The expansion may fail on targets without suitable insns.  */
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
2087 
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;",
   where C is the sign-bit shift count.  Returns TRUE on success.  */

static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c, seq;
  enum machine_mode mode;
  enum rtx_code code;
  bool t_unconditional;

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  /* T is the arm selected when M is negative; the other arm must be
     zero.  Accept "m < 0" as well as the equivalent "m <= -1", and
     the reversed forms when the zero is on the THEN arm.  */
  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
	  || (code == LE && c == constm1_rtx))
	t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
	  || (code == GT && c == constm1_rtx))
	t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

  /* This is only profitable if T is unconditionally executed/evaluated in the
     original insn sequence or T is cheap.  The former happens if B is the
     non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
     INSN_B which can happen for e.g. conditional stores to memory.  For the
     cost computation use the block TEST_BB where the evaluation will end up
     after the transformation.  */
  t_unconditional =
    (t == if_info->b
     && (if_info->insn_b == NULL_RTX
	 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
  if (!(t_unconditional
	|| (set_src_cost (t, optimize_bb_for_speed_p (if_info->test_bb))
	    < COSTS_N_INSNS (2))))
    return FALSE;

  start_sequence ();
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
	: NULL_RTX;

  /* Either expansion may fail on a given target; bail out cleanly,
     discarding the partial sequence.  */
  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;
}
2163 
2164 
/* Optimize away "if (x & C) x |= C" and similar bit manipulation
   transformations, where C is a single-bit constant tested via
   ZERO_EXTRACT.  Returns TRUE if the branch was eliminated.  */

static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result, seq;
  enum machine_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      /* Require a single-bit extract of X at a constant position.  */
      if (XEXP (cond, 1) != const1_rtx
	  || !CONST_INT_P (XEXP (cond, 2))
	  || ! rtx_equal_p (x, XEXP (cond, 0)))
	return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
      /* ZERO_EXTRACT positions count from the MSB on big-endian
	 bit-order targets; normalize to an LSB-relative bit number.  */
      if (BITS_BIG_ENDIAN)
	bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
	return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (unsigned HOST_WIDE_INT) 1 << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
	result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
	{
	  /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
	  result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
	  result = simplify_gen_binary (IOR, mode, x, result);
	}
      else
	{
	  /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
	  result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
	  result = simplify_gen_binary (AND, mode, x, result);
	}
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
	  || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  /* RESULT == NULL_RTX means the conditional assignment is a no-op;
     returning TRUE with nothing emitted lets the caller delete it.  */
  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}
2264 
2265 
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
   THEN block of the caller, and we have to reverse the condition.

   Returns the condition rtx, storing the insn at which it becomes
   valid in *EARLIEST, or NULL_RTX if no usable condition exists.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
{
  rtx cond, set, tmp;
  bool reverse;

  /* Only simple conditional jumps can be analyzed here.  */
  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* We may have to reverse because the caller's if block is not canonical,
     i.e. the THEN block isn't the fallthrough block for the TEST block
     (see find_if_header).  */
  if (then_else_reversed)
    reverse = !reverse;

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
      && (GET_MODE (tmp) != BImode
          || !targetm.small_register_classes_for_mode_p (BImode)))
    {
      /* A plain register comparison is valid anywhere, so the
	 condition is already valid at JUMP itself.  */
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, earliest,
				NULL_RTX, false, true);

  /* We don't handle side-effects in the condition, like handling
     REG_INC notes and making sure no duplicate conditions are emitted.  */
  if (tmp != NULL_RTX && side_effects_p (tmp))
    return NULL_RTX;

  return tmp;
}
2322 
2323 /* Return true if OP is ok for if-then-else processing.  */
2324 
2325 static int
2326 noce_operand_ok (const_rtx op)
2327 {
2328   if (side_effects_p (op))
2329     return FALSE;
2330 
2331   /* We special-case memories, so handle any of them with
2332      no address side effects.  */
2333   if (MEM_P (op))
2334     return ! side_effects_p (XEXP (op, 0));
2335 
2336   return ! may_trap_p (op);
2337 }
2338 
2339 /* Return true if a write into MEM may trap or fault.  */
2340 
2341 static bool
2342 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2343 {
2344   rtx addr;
2345 
2346   if (MEM_READONLY_P (mem))
2347     return true;
2348 
2349   if (may_trap_or_fault_p (mem))
2350     return true;
2351 
2352   addr = XEXP (mem, 0);
2353 
2354   /* Call target hook to avoid the effects of -fpic etc....  */
2355   addr = targetm.delegitimize_address (addr);
2356 
2357   while (addr)
2358     switch (GET_CODE (addr))
2359       {
2360       case CONST:
2361       case PRE_DEC:
2362       case PRE_INC:
2363       case POST_DEC:
2364       case POST_INC:
2365       case POST_MODIFY:
2366 	addr = XEXP (addr, 0);
2367 	break;
2368       case LO_SUM:
2369       case PRE_MODIFY:
2370 	addr = XEXP (addr, 1);
2371 	break;
2372       case PLUS:
2373 	if (CONST_INT_P (XEXP (addr, 1)))
2374 	  addr = XEXP (addr, 0);
2375 	else
2376 	  return false;
2377 	break;
2378       case LABEL_REF:
2379 	return true;
2380       case SYMBOL_REF:
2381 	if (SYMBOL_REF_DECL (addr)
2382 	    && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2383 	  return true;
2384 	return false;
2385       default:
2386 	return false;
2387       }
2388 
2389   return false;
2390 }
2391 
/* Return whether we can use store speculation for MEM.  TOP_BB is the
   basic block above the conditional block where we are considering
   doing the speculative store.  We look for whether MEM is set
   unconditionally later in the function.  */

static bool
noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
{
  basic_block dominator;

  /* Walk up the post-dominator chain: every block visited is executed
     whenever TOP_BB is, so a store to MEM found there proves MEM is
     written unconditionally.  */
  for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
       dominator != NULL;
       dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
    {
      rtx insn;

      FOR_BB_INSNS (dominator, insn)
	{
	  /* If we see something that might be a memory barrier, we
	     have to stop looking.  Even if the MEM is set later in
	     the function, we still don't want to set it
	     unconditionally before the barrier.  */
	  if (INSN_P (insn)
	      && (volatile_insn_p (PATTERN (insn))
		  || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
	    return false;

	  /* An unconditional store to MEM: speculation is safe.  */
	  if (memory_modified_in_insn_p (mem, insn))
	    return true;
	  /* If the address itself is modified first, a later store to
	     "MEM" no longer refers to the same location.  */
	  if (modified_in_p (XEXP (mem, 0), insn))
	    return false;

	}
    }

  return false;
}
2429 
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it without using conditional execution.  Return TRUE if we were successful
   at converting the block.  On success the THEN (and ELSE) blocks are
   deleted and the CFG is updated.  */

static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;	/* test block */
  basic_block then_bb = if_info->then_bb;	/* THEN */
  basic_block else_bb = if_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;	/* JOIN */
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* Look for one of the potential sets.  The THEN block must consist
     of exactly one active insn that is a single_set.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* No ELSE block: pattern (2).  Look for a set of X just before
	 the condition computation that can serve as the "else" value.  */
      insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
	  || !NONJUMP_INSN_P (insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || ! noce_operand_ok (SET_SRC (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b), insn_b, jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, insn_b, jump))
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (HARD_REGISTER_P (x)
	  && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
    {
      if (GET_MODE (x) == BLKmode)
	return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
	  && (!CONST_INT_P (XEXP (x, 1))
	      || !CONST_INT_P (XEXP (x, 2))))
	return FALSE;

      /* Compute into a fresh pseudo; the result is copied back into
	 ORIG_X at the "success" label below.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  /* NOTE: else_bb is known nonnull here; the recheck below is
	     harmless.  */
	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    {
      /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
	 for optimizations if writing to x may trap or fault,
	 i.e. it's a memory other than a static var or a stack slot,
	 is misaligned on strict aligned machines or is read-only.  If
	 x is a read-only memory, then the program is valid only if we
	 avoid the store into it.  If there are stores on both the
	 THEN and ELSE arms, then we can go ahead with the conversion;
	 either the program is broken, or the condition is always
	 false such that the other memory is selected.  */
      if (noce_mem_write_may_trap_or_fault_p (orig_x))
	return FALSE;

      /* Avoid store speculation: given "if (...) x = a" where x is a
	 MEM, we only want to do the store if x is always set
	 somewhere in the function.  This avoids cases like
	   if (pthread_mutex_trylock(mutex))
	     ++global_variable;
	 where we only want global_variable to be changed if the mutex
	 is held.  FIXME: This should ideally be expressed directly in
	 RTL somehow.  */
      if (!noce_can_store_speculate_p (test_bb, orig_x))
	return FALSE;
    }

  if (noce_try_move (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_store_flag_constants (if_info))
	goto success;
      if (noce_try_addcc (if_info))
	goto success;
      if (noce_try_store_flag_mask (if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (if_info))
	goto success;
      if (noce_try_sign_mask (if_info))
	goto success;
    }

  /* If we found an implicit "else" set (pattern (2)) but nothing
     matched, retry once treating it as pattern (3) with B == X.  */
  if (!else_bb && set_b)
    {
      insn_b = set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
2689 
/* Check whether a block is suitable for conditional move conversion.
   Every insn must be a simple set of a register to a constant or a
   register.  For each assignment, store the value in the array VALS,
   indexed by register number, then store the register number in
   REGS.  COND is the condition we will test.  Returns TRUE if every
   insn in BB qualifies, FALSE otherwise (VALS/REGS may then be
   partially filled).  */

static int
check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) **regs,
		       rtx cond)
{
  rtx insn;

   /* We can only handle simple jumps at the end of the basic block.
      It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      /* Notes, debug insns and the final jump are not converted.  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      if (!set)
	return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      /* Only pseudo (or safe hard) register destinations are handled;
	 VALS is indexed by register number.  */
      if (!REG_P (dest)
	  || (HARD_REGISTER_P (dest)
	      && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
	return FALSE;

      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
	return FALSE;

      if (side_effects_p (src) || side_effects_p (dest))
	return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
	return FALSE;

      /* Don't try to handle this if the source register was
	 modified earlier in the block.  */
      if ((REG_P (src)
	   && vals[REGNO (src)] != NULL)
	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
	      && vals[REGNO (SUBREG_REG (src))] != NULL))
	return FALSE;

      /* Don't try to handle this if the destination register was
	 modified earlier in the block.  */
      if (vals[REGNO (dest)] != NULL)
	return FALSE;

      /* Don't try to handle this if the condition uses the
	 destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
	return FALSE;

      /* Don't try to handle this if the source register is modified
	 later in the block.  */
      if (!CONSTANT_P (src)
	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
	return FALSE;

      /* Record the value assigned to DEST and remember DEST's regno.  */
      vals[REGNO (dest)] = src;

      VEC_safe_push (int, heap, *regs, REGNO (dest));
    }

  return TRUE;
}
2765 
/* Given a basic block BB suitable for conditional move conversion,
   a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
   register values depending on COND, emit the insns in the block as
   conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
   processed.  The caller has started a sequence for the conversion.
   Return true if successful, false if something goes wrong.  */

static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
			    basic_block bb, rtx cond,
			    rtx *then_vals, rtx *else_vals,
			    bool else_block_p)
{
  enum rtx_code code;
  rtx insn, cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;
      unsigned int regno;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      /* check_cond_move_block already guaranteed a single set to a
	 register for every insn we convert.  */
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);
      regno = REGNO (dest);

      /* T/E are the values DEST takes when COND is true/false;
	 NULL means the corresponding arm leaves DEST unchanged.  */
      t = then_vals[regno];
      e = else_vals[regno];

      if (else_block_p)
	{
	  /* If this register was set in the then block, we already
	     handled this case there.  */
	  if (t)
	    continue;
	  t = dest;
	  gcc_assert (e);
	}
      else
	{
	  gcc_assert (t);
	  if (!e)
	    e = dest;
	}

      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
				t, e);
      if (!target)
	return false;

      if (target != dest)
	noce_emit_move_insn (dest, target);
    }

  return true;
}
2830 
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it using only conditional moves.  Return TRUE if we were successful at
   converting the block.  On success the THEN (and ELSE) blocks are
   deleted and the CFG is updated.  */

static int
cond_move_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  basic_block join_bb = if_info->join_bb;
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx seq, loc_insn;
  int max_reg, size, c, reg;
  rtx *then_vals;
  rtx *else_vals;
  VEC (int, heap) *then_regs = NULL;
  VEC (int, heap) *else_regs = NULL;
  unsigned int i;

  /* Build a mapping for each block to the value used for each
     register.  The tables are stack-allocated and indexed by
     register number; NULL means "not set in that block".  */
  max_reg = max_reg_num ();
  size = (max_reg + 1) * sizeof (rtx);
  then_vals = (rtx *) alloca (size);
  else_vals = (rtx *) alloca (size);
  memset (then_vals, 0, size);
  memset (else_vals, 0, size);

  /* Make sure the blocks are suitable.  */
  if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
      || (else_bb
	  && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Make sure the blocks can be used together.  If the same register
     is set in both blocks, and is not set to a constant in both
     cases, then both blocks must set it to the same register.  We
     have already verified that if it is set to a register, that the
     source register does not change after the assignment.  Also count
     the number of registers set in only one of the blocks.  */
  c = 0;
  FOR_EACH_VEC_ELT (int, then_regs, i, reg)
    {
      if (!then_vals[reg] && !else_vals[reg])
	continue;

      if (!else_vals[reg])
	++c;
      else
	{
	  if (!CONSTANT_P (then_vals[reg])
	      && !CONSTANT_P (else_vals[reg])
	      && !rtx_equal_p (then_vals[reg], else_vals[reg]))
	    {
	      VEC_free (int, heap, then_regs);
	      VEC_free (int, heap, else_regs);
	      return FALSE;
	    }
	}
    }

  /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
  FOR_EACH_VEC_ELT (int, else_regs, i, reg)
    if (!then_vals[reg])
      ++c;

  /* Make sure it is reasonable to convert this block.  What matters
     is the number of assignments currently made in only one of the
     branches, since if we convert we are going to always execute
     them.  */
  if (c > MAX_CONDITIONAL_EXECUTE)
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Try to emit the conditional moves.  First do the then block,
     then do anything left in the else blocks.  */
  start_sequence ();
  if (!cond_move_convert_if_block (if_info, then_bb, cond,
				   then_vals, else_vals, false)
      || (else_bb
	  && !cond_move_convert_if_block (if_info, else_bb, cond,
					  then_vals, else_vals, true)))
    {
      end_sequence ();
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Pick a source location for the emitted sequence from whichever
     arm has an active insn.  */
  loc_insn = first_active_insn (then_bb);
  if (!loc_insn)
    {
      loc_insn = first_active_insn (else_bb);
      gcc_assert (loc_insn);
    }
  emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));

  /* Update the CFG: the THEN/ELSE blocks are gone and TEST falls
     through (or merges) into JOIN.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;

  VEC_free (int, heap, then_regs);
  VEC_free (int, heap, else_regs);
  return TRUE;
}
2969 
2970 
2971 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
2972    IF-THEN-ELSE-JOIN block.
2973 
2974    If so, we'll try to convert the insns to not require the branch,
2975    using only transformations that do not require conditional execution.
2976 
2977    Return TRUE if we were successful at converting the block.  */
2978 
static int
noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
		    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  /* Set when we match an IF-ELSE-JOIN shape and therefore treat the
     ELSE block as the THEN block under the reversed jump condition.  */
  bool then_else_reversed = false;
  rtx jump, cond;
  rtx cond_earliest;
  struct noce_if_info if_info;

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block.  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block.  */
  else if (single_pred_p (then_edge->dest)
	   && single_succ_p (then_edge->dest)
	   && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
	   && single_succ_p (else_edge->dest)
	   && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
	 To make this work, we have to invert the THEN and ELSE blocks
	 and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
	       (else_bb) ? "-ELSE" : "",
	       pass, test_bb->index, then_bb->index);

      if (else_bb)
	fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.  */
  cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  /* Estimated cost of the branch being removed; consulted by the
     conversion cost checks.  */
  if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
				     predictable_edge_p (then_edge));

  /* Do the real work.  Try the general noce transformations first;
     if the target has conditional moves, fall back to those.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}
3093 
3094 
3095 /* Merge the blocks and mark for local life update.  */
3096 
static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      /* Walk the fallthru chain from the first test block down to the
	 last one, folding each && / || test block into COMBO.  */
      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
		    || (NONJUMP_INSN_P (last)
			&& GET_CODE (PATTERN (last)) == TRAP_IF
			&& (TRAP_CONDITION (PATTERN (last))
			    == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
	gcc_assert (JUMP_P (last)
		    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
			&& CALL_P (last)
			&& SIBLING_CALL_P (last))
		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
			&& can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN cleanly and update the dataflow try
	 again on this pass.  */
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
		  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  /* Bookkeeping for the conversion statistics.  */
  num_updated_if_blocks++;
}
3205 
3206 /* Find a block ending in a simple IF condition and try to transform it
3207    in some way.  When converting a multi-block condition, put the new code
3208    in the first such block and delete the rest.  Return a pointer to this
3209    first block if some transformation was done.  Return NULL otherwise.  */
3210 
3211 static basic_block
3212 find_if_header (basic_block test_bb, int pass)
3213 {
3214   ce_if_block_t ce_info;
3215   edge then_edge;
3216   edge else_edge;
3217 
3218   /* The kind of block we're looking for has exactly two successors.  */
3219   if (EDGE_COUNT (test_bb->succs) != 2)
3220     return NULL;
3221 
3222   then_edge = EDGE_SUCC (test_bb, 0);
3223   else_edge = EDGE_SUCC (test_bb, 1);
3224 
3225   if (df_get_bb_dirty (then_edge->dest))
3226     return NULL;
3227   if (df_get_bb_dirty (else_edge->dest))
3228     return NULL;
3229 
3230   /* Neither edge should be abnormal.  */
3231   if ((then_edge->flags & EDGE_COMPLEX)
3232       || (else_edge->flags & EDGE_COMPLEX))
3233     return NULL;
3234 
3235   /* Nor exit the loop.  */
3236   if ((then_edge->flags & EDGE_LOOP_EXIT)
3237       || (else_edge->flags & EDGE_LOOP_EXIT))
3238     return NULL;
3239 
3240   /* The THEN edge is canonically the one that falls through.  */
3241   if (then_edge->flags & EDGE_FALLTHRU)
3242     ;
3243   else if (else_edge->flags & EDGE_FALLTHRU)
3244     {
3245       edge e = else_edge;
3246       else_edge = then_edge;
3247       then_edge = e;
3248     }
3249   else
3250     /* Otherwise this must be a multiway branch of some sort.  */
3251     return NULL;
3252 
3253   memset (&ce_info, 0, sizeof (ce_info));
3254   ce_info.test_bb = test_bb;
3255   ce_info.then_bb = then_edge->dest;
3256   ce_info.else_bb = else_edge->dest;
3257   ce_info.pass = pass;
3258 
3259 #ifdef IFCVT_INIT_EXTRA_FIELDS
3260   IFCVT_INIT_EXTRA_FIELDS (&ce_info);
3261 #endif
3262 
3263   if (!reload_completed
3264       && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3265     goto success;
3266 
3267   if (reload_completed
3268       && targetm.have_conditional_execution ()
3269       && cond_exec_find_if_block (&ce_info))
3270     goto success;
3271 
3272   if (HAVE_trap
3273       && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
3274       && find_cond_trap (test_bb, then_edge, else_edge))
3275     goto success;
3276 
3277   if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3278       && (reload_completed || !targetm.have_conditional_execution ()))
3279     {
3280       if (find_if_case_1 (test_bb, then_edge, else_edge))
3281 	goto success;
3282       if (find_if_case_2 (test_bb, then_edge, else_edge))
3283 	goto success;
3284     }
3285 
3286   return NULL;
3287 
3288  success:
3289   if (dump_file)
3290     fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3291   /* Set this so we continue looking.  */
3292   cond_exec_changed_p = TRUE;
3293   return ce_info.test_bb;
3294 }
3295 
3296 /* Return true if a block has two edges, one of which falls through to the next
3297    block, and the other jumps to a specific block, so that we can tell if the
3298    block is part of an && test or an || test.  Returns either -1 or the number
3299    of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
3300 
3301 static int
3302 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3303 {
3304   edge cur_edge;
3305   int fallthru_p = FALSE;
3306   int jump_p = FALSE;
3307   rtx insn;
3308   rtx end;
3309   int n_insns = 0;
3310   edge_iterator ei;
3311 
3312   if (!cur_bb || !target_bb)
3313     return -1;
3314 
3315   /* If no edges, obviously it doesn't jump or fallthru.  */
3316   if (EDGE_COUNT (cur_bb->succs) == 0)
3317     return FALSE;
3318 
3319   FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3320     {
3321       if (cur_edge->flags & EDGE_COMPLEX)
3322 	/* Anything complex isn't what we want.  */
3323 	return -1;
3324 
3325       else if (cur_edge->flags & EDGE_FALLTHRU)
3326 	fallthru_p = TRUE;
3327 
3328       else if (cur_edge->dest == target_bb)
3329 	jump_p = TRUE;
3330 
3331       else
3332 	return -1;
3333     }
3334 
3335   if ((jump_p & fallthru_p) == 0)
3336     return -1;
3337 
3338   /* Don't allow calls in the block, since this is used to group && and ||
3339      together for conditional execution support.  ??? we should support
3340      conditional execution support across calls for IA-64 some day, but
3341      for now it makes the code simpler.  */
3342   end = BB_END (cur_bb);
3343   insn = BB_HEAD (cur_bb);
3344 
3345   while (insn != NULL_RTX)
3346     {
3347       if (CALL_P (insn))
3348 	return -1;
3349 
3350       if (INSN_P (insn)
3351 	  && !JUMP_P (insn)
3352 	  && !DEBUG_INSN_P (insn)
3353 	  && GET_CODE (PATTERN (insn)) != USE
3354 	  && GET_CODE (PATTERN (insn)) != CLOBBER)
3355 	n_insns++;
3356 
3357       if (insn == end)
3358 	break;
3359 
3360       insn = NEXT_INSN (insn);
3361     }
3362 
3363   return n_insns;
3364 }
3365 
3366 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3367    block.  If so, we'll try to convert the insns to not require the branch.
3368    Return TRUE if we were successful at converting the block.  */
3369 
static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  /* Until more test blocks are discovered below, the first test block
     is also the last.  */
  ce_info->last_test_bb = test_bb;

  /* We only ever should get here after reload,
     and if we have conditional execution.  */
  gcc_assert (reload_completed && targetm.have_conditional_execution ());

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      /* Note the `==': the predecessor edge must be exactly a plain
	 fallthru edge, with no other flags set.  */
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (!single_pred_p (bb))
		break;

	      bb = single_pred (bb);
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
	  || (epilogue_completed
	      && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
	{
	  rtx last_insn = BB_END (then_bb);

	  /* Skip trailing notes to find the last real insn.  */
	  while (last_insn
		 && NOTE_P (last_insn)
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  /* Reject the indirect-jump case described above.  */
	  if (last_insn
	      && JUMP_P (last_insn)
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
	   && single_succ (then_bb) == single_succ (else_bb)
	   && single_pred_p (else_bb)
	   && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
	   && !(epilogue_completed
		&& tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      /* With an ELSE block we can still convert even though the JOIN
	 block is not adjacent; clearing JOIN_BB tells merge_if_block
	 not to try merging it.  */
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      /* Undo the tentative changes queued while trying the && / ||
	 variant before retrying without it.  */
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
	return TRUE;
    }

  return FALSE;
}
3599 
3600 /* Convert a branch over a trap, or a branch
3601    to a trap, into a conditional trap.  */
3602 
static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  COND is the condition
     under which the branch is taken; when the trap sits on the
     fallthrough (THEN) side it must fire when the branch is *not*
     taken, hence the reversal.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
		       copy_rtx (XEXP (cond, 1)),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else
    {
      rtx lab, newjump;

      /* In cfgrtl mode an explicit unconditional jump (plus barrier)
	 is needed to reach the branch's former target.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
3698 
3699 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3700    return it.  */
3701 
3702 static rtx
3703 block_has_only_trap (basic_block bb)
3704 {
3705   rtx trap;
3706 
3707   /* We're not the exit block.  */
3708   if (bb == EXIT_BLOCK_PTR)
3709     return NULL_RTX;
3710 
3711   /* The block must have no successors.  */
3712   if (EDGE_COUNT (bb->succs) > 0)
3713     return NULL_RTX;
3714 
3715   /* The only instruction in the THEN block must be the trap.  */
3716   trap = first_active_insn (bb);
3717   if (! (trap == BB_END (bb)
3718 	 && GET_CODE (PATTERN (trap)) == TRAP_IF
3719          && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3720     return NULL_RTX;
3721 
3722   return trap;
3723 }
3724 
3725 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3726    transformable, but not necessarily the other.  There need be no
3727    JOIN block.
3728 
3729    Return TRUE if we were successful at converting the block.
3730 
3731    Cases we'd like to look at:
3732 
3733    (1)
3734 	if (test) goto over; // x not live
3735 	x = a;
3736 	goto label;
3737 	over:
3738 
3739    becomes
3740 
3741 	x = a;
3742 	if (! test) goto label;
3743 
3744    (2)
3745 	if (test) goto E; // x not live
3746 	x = big();
3747 	goto L;
3748 	E:
3749 	x = b;
3750 	goto M;
3751 
3752    becomes
3753 
3754 	x = b;
3755 	if (test) goto M;
3756 	x = big();
3757 	goto L;
3758 
3759    (3) // This one's really only interesting for targets that can do
3760        // multiway branching, e.g. IA-64 BBB bundles.  For other targets
3761        // it results in multiple branches on a cache line, which often
3762        // does not sit well with predictors.
3763 
3764 	if (test1) goto E; // predicted not taken
3765 	x = a;
3766 	if (test2) goto F;
3767 	...
3768 	E:
3769 	x = b;
3770 	J:
3771 
3772    becomes
3773 
3774 	x = a;
3775 	if (test1) goto E;
3776 	if (test2) goto F;
3777 
3778    Notes:
3779 
3780    (A) Don't do (2) if the branch is predicted against the block we're
3781    eliminating.  Do it anyway if we can eliminate a branch; this requires
3782    that the sole successor of the eliminated block postdominate the other
3783    side of the if.
3784 
3785    (B) With CE, on (3) we can steal from both sides of the if, creating
3786 
3787 	if (test1) x = a;
3788 	if (!test1) x = b;
3789 	if (test1) goto J;
3790 	if (test2) goto F;
3791 	...
3792 	J:
3793 
3794    Again, this is most useful if J postdominates.
3795 
3796    (C) CE substitutes for helpful life information.
3797 
3798    (D) These heuristics need a lot of work.  */
3799 
3800 /* Tests for case 1 above.  */
3801 
static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index, then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
			    NULL_RTX)))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* Complement of the THEN edge's branch probability, used below to
     scale the speculation cost; assume an even split when no estimate
     has been recorded.  */
  if (then_edge->probability)
    then_prob = REG_BR_PROB_BASE - then_edge->probability;
  else
    then_prob = REG_BR_PROB_BASE / 2;

  /* We're speculating from the THEN path, we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  if (else_bb == EXIT_BLOCK_PTR)
    {
      /* When ELSE is the exit block, remember the label the branch
	 targets so the CFG fix-up below can redirect to it.  */
      rtx jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR)
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
					     else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
      BB_COPY_PARTITION (new_bb, test_bb);
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
3919 
/* Test for case 2 above.  TEST_BB ends in a conditional branch with
   THEN_EDGE leading to THEN_BB and ELSE_EDGE leading to ELSE_BB; try
   to hoist ELSE_BB's code above the conditional branch (via
   dead_or_predicable) and delete ELSE_BB.  Returns TRUE on success.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  int then_prob, else_prob;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
			    NULL_RTX)))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  /* When the edge carries no probability, assume a 50/50 split.  */
  if (else_edge->probability)
    {
      else_prob = else_edge->probability;
      then_prob = REG_BR_PROB_BASE - else_prob;
    }
  else
    {
      else_prob = REG_BR_PROB_BASE / 2;
      then_prob = REG_BR_PROB_BASE / 2;
    }

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
4020 
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.
   REVERSEP is true if the sense of the branch should be reversed.

   NOTE(review): OTHER_BB appears to be the block on TEST_BB's other
   outgoing path — its live-in set bounds what MERGE_BB may clobber,
   and when it already equals DEST_EDGE's destination no jump
   redirection is performed.  Confirm against the callers above.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx head, end, jump, earliest = NULL_RTX, old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  /* Ignore trailing debug insns when delimiting the body.  */
  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insn's
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  /* Step past the block's label and any leading debug insns/notes; if
     nothing else remains, the block has no body to move.  */
  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  /* Likewise exclude a terminating jump (and any debug insns before
     it) from the range of insns to be moved or predicated.  */
  if (JUMP_P (end))
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			         XEXP (cond, 1));
	  /* Flip the recorded probability to match the reversed test.  */
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}

      /* Tentatively predicate the insns; keep the pending changes only
	 if they all verify.  */
      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond, insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      /* Simulate liveness back to END, then check that the candidate
	 insns may legally be moved up across the branch.  */
      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

#ifdef HAVE_simple_return
      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((flag_shrink_wrap && HAVE_simple_return && !epilogue_completed)
	  && ENTRY_BLOCK_PTR->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs, df_get_live_out (ENTRY_BLOCK_PTR));
	  bitmap_and_into (return_regs, df_get_live_in (EXIT_BLOCK_PTR));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref *def_rec;
		    unsigned int uid = INSN_UID (insn);

		    /* If this insn sets any reg in return_regs..  */
		    for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
		      {
			df_ref def = *def_rec;
			unsigned r = DF_REF_REGNO (def);

			if (bitmap_bit_p (return_regs, r))
			  break;
		      }
		    /* ..then add all reg uses to the set of regs
		       we're interested in.  */
		    if (*def_rec)
		      df_simulate_uses (insn, return_regs);
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
#endif
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      /* Pick the label the redirected jump should target.  */
      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR)
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      if (reversep
	  ? ! invert_jump_1 (jump, new_dest_label)
	  : ! redirect_jump_1 (jump, new_dest_label))
	goto cancel;
    }

  /* Commit all pending changes made above, or bail out entirely.  */
  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_dest_label, 0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  /* The branch sense changed, so swap the counts and
	     probabilities of TEST_BB's two outgoing edges.  */
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note, set;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  set = single_set (insn);
	  if (!set || !function_invariant_p (SET_SRC (set))
	      || !function_invariant_p (XEXP (note, 0)))
	    remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  /* Failure: drop all pending changes and free the def bitmap.  */
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}
4335 
/* Main entry point for all if-conversion.  Repeatedly scans every
   basic block for convertible IF blocks until a pass makes no
   further conditional-execution changes, then reports statistics
   to the dump file.  */

static void
if_convert (void)
{
  basic_block bb;
  int pass;

  /* Add the DF live problem for the duration of this pass when
     compiling at -O1 (removed again at the end).  */
  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  /* Record loop exit edges; the loop structures themselves are not
     needed afterwards, nor are forward dominators.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
	{
          basic_block new_bb;
	  /* Keep converting from the same header while conversions
	     succeed and the block's dataflow info is still clean.  */
          while (!df_get_bb_dirty (bb)
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	{
	  if (dump_flags & TDF_SLIM)
	    print_rtl_slim_with_bb (dump_file, get_insns (), dump_flags);
	  else
	    print_rtl_with_bb (dump_file, get_insns ());
	}
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  /* Drop the live problem added above for -O1.  */
  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
4438 
4439 static bool
4440 gate_handle_if_conversion (void)
4441 {
4442   return (optimize > 0)
4443     && dbg_cnt (if_conversion);
4444 }
4445 
4446 /* If-conversion and CFG cleanup.  */
4447 static unsigned int
4448 rest_of_handle_if_conversion (void)
4449 {
4450   if (flag_if_conversion)
4451     {
4452       if (dump_file)
4453         dump_flow_info (dump_file, dump_flags);
4454       cleanup_cfg (CLEANUP_EXPENSIVE);
4455       if_convert ();
4456     }
4457 
4458   cleanup_cfg (0);
4459   return 0;
4460 }
4461 
/* Pass descriptor for the first if-conversion pass, "ce1".  */
struct rtl_opt_pass pass_rtl_ifcvt =
{
 {
  RTL_PASS,
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};
4481 
4482 static bool
4483 gate_handle_if_after_combine (void)
4484 {
4485   return optimize > 0 && flag_if_conversion
4486     && dbg_cnt (if_after_combine);
4487 }
4488 
4489 
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */
static unsigned int
rest_of_handle_if_after_combine (void)
{
  /* All the work is done by the common driver; always report success.  */
  if_convert ();
  return 0;
}
4498 
/* Pass descriptor for the post-combine if-conversion pass, "ce2".  */
struct rtl_opt_pass pass_if_after_combine =
{
 {
  RTL_PASS,
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4518 
4519 
4520 static bool
4521 gate_handle_if_after_reload (void)
4522 {
4523   return optimize > 0 && flag_if_conversion2
4524     && dbg_cnt (if_after_reload);
4525 }
4526 
/* Rerun if-conversion after reload; the work is done by the common
   driver above.  Always reports success.  */
static unsigned int
rest_of_handle_if_after_reload (void)
{
  if_convert ();
  return 0;
}
4533 
4534 
/* Pass descriptor for the post-reload if-conversion pass, "ce3".  */
struct rtl_opt_pass pass_if_after_reload =
{
 {
  RTL_PASS,
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4554