xref: /dragonfly/contrib/gcc-8.0/gcc/ifcvt.c (revision e6d22e9b)
1 /* If-conversion support.
2    Copyright (C) 2000-2018 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "cfghooks.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "expmed.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 
37 #include "cfgrtl.h"
38 #include "cfganal.h"
39 #include "cfgcleanup.h"
40 #include "expr.h"
41 #include "output.h"
42 #include "cfgloop.h"
43 #include "tree-pass.h"
44 #include "dbgcnt.h"
45 #include "shrink-wrap.h"
46 #include "rtl-iter.h"
47 #include "ifcvt.h"
48 #include "params.h"
49 
/* Default upper bound on the number of insns converted to conditional
   execution: the function's branch cost plus one.  Targets may
   pre-define MAX_CONDITIONAL_EXECUTE to override this.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif

/* NOTE(review): appears to enable multiple dump rounds per run —
   confirm against the uses later in this file.  */
#define IFCVT_MULTIPLE_DUMPS 1

/* A null basic block, for readability.  */
#define NULL_BLOCK	((basic_block) NULL)

/* True if after combine pass.  */
static bool ifcvt_after_combine;

/* True if the target has the cbranchcc4 optab.  */
static bool have_cbranchcc4;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, profile_probability, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);
102 
103 /* Count the number of non-jump active insns in BB.  */
104 
105 static int
106 count_bb_insns (const_basic_block bb)
107 {
108   int count = 0;
109   rtx_insn *insn = BB_HEAD (bb);
110 
111   while (1)
112     {
113       if (active_insn_p (insn) && !JUMP_P (insn))
114 	count++;
115 
116       if (insn == BB_END (bb))
117 	break;
118       insn = NEXT_INSN (insn);
119     }
120 
121   return count;
122 }
123 
/* Determine whether the total insn_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   (derived from the branch probability PROB, or REG_BR_PROB_BASE when
   PROB is uninitialized) plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb,
		     profile_probability prob, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);
  /* SCALE estimates how often the insns would actually execute;
     REG_BR_PROB_BASE means "always".  */
  int scale = prob.initialized_p () ? prob.to_reg_br_prob_base ()
	      : REG_BR_PROB_BASE;

  /* Set scale to REG_BR_PROB_BASE to avoid the identical scaling
     applied to insn_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;


  max_cost *= scale;

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
	{
	  /* Scale by REG_BR_PROB_BASE so the running COUNT is comparable
	     with MAX_COST, which was multiplied by SCALE above.  A zero
	     insn_cost means the cost could not be estimated.  */
	  int cost = insn_cost (insn, speed) * REG_BR_PROB_BASE;
	  if (cost == 0)
	    return false;

	  /* If this instruction is the load or set of a "stack" register,
	     such as a floating point register on x87, then the cost of
	     speculatively executing this insn may need to include
	     the additional cost of popping its result off of the
	     register stack.  Unfortunately, correctly recognizing and
	     accounting for this additional overhead is tricky, so for
	     now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
	  {
	    rtx set = single_set (insn);
	    if (set && STACK_REG_P (SET_DEST (set)))
	      return false;
	  }
#endif

	  count += cost;
	  if (count >= max_cost)
	    return false;
	}
      /* Calls are never cheap enough to speculate.  */
      else if (CALL_P (insn))
	return false;

      if (insn == BB_END (bb))
	break;
      insn = NEXT_INSN (insn);
    }

  return true;
}
199 
200 /* Return the first non-jump active insn in the basic block.  */
201 
202 static rtx_insn *
203 first_active_insn (basic_block bb)
204 {
205   rtx_insn *insn = BB_HEAD (bb);
206 
207   if (LABEL_P (insn))
208     {
209       if (insn == BB_END (bb))
210 	return NULL;
211       insn = NEXT_INSN (insn);
212     }
213 
214   while (NOTE_P (insn) || DEBUG_INSN_P (insn))
215     {
216       if (insn == BB_END (bb))
217 	return NULL;
218       insn = NEXT_INSN (insn);
219     }
220 
221   if (JUMP_P (insn))
222     return NULL;
223 
224   return insn;
225 }
226 
227 /* Return the last non-jump active (non-jump) insn in the basic block.  */
228 
229 static rtx_insn *
230 last_active_insn (basic_block bb, int skip_use_p)
231 {
232   rtx_insn *insn = BB_END (bb);
233   rtx_insn *head = BB_HEAD (bb);
234 
235   while (NOTE_P (insn)
236 	 || JUMP_P (insn)
237 	 || DEBUG_INSN_P (insn)
238 	 || (skip_use_p
239 	     && NONJUMP_INSN_P (insn)
240 	     && GET_CODE (PATTERN (insn)) == USE))
241     {
242       if (insn == head)
243 	return NULL;
244       insn = PREV_INSN (insn);
245     }
246 
247   if (LABEL_P (insn))
248     return NULL;
249 
250   return insn;
251 }
252 
253 /* Return the active insn before INSN inside basic block CURR_BB. */
254 
255 static rtx_insn *
256 find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
257 {
258   if (!insn || insn == BB_HEAD (curr_bb))
259     return NULL;
260 
261   while ((insn = PREV_INSN (insn)) != NULL_RTX)
262     {
263       if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
264         break;
265 
266       /* No other active insn all the way to the start of the basic block. */
267       if (insn == BB_HEAD (curr_bb))
268         return NULL;
269     }
270 
271   return insn;
272 }
273 
274 /* Return the active insn after INSN inside basic block CURR_BB. */
275 
276 static rtx_insn *
277 find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
278 {
279   if (!insn || insn == BB_END (curr_bb))
280     return NULL;
281 
282   while ((insn = NEXT_INSN (insn)) != NULL_RTX)
283     {
284       if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
285         break;
286 
287       /* No other active insn all the way to the end of the basic block. */
288       if (insn == BB_END (curr_bb))
289         return NULL;
290     }
291 
292   return insn;
293 }
294 
295 /* Return the basic block reached by falling though the basic block BB.  */
296 
297 static basic_block
298 block_fallthru (basic_block bb)
299 {
300   edge e = find_fallthru_edge (bb->succs);
301 
302   return (e) ? e->dest : NULL_BLOCK;
303 }
304 
305 /* Return true if RTXs A and B can be safely interchanged.  */
306 
307 static bool
308 rtx_interchangeable_p (const_rtx a, const_rtx b)
309 {
310   if (!rtx_equal_p (a, b))
311     return false;
312 
313   if (GET_CODE (a) != MEM)
314     return true;
315 
316   /* A dead type-unsafe memory reference is legal, but a live type-unsafe memory
317      reference is not.  Interchanging a dead type-unsafe memory reference with
318      a live type-safe one creates a live type-unsafe memory reference, in other
319      words, it makes the program illegal.
320      We check here conservatively whether the two memory references have equal
321      memory attributes.  */
322 
323   return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
324 }
325 

/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   CE_INFO describes the if-block (used only by targets that define
   IFCVT_MODIFY_INSN).  START and END bound the insns to convert,
   inclusive.  TEST is the condition to predicate each insn on, and
   PROB_VAL the probability of the branch being taken (attached as a
   REG_BR_PROB note to predicated calls).  MOD_OK is nonzero if insns
   that modify TEST are acceptable; such an insn must then be the last
   one converted.  All pattern changes are queued with validate_change
   and committed or cancelled as a group by the caller.  */

static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx_insn *start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */profile_probability
							    prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
	return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
	return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  SET_INSN_DELETED (insn);
	  goto insn_done;
	}

      /* Last insn wasn't last?  */
      if (must_be_last)
	return FALSE;

      /* An insn that modifies TEST is only allowed (with MOD_OK) as the
	 final converted insn, since later insns would be predicated on a
	 clobbered condition.  */
      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* Record the branch probability on predicated calls, since the call
	 is now only conditionally executed.  */
      if (CALL_P (insn) && prob_val.initialized_p ())
	validate_change (insn, &REG_NOTES (insn),
			 gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
					   prob_val.to_reg_br_prob_note (),
					   REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
425 
426 /* Return the condition for a jump.  Do not do any special processing.  */
427 
428 static rtx
429 cond_exec_get_condition (rtx_insn *jump)
430 {
431   rtx test_if, cond;
432 
433   if (any_condjump_p (jump))
434     test_if = SET_SRC (pc_set (jump));
435   else
436     return NULL_RTX;
437   cond = XEXP (test_if, 0);
438 
439   /* If this branches to JUMP_LABEL when the condition is false,
440      reverse the condition.  */
441   if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
442       && label_ref_label (XEXP (test_if, 2)) == JUMP_LABEL (jump))
443     {
444       enum rtx_code rev = reversed_comparison_code (cond, jump);
445       if (rev == UNKNOWN)
446 	return NULL_RTX;
447 
448       cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
449 			     XEXP (cond, 1));
450     }
451 
452   return cond;
453 }
454 
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.

   CE_INFO describes the blocks making up the if-block.  DO_MULTIPLE_P
   is nonzero when the extra && / || test blocks recorded in CE_INFO
   should be converted together with the THEN/ELSE blocks; otherwise
   only the last test block is used, and only for the special case of
   && tests without an ELSE block.  */

static int
cond_exec_process_if_block (ce_if_block * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;		/* first insn in THEN block */
  rtx_insn *then_end;		/* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;	/* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert.  */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  profile_probability true_prob_val;/* probability of else block */
  profile_probability false_prob_val;/* probability of then block */
  rtx_insn *then_last_head = NULL;	/* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;	/* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;	/* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;	/* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      /* With two arms, allow twice as many insns.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
	 and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
					 &then_first_tail, &else_first_tail,
					 NULL);
      if (then_first_tail == BB_HEAD (then_bb))
	then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
	else_start = else_end = NULL;

      if (n_matching > 0)
	{
	  if (then_end)
	    then_end = find_active_insn_before (then_bb, then_first_tail);
	  if (else_end)
	    else_end = find_active_insn_before (else_bb, else_first_tail);
	  /* Matching insns appear in both arms, so they only need to be
	     counted (and converted) once.  */
	  n_insns -= 2 * n_matching;
	}

      if (then_start
	  && else_start
	  && then_n_insns > n_matching
	  && else_n_insns > n_matching)
	{
	  int longest_match = MIN (then_n_insns - n_matching,
				   else_n_insns - n_matching);
	  n_matching
	    = flow_find_head_matching_sequence (then_bb, else_bb,
						&then_last_head,
						&else_last_head,
						longest_match);

	  if (n_matching > 0)
	    {
	      rtx_insn *insn;

	      /* We won't pass the insns in the head sequence to
		 cond_exec_process_insns, so we need to test them here
		 to make sure that they don't clobber the condition.  */
	      for (insn = BB_HEAD (then_bb);
		   insn != NEXT_INSN (then_last_head);
		   insn = NEXT_INSN (insn))
		if (!LABEL_P (insn) && !NOTE_P (insn)
		    && !DEBUG_INSN_P (insn)
		    && modified_in_p (test_expr, insn))
		  return FALSE;
	    }

	  if (then_last_head == then_end)
	    then_start = then_end = NULL;
	  if (else_last_head == else_end)
	    else_start = else_end = NULL;

	  if (n_matching > 0)
	    {
	      if (then_start)
		then_start = find_active_insn_after (then_bb, then_last_head);
	      if (else_start)
		else_start = find_active_insn_after (else_bb, else_last_head);
	      n_insns -= 2 * n_matching;
	    }
	}
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = profile_probability::from_reg_br_prob_note (XINT (note, 0));
      false_prob_val = true_prob_val.invert ();
    }
  else
    {
      true_prob_val = profile_probability::uninitialized ();
      false_prob_val = profile_probability::uninitialized ();
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx_insn *start, *end;
	  rtx t, f;
	  enum rtx_code f_code;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f_code = reversed_comparison_code (t, BB_END (bb));
	  if (f_code == UNKNOWN)
	    goto fail;

	  /* Accumulate the test conditions: for && chains the true test is
	     the AND of the individual tests (and the false test the IOR of
	     their inverses); for || chains the reverse.  */
	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
	from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, get_last_bb_insn (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
765 
/* Forward references for the noce_* if-conversion strategies defined
   below (conversions that do not rely on conditional execution).  */
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_ifelse_collapse (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);
781 
782 /* Return the comparison code for reversed condition for IF_INFO,
783    or UNKNOWN if reversing the condition is not possible.  */
784 
785 static inline enum rtx_code
786 noce_reversed_cond_code (struct noce_if_info *if_info)
787 {
788   if (if_info->rev_cond)
789     return GET_CODE (if_info->rev_cond);
790   return reversed_comparison_code (if_info->cond, if_info->jump);
791 }
792 
793 /* Return true if SEQ is a good candidate as a replacement for the
794    if-convertible sequence described in IF_INFO.
795    This is the default implementation that targets can override
796    through a target hook.  */
797 
798 bool
799 default_noce_conversion_profitable_p (rtx_insn *seq,
800 				      struct noce_if_info *if_info)
801 {
802   bool speed_p = if_info->speed_p;
803 
804   /* Cost up the new sequence.  */
805   unsigned int cost = seq_cost (seq, speed_p);
806 
807   if (cost <= if_info->original_cost)
808     return true;
809 
810   /* When compiling for size, we can make a reasonably accurately guess
811      at the size growth.  When compiling for speed, use the maximum.  */
812   return speed_p && cost <= if_info->max_seq_cost;
813 }
814 
/* Helper function for noce_try_store_flag*.

   Emit insns that set X to the value of IF_INFO's condition, reversed
   when REVERSEP is nonzero.  NORMALIZE is 0 when the raw store-flag
   result is wanted; otherwise it is the value the true result must be
   normalized to.  Return the rtx holding the result (X when the
   store-flag insn could be built directly), or NULL_RTX on failure.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      /* The jump may branch to its label on the false condition, and
	 then/else may already have been swapped; flip REVERSEP for each
	 case so the emitted comparison has the right sense.  */
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	  && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
	reversep = !reversep;
      if (if_info->then_else_reversed)
	reversep = !reversep;
    }
  else if (reversep
	   && if_info->rev_cond
	   && general_operand (XEXP (if_info->rev_cond, 0), VOIDmode)
	   && general_operand (XEXP (if_info->rev_cond, 1), VOIDmode))
    {
      /* Use the cached reversed condition directly instead of reversing
	 the comparison code.  */
      cond = if_info->rev_cond;
      reversep = false;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
				XEXP (cond, 1));
      rtx set = gen_rtx_SET (x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      /* Only keep the sequence if the target recognizes the insn.  */
      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
888 
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  If the
   straightforward move is not recognized by the target, fall back to
   store_bit_field (for ZERO_EXTRACT destinations) or to expanding the
   operation in Y through its optab.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  poly_int64 bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
	 otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
	     ? emit_move_insn (x, y)
	     : emit_insn (gen_rtx_SET (x, y));
      seq = get_insns ();
      end_sequence ();

      /* The simple move was not recognized by the target; try harder.  */
      if (recog_memoized (insn) <= 0)
	{
	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      rtx op = XEXP (x, 0);
	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

	      /* store_bit_field expects START to be relative to
		 BYTES_BIG_ENDIAN and adjusts this value for machines with
		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
		 invoke store_bit_field again it is necessary to have the START
		 value from the first call.  */
	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
		{
		  if (MEM_P (op))
		    start = BITS_PER_UNIT - start - size;
		  else
		    {
		      gcc_assert (REG_P (op));
		      start = BITS_PER_WORD - start - size;
		    }
		}

	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
	      store_bit_field (op, size, start, 0, 0, GET_MODE (x), y, false);
	      return;
	    }

	  /* For unary and binary operations in Y, try expanding the
	     operation directly into X via the matching optab; SEQ is
	     replaced when the expansion succeeds.  */
	  switch (GET_RTX_CLASS (GET_CODE (y)))
	    {
	    case RTX_UNARY:
	      ot = code_to_optab (GET_CODE (y));
	      if (ot)
		{
		  start_sequence ();
		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    case RTX_BIN_ARITH:
	    case RTX_COMM_ARITH:
	      ot = code_to_optab (GET_CODE (y));
	      if (ot)
		{
		  start_sequence ();
		  target = expand_binop (GET_MODE (y), ot,
					 XEXP (y, 0), XEXP (y, 1),
					 x, 0, OPTAB_DIRECT);
		  if (target != NULL_RTX)
		    {
		      if (target != x)
			  emit_move_insn (x, target);
		      seq = get_insns ();
		    }
		  end_sequence ();
		}
	      break;

	    default:
	      break;
	    }
	}

      emit_insn (seq);
      return;
    }

  /* X is a STRICT_LOW_PART; store Y into the low part of the inner
     register via store_bit_field.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
		   0, 0, outmode, y, false);
}
996 
997 /* Return the CC reg if it is used in COND.  */
998 
999 static rtx
1000 cc_in_cond (rtx cond)
1001 {
1002   if (have_cbranchcc4 && cond
1003       && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_CC)
1004     return XEXP (cond, 0);
1005 
1006   return NULL_RTX;
1007 }
1008 
1009 /* Return sequence of instructions generated by if conversion.  This
1010    function calls end_sequence() to end the current stream, ensures
1011    that the instructions are unshared, recognizable non-jump insns.
1012    On failure, this function returns a NULL_RTX.  */
1013 
static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();
  rtx cc = cc_in_cond (if_info->cond);

  /* Mark the original operands as used so unshare_all_rtl_in_chain
     below copies any further occurrences instead of aliasing them.  */
  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);

  for (insn = seq; insn; insn = NEXT_INSN (insn))
    set_used_flags (insn);

  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	|| recog_memoized (insn) == -1
	   /* Make sure new generated code does not clobber CC.  */
	|| (cc && set_of (cc, insn)))
      return NULL;

  return seq;
}
1045 
1046 /* Return true iff the then and else basic block (if it exists)
1047    consist of a single simple set instruction.  */
1048 
1049 static bool
1050 noce_simple_bbs (struct noce_if_info *if_info)
1051 {
1052   if (!if_info->then_simple)
1053     return false;
1054 
1055   if (if_info->else_bb)
1056     return if_info->else_simple;
1057 
1058   return true;
1059 }
1060 
1061 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
1062    "if (a == b) x = a; else x = b" into "x = b".  */
1063 
static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  if (code != NE && code != EQ)
    return FALSE;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (if_info->x)
      || HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;

  /* Check whether the operands of the comparison are A and in
     either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
	return FALSE;

      /* Pick the value that is valid on both arms: on the arm where the
	 comparison holds (EQ) or fails (NE), A and B are equal, so the
	 value from the other arm can be used unconditionally.  */
      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
	{
	  start_sequence ();
	  noce_emit_move_insn (if_info->x, y);
	  seq = end_ifcvt_sequence (if_info);
	  if (!seq)
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	}
      if_info->transform_name = "noce_try_move";
      return TRUE;
    }
  return FALSE;
}
1113 
1114 /* Try forming an IF_THEN_ELSE (cond, b, a) and collapsing that
1115    through simplify_rtx.  Sometimes that can eliminate the IF_THEN_ELSE.
1116    If that is the case, emit the result into x.  */
1117 
static int
noce_try_ifelse_collapse (struct noce_if_info * if_info)
{
  if (!noce_simple_bbs (if_info))
    return FALSE;

  machine_mode mode = GET_MODE (if_info->x);
  /* Let the simplifier try to collapse the conditional.  Note the
     operand order: B is the value selected when COND holds, A the
     value otherwise.  */
  rtx if_then_else = simplify_gen_ternary (IF_THEN_ELSE, mode, mode,
					    if_info->cond, if_info->b,
					    if_info->a);

  /* If it is still an IF_THEN_ELSE, nothing was simplified.  */
  if (GET_CODE (if_then_else) == IF_THEN_ELSE)
    return FALSE;

  rtx_insn *seq;
  start_sequence ();
  noce_emit_move_insn (if_info->x, if_then_else);
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump,
			  INSN_LOCATION (if_info->insn_a));

  if_info->transform_name = "noce_try_ifelse_collapse";
  return TRUE;
}
1145 
1146 
1147 /* Convert "if (test) x = 1; else x = 0".
1148 
1149    Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
1150    tried in noce_try_store_flag_constants after noce_try_cmove has had
1151    a go at the conversion.  */
1152 
static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* Accept B == STORE_FLAG_VALUE with A == 0 directly; the mirrored
     constants are only usable when the condition can be reversed.  */
  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
	   && CONST_INT_P (if_info->a)
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
	   && noce_reversed_cond_code (if_info) != UNKNOWN)
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      if_info->transform_name = "noce_try_store_flag";
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}
1198 
1199 
1200 /* Convert "if (test) x = -A; else x = A" into
1201    x = A; if (test) x = -x if the machine can do the
1202    conditional negate form of this cheaply.
1203    Try this before noce_try_cmove that will just load the
1204    immediates into two registers and do a conditional select
1205    between them.  If the target has a conditional negate or
1206    conditional invert operation we can save a potentially
1207    expensive constant synthesis.  */
1208 
static bool
noce_try_inverse_constants (struct noce_if_info *if_info)
{
  if (!noce_simple_bbs (if_info))
    return false;

  if (!CONST_INT_P (if_info->a)
      || !CONST_INT_P (if_info->b)
      || !REG_P (if_info->x))
    return false;

  machine_mode mode = GET_MODE (if_info->x);

  HOST_WIDE_INT val_a = INTVAL (if_info->a);
  HOST_WIDE_INT val_b = INTVAL (if_info->b);

  rtx cond = if_info->cond;

  rtx x = if_info->x;
  rtx target;

  start_sequence ();

  rtx_code code;
  /* The HOST_WIDE_INT_MIN check avoids negating the most negative
     value, which would be signed overflow (undefined behavior).  */
  if (val_b != HOST_WIDE_INT_MIN && val_a == -val_b)
    code = NEG;
  else if (val_a == ~val_b)
    code = NOT;
  else
    {
      end_sequence ();
      return false;
    }

  /* Load A into a scratch register, then conditionally negate or
     invert it in place.  */
  rtx tmp = gen_reg_rtx (mode);
  noce_emit_move_insn (tmp, if_info->a);

  target = emit_conditional_neg_or_complement (x, code, mode, cond, tmp, tmp);

  if (target)
    {
      rtx_insn *seq = get_insns ();

      if (!seq)
	{
	  end_sequence ();
	  return false;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);

      if (!seq)
	return false;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      if_info->transform_name = "noce_try_inverse_constants";
      return true;
    }

  end_sequence ();
  return false;
}
1275 
1276 
1277 /* Convert "if (test) x = a; else x = b", for A and B constant.
1278    Also allow A = y + c1, B = y + c2, with a common y between A
1279    and B.  */
1280 
static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  bool reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize;
  bool can_reverse;
  machine_mode mode = GET_MODE (if_info->x);
  rtx common = NULL_RTX;

  rtx a = if_info->a;
  rtx b = if_info->b;

  /* Handle cases like x := test ? y + 3 : y + 4.  */
  if (GET_CODE (a) == PLUS
      && GET_CODE (b) == PLUS
      && CONST_INT_P (XEXP (a, 1))
      && CONST_INT_P (XEXP (b, 1))
      && rtx_equal_p (XEXP (a, 0), XEXP (b, 0))
      /* Allow expressions that are not using the result or plain
         registers where we handle overlap below.  */
      && (REG_P (XEXP (a, 0))
	  || (noce_operand_ok (XEXP (a, 0))
	      && ! reg_overlap_mentioned_p (if_info->x, XEXP (a, 0)))))
    {
      /* Strip the common term; only the constant addends take part in
	 the analysis below.  */
      common = XEXP (a, 0);
      a = XEXP (a, 1);
      b = XEXP (b, 1);
    }

  if (!noce_simple_bbs (if_info))
    return FALSE;

  if (CONST_INT_P (a)
      && CONST_INT_P (b))
    {
      ifalse = INTVAL (a);
      itrue = INTVAL (b);
      bool subtract_flag_p = false;

      /* Compute the difference in unsigned arithmetic so that wraparound
	 is well-defined, then verify it is representable below.  */
      diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
      /* Make sure we can represent the difference between the two values.  */
      if ((diff > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (diff, mode);

      can_reverse = noce_reversed_cond_code (if_info) != UNKNOWN;
      reversep = false;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  normalize = 0;
	  /* We could collapse these cases but it is easier to follow the
	     diff/STORE_FLAG_VALUE combinations when they are listed
	     explicitly.  */

	  /* test ? 3 : 4
	     => 4 + (test != 0).  */
	  if (diff < 0 && STORE_FLAG_VALUE < 0)
	      reversep = false;
	  /* test ? 4 : 3
	     => can_reverse  | 4 + (test == 0)
		!can_reverse | 3 - (test != 0).  */
	  else if (diff > 0 && STORE_FLAG_VALUE < 0)
	    {
	      reversep = can_reverse;
	      subtract_flag_p = !can_reverse;
	      /* If we need to subtract the flag and we have PLUS-immediate
		 A and B then it is unlikely to be beneficial to play tricks
		 here.  */
	      if (subtract_flag_p && common)
		return FALSE;
	    }
	  /* test ? 3 : 4
	     => can_reverse  | 3 + (test == 0)
		!can_reverse | 4 - (test != 0).  */
	  else if (diff < 0 && STORE_FLAG_VALUE > 0)
	    {
	      reversep = can_reverse;
	      subtract_flag_p = !can_reverse;
	      /* If we need to subtract the flag and we have PLUS-immediate
		 A and B then it is unlikely to be beneficial to play tricks
		 here.  */
	      if (subtract_flag_p && common)
		return FALSE;
	    }
	  /* test ? 4 : 3
	     => 4 + (test != 0).  */
	  else if (diff > 0 && STORE_FLAG_VALUE > 0)
	    reversep = false;
	  else
	    gcc_unreachable ();
	}
      /* Is this (cond) ? 2^n : 0?  */
      else if (ifalse == 0 && pow2p_hwi (itrue)
	       && STORE_FLAG_VALUE == 1)
	normalize = 1;
      /* Is this (cond) ? 0 : 2^n?  */
      else if (itrue == 0 && pow2p_hwi (ifalse) && can_reverse
	       && STORE_FLAG_VALUE == 1)
	{
	  normalize = 1;
	  reversep = true;
	}
      /* Is this (cond) ? -1 : x?  */
      else if (itrue == -1
	       && STORE_FLAG_VALUE == -1)
	normalize = -1;
      /* Is this (cond) ? x : -1?  */
      else if (ifalse == -1 && can_reverse
	       && STORE_FLAG_VALUE == -1)
	{
	  normalize = -1;
	  reversep = true;
	}
      else
	return FALSE;

      if (reversep)
	{
	  /* Swap the arms; negate DIFF in unsigned arithmetic to avoid
	     signed-negation overflow.  */
	  std::swap (itrue, ifalse);
	  diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
	}

      start_sequence ();

      /* If we have x := test ? x + 3 : x + 4 then move the original
	 x out of the way while we store flags.  */
      if (common && rtx_equal_p (common, if_info->x))
	{
	  common = gen_reg_rtx (mode);
	  noce_emit_move_insn (common, if_info->x);
	}

      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  /* Add the common part now.  This may allow combine to merge this
	     with the store flag operation earlier into some sort of conditional
	     increment/decrement if the target allows it.  */
	  if (common)
	    target = expand_simple_binop (mode, PLUS,
					   target, common,
					   target, 0, OPTAB_WIDEN);

	  /* Always use ifalse here.  It should have been swapped with itrue
	     when appropriate when reversep is true.  */
	  target = expand_simple_binop (mode, subtract_flag_p ? MINUS : PLUS,
					gen_int_mode (ifalse, mode), target,
					if_info->x, 0, OPTAB_WIDEN);
	}
      /* Other cases are not beneficial when the original A and B are PLUS
	 expressions.  */
      else if (common)
	{
	  end_sequence ();
	  return FALSE;
	}
      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, gen_int_mode (ifalse, mode),
					if_info->x, 0, OPTAB_WIDEN);
	}
      else
	{
	  end_sequence ();
	  return FALSE;
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq || !targetm.noce_conversion_profitable_p (seq, if_info))
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
      if_info->transform_name = "noce_try_store_flag_constants";

      return TRUE;
    }

  return FALSE;
}
1495 
1496 /* Convert "if (test) foo++" into "foo += (test != 0)", and
1497    similarly for "foo--".  */
1498 
static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* Require A = B + C so the transform becomes "x = B; maybe add C",
     and the condition must be reversible.  */
  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && noce_reversed_cond_code (if_info) != UNKNOWN)
    {
      rtx cond = if_info->rev_cond;
      enum rtx_code code;

      /* Prefer the precomputed reversed condition; otherwise derive
	 the reversed comparison code from the original.  */
      if (cond == NULL_RTX)
	{
	  cond = if_info->cond;
	  code = reversed_comparison_code (cond, if_info->jump);
	}
      else
	code = GET_CODE (cond);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq || !targetm.noce_conversion_profitable_p (seq, if_info))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      if_info->transform_name = "noce_try_addcc";

	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  We're changing a branch and an increment to a comparison and
	 an ADD/SUB.  */
      if (XEXP (if_info->a, 1) == const1_rtx
	  || XEXP (if_info->a, 1) == constm1_rtx)
        {
	  start_sequence ();
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq || !targetm.noce_conversion_profitable_p (seq, if_info))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATION (if_info->insn_a));
	      if_info->transform_name = "noce_try_addcc";
	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
1600 
1601 /* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */
1602 
static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  reversep = 0;

  /* Note the embedded assignment: REVERSEP only becomes 1 when the
     first disjunct fails and the condition is reversible, which is
     exactly the B == 0 mirrored case.  */
  if ((if_info->a == const0_rtx
       && rtx_equal_p (if_info->b, if_info->x))
      || ((reversep = (noce_reversed_cond_code (if_info) != UNKNOWN))
	  && if_info->b == const0_rtx
	  && rtx_equal_p (if_info->a, if_info->x)))
    {
      start_sequence ();
      /* Store the flag normalized to -1 to form an all-ones/all-zeros
	 mask, then AND it with X.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq || !targetm.noce_conversion_profitable_p (seq, if_info))
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  if_info->transform_name = "noce_try_store_flag_mask";

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
1652 
1653 /* Helper function for noce_try_cmove and noce_try_cmove_arith.  */
1654 
static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
					       cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      /* Keep the insn only if the target recognizes it as-is.  */
      if (recog_memoized (insn) >= 0)
	{
	  rtx_insn *seq = get_insns ();
	  end_sequence ();
	  emit_insn (seq);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird
     except that the target supports cbranchcc4.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
      if (!have_cbranchcc4
	  || GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
	  || cmp_b != const0_rtx)
	return NULL_RTX;
    }

  unsignedp = (code == LTU || code == GEU
	       || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				  vtrue, vfalse, GET_MODE (x),
				  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      poly_uint64 byte_vtrue = SUBREG_BYTE (vtrue);
      poly_uint64 byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      /* The two subregs must agree in inner mode, byte offset, and
	 promotion state for the mode-N cmove to be equivalent.  */
      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
	  || maybe_ne (byte_vtrue, byte_vfalse)
	  || (SUBREG_PROMOTED_VAR_P (vtrue)
	      != SUBREG_PROMOTED_VAR_P (vfalse))
	  || (SUBREG_PROMOTED_GET (vtrue)
	      != SUBREG_PROMOTED_GET (vfalse)))
	return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
				      VOIDmode, reg_vtrue, reg_vfalse,
				      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
	return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
}
1757 
1758 /* Try only simple constants and registers here.  More complex cases
1759    are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1760    has had a go at it.  */
1761 
static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
				XEXP (if_info->cond, 0),
				XEXP (if_info->cond, 1),
				if_info->a, if_info->b);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = end_ifcvt_sequence (if_info);
	  if (!seq || !targetm.noce_conversion_profitable_p (seq, if_info))
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	  if_info->transform_name = "noce_try_cmove";

	  return TRUE;
	}
      /* If both a and b are constants try a last-ditch transformation:
	 if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;
	 Try this only if the target-specific expansion above has failed.
	 The target-specific expander may want to generate sequences that
	 we don't know about, so give them a chance before trying this
	 approach.  */
      else if (!targetm.have_conditional_execution ()
		&& CONST_INT_P (if_info->a) && CONST_INT_P (if_info->b))
	{
	  machine_mode mode = GET_MODE (if_info->x);
	  HOST_WIDE_INT ifalse = INTVAL (if_info->a);
	  HOST_WIDE_INT itrue = INTVAL (if_info->b);
	  rtx target = noce_emit_store_flag (if_info, if_info->x, false, -1);
	  if (!target)
	    {
	      end_sequence ();
	      return FALSE;
	    }

	  /* Unsigned subtraction keeps the wraparound well-defined;
	     the sign check below rejects unrepresentable differences.  */
	  HOST_WIDE_INT diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
	  /* Make sure we can represent the difference
	     between the two values.  */
	  if ((diff > 0)
	      != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	    {
	      end_sequence ();
	      return FALSE;
	    }

	  diff = trunc_int_for_mode (diff, mode);
	  target = expand_simple_binop (mode, AND,
					target, gen_int_mode (diff, mode),
					if_info->x, 0, OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, gen_int_mode (ifalse, mode),
					  if_info->x, 0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = end_ifcvt_sequence (if_info);
	      if (!seq || !targetm.noce_conversion_profitable_p (seq, if_info))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATION (if_info->insn_a));
	      if_info->transform_name = "noce_try_cmove";
	      return TRUE;
	    }
	  else
	    {
	      end_sequence ();
	      return FALSE;
	    }
	}
      else
	end_sequence ();
    }

  return FALSE;
}
1862 
1863 /* Return true if X contains a conditional code mode rtx.  */
1864 
1865 static bool
1866 contains_ccmode_rtx_p (rtx x)
1867 {
1868   subrtx_iterator::array_type array;
1869   FOR_EACH_SUBRTX (iter, array, x, ALL)
1870     if (GET_MODE_CLASS (GET_MODE (*iter)) == MODE_CC)
1871       return true;
1872 
1873   return false;
1874 }
1875 
1876 /* Helper for bb_valid_for_noce_process_p.  Validate that
1877    the rtx insn INSN is a single set that does not set
1878    the conditional register CC and is in general valid for
1879    if-conversion.  */
1880 
1881 static bool
1882 insn_valid_noce_process_p (rtx_insn *insn, rtx cc)
1883 {
1884   if (!insn
1885       || !NONJUMP_INSN_P (insn)
1886       || (cc && set_of (cc, insn)))
1887       return false;
1888 
1889   rtx sset = single_set (insn);
1890 
1891   /* Currently support only simple single sets in test_bb.  */
1892   if (!sset
1893       || !noce_operand_ok (SET_DEST (sset))
1894       || contains_ccmode_rtx_p (SET_DEST (sset))
1895       || !noce_operand_ok (SET_SRC (sset)))
1896     return false;
1897 
1898   return true;
1899 }
1900 
1901 
1902 /* Return true iff the registers that the insns in BB_A set do not get
1903    used in BB_B.  If TO_RENAME is non-NULL then it is a location that will be
1904    renamed later by the caller and so conflicts on it should be ignored
1905    in this function.  */
1906 
1907 static bool
1908 bbs_ok_for_cmove_arith (basic_block bb_a, basic_block bb_b, rtx to_rename)
1909 {
1910   rtx_insn *a_insn;
1911   bitmap bba_sets = BITMAP_ALLOC (&reg_obstack);
1912 
1913   df_ref def;
1914   df_ref use;
1915 
1916   FOR_BB_INSNS (bb_a, a_insn)
1917     {
1918       if (!active_insn_p (a_insn))
1919 	continue;
1920 
1921       rtx sset_a = single_set (a_insn);
1922 
1923       if (!sset_a)
1924 	{
1925 	  BITMAP_FREE (bba_sets);
1926 	  return false;
1927 	}
1928       /* Record all registers that BB_A sets.  */
1929       FOR_EACH_INSN_DEF (def, a_insn)
1930 	if (!(to_rename && DF_REF_REG (def) == to_rename))
1931 	  bitmap_set_bit (bba_sets, DF_REF_REGNO (def));
1932     }
1933 
1934   rtx_insn *b_insn;
1935 
1936   FOR_BB_INSNS (bb_b, b_insn)
1937     {
1938       if (!active_insn_p (b_insn))
1939 	continue;
1940 
1941       rtx sset_b = single_set (b_insn);
1942 
1943       if (!sset_b)
1944 	{
1945 	  BITMAP_FREE (bba_sets);
1946 	  return false;
1947 	}
1948 
1949       /* Make sure this is a REG and not some instance
1950 	 of ZERO_EXTRACT or SUBREG or other dangerous stuff.
1951 	 If we have a memory destination then we have a pair of simple
1952 	 basic blocks performing an operation of the form [addr] = c ? a : b.
1953 	 bb_valid_for_noce_process_p will have ensured that these are
1954 	 the only stores present.  In that case [addr] should be the location
1955 	 to be renamed.  Assert that the callers set this up properly.  */
1956       if (MEM_P (SET_DEST (sset_b)))
1957 	gcc_assert (rtx_equal_p (SET_DEST (sset_b), to_rename));
1958       else if (!REG_P (SET_DEST (sset_b)))
1959 	{
1960 	  BITMAP_FREE (bba_sets);
1961 	  return false;
1962 	}
1963 
1964       /* If the insn uses a reg set in BB_A return false.  */
1965       FOR_EACH_INSN_USE (use, b_insn)
1966 	{
1967 	  if (bitmap_bit_p (bba_sets, DF_REF_REGNO (use)))
1968 	    {
1969 	      BITMAP_FREE (bba_sets);
1970 	      return false;
1971 	    }
1972 	}
1973 
1974     }
1975 
1976   BITMAP_FREE (bba_sets);
1977   return true;
1978 }
1979 
1980 /* Emit copies of all the active instructions in BB except the last.
1981    This is a helper for noce_try_cmove_arith.  */
1982 
1983 static void
1984 noce_emit_all_but_last (basic_block bb)
1985 {
1986   rtx_insn *last = last_active_insn (bb, FALSE);
1987   rtx_insn *insn;
1988   FOR_BB_INSNS (bb, insn)
1989     {
1990       if (insn != last && active_insn_p (insn))
1991 	{
1992 	  rtx_insn *to_emit = as_a <rtx_insn *> (copy_rtx (insn));
1993 
1994 	  emit_insn (PATTERN (to_emit));
1995 	}
1996     }
1997 }
1998 
1999 /* Helper for noce_try_cmove_arith.  Emit the pattern TO_EMIT and return
2000    the resulting insn or NULL if it's not a valid insn.  */
2001 
2002 static rtx_insn *
2003 noce_emit_insn (rtx to_emit)
2004 {
2005   gcc_assert (to_emit);
2006   rtx_insn *insn = emit_insn (to_emit);
2007 
2008   if (recog_memoized (insn) < 0)
2009     return NULL;
2010 
2011   return insn;
2012 }
2013 
2014 /* Helper for noce_try_cmove_arith.  Emit a copy of the insns up to
2015    and including the penultimate one in BB if it is not simple
2016    (as indicated by SIMPLE).  Then emit LAST_INSN as the last
2017    insn in the block.  The reason for that is that LAST_INSN may
2018    have been modified by the preparation in noce_try_cmove_arith.  */
2019 
2020 static bool
2021 noce_emit_bb (rtx last_insn, basic_block bb, bool simple)
2022 {
2023   if (bb && !simple)
2024     noce_emit_all_but_last (bb);
2025 
2026   if (last_insn && !noce_emit_insn (last_insn))
2027     return false;
2028 
2029   return true;
2030 }
2031 
/* Try more complex cases involving conditional_move.  Returns TRUE and
   emits the converted sequence before the jump on success, FALSE
   otherwise (leaving the insn stream untouched).  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  bool a_simple = if_info->then_simple;
  bool b_simple = if_info->else_simple;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  rtx target;
  int is_mem = 0;
  enum rtx_code code;
  rtx cond = if_info->cond;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b))
    {
      machine_mode address_mode = get_address_mode (a);

      /* From here on A and B are the *addresses*, and X is a fresh
	 pseudo in the address mode that will receive the selected
	 address; the actual load is emitted at the end.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  machine_mode x_mode = GET_MODE (x);

  if (!can_conditionally_move_p (x_mode))
    return FALSE;

  /* Possibly rearrange operands to make things come out more natural.  */
  if (noce_reversed_cond_code (if_info) != UNKNOWN)
    {
      /* Prefer the arrangement where the "else" value B is either X
	 itself or already a general operand; swapping the arms (and
	 reversing the condition) achieves that.  */
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  if (if_info->rev_cond)
	    {
	      cond = if_info->rev_cond;
	      code = GET_CODE (cond);
	    }
	  else
	    code = reversed_comparison_code (cond, if_info->jump);
	  std::swap (a, b);
	  std::swap (insn_a, insn_b);
	  std::swap (a_simple, b_simple);
	  std::swap (then_bb, else_bb);
	}
    }

  /* When both arms are non-trivial blocks, the insns of one arm must
     not depend on registers the other arm sets (modulo the destination
     being renamed); check both directions.  */
  if (then_bb && else_bb
      && (!bbs_ok_for_cmove_arith (then_bb, else_bb,  if_info->orig_x)
	  || !bbs_ok_for_cmove_arith (else_bb, then_bb,  if_info->orig_x)))
    return FALSE;

  start_sequence ();

  /* If one of the blocks is empty then the corresponding B or A value
     came from the test block.  The non-empty complex block that we will
     emit might clobber the register used by B or A, so move it to a pseudo
     first.  */

  rtx tmp_a = NULL_RTX;
  rtx tmp_b = NULL_RTX;

  if (b_simple || !else_bb)
    tmp_b = gen_reg_rtx (x_mode);

  if (a_simple || !then_bb)
    tmp_a = gen_reg_rtx (x_mode);

  orig_a = a;
  orig_b = b;

  rtx emit_a = NULL_RTX;
  rtx emit_b = NULL_RTX;
  rtx_insn *tmp_insn = NULL;
  bool modified_in_a = false;
  bool modified_in_b = false;
  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */

  if (! general_operand (a, GET_MODE (a)) || tmp_a)
    {

      if (is_mem)
	{
	  rtx reg = gen_reg_rtx (GET_MODE (a));
	  emit_a = gen_rtx_SET (reg, a);
	}
      else
	{
	  if (insn_a)
	    {
	      /* Retarget a copy of INSN_A at the pseudo so the original
		 destination is not clobbered prematurely.  */
	      a = tmp_a ? tmp_a : gen_reg_rtx (GET_MODE (a));

	      rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
	      rtx set = single_set (copy_of_a);
	      SET_DEST (set) = a;

	      emit_a = PATTERN (copy_of_a);
	    }
	  else
	    {
	      rtx tmp_reg = tmp_a ? tmp_a : gen_reg_rtx (GET_MODE (a));
	      emit_a = gen_rtx_SET (tmp_reg, a);
	      a = tmp_reg;
	    }
	}
    }

  if (! general_operand (b, GET_MODE (b)) || tmp_b)
    {
      if (is_mem)
	{
          rtx reg = gen_reg_rtx (GET_MODE (b));
	  emit_b = gen_rtx_SET (reg, b);
	}
      else
	{
	  if (insn_b)
	    {
	      /* Same retargeting trick as for INSN_A above.  */
	      b = tmp_b ? tmp_b : gen_reg_rtx (GET_MODE (b));
	      rtx_insn *copy_of_b = as_a <rtx_insn *> (copy_rtx (insn_b));
	      rtx set = single_set (copy_of_b);

	      SET_DEST (set) = b;
	      emit_b = PATTERN (copy_of_b);
	    }
	  else
	    {
	      rtx tmp_reg = tmp_b ? tmp_b : gen_reg_rtx (GET_MODE (b));
	      emit_b = gen_rtx_SET (tmp_reg, b);
	      b = tmp_reg;
	    }
	}
    }

  /* MODIFIED_IN_A is true when emitting the code for the A arm would
     clobber something the B value depends on (and vice versa below);
     this decides the emission order.  */
  modified_in_a = emit_a != NULL_RTX && modified_in_p (orig_b, emit_a);
  if (tmp_b && then_bb)
    {
      FOR_BB_INSNS (then_bb, tmp_insn)
	/* Don't check inside insn_a.  We will have changed it to emit_a
	   with a destination that doesn't conflict.  */
	if (!(insn_a && tmp_insn == insn_a)
	    && modified_in_p (orig_b, tmp_insn))
	  {
	    modified_in_a = true;
	    break;
	  }

    }

  modified_in_b = emit_b != NULL_RTX && modified_in_p (orig_a, emit_b);
  if (tmp_a && else_bb)
    {
      FOR_BB_INSNS (else_bb, tmp_insn)
      /* Don't check inside insn_b.  We will have changed it to emit_b
	 with a destination that doesn't conflict.  */
      if (!(insn_b && tmp_insn == insn_b)
	  && modified_in_p (orig_a, tmp_insn))
	{
	  modified_in_b = true;
	  break;
	}
    }

  /* If insn to set up A clobbers any registers B depends on, try to
     swap insn that sets up A with the one that sets up B.  If even
     that doesn't help, punt.  */
  if (modified_in_a && !modified_in_b)
    {
      if (!noce_emit_bb (emit_b, else_bb, b_simple))
	goto end_seq_and_fail;

      if (!noce_emit_bb (emit_a, then_bb, a_simple))
	goto end_seq_and_fail;
    }
  else if (!modified_in_a)
    {
      if (!noce_emit_bb (emit_a, then_bb, a_simple))
	goto end_seq_and_fail;

      if (!noce_emit_bb (emit_b, else_bb, b_simple))
	goto end_seq_and_fail;
    }
  else
    goto end_seq_and_fail;

  target = noce_emit_cmove (if_info, x, code, XEXP (cond, 0), XEXP (cond, 1),
			    a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  /* Finalize the sequence and let the target veto unprofitable
     conversions before splicing it in before the jump.  */
  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq || !targetm.noce_conversion_profitable_p (ifcvt_seq, if_info))
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
			   INSN_LOCATION (if_info->insn_a));
  if_info->transform_name = "noce_try_cmove_arith";
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
2296 
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.
   Return a condition mentioning TARGET and set *EARLIEST to the
   earliest insn at which it is valid, or NULL on failure.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch is "reversed" when the jump label sits in the else arm
     of the IF_THEN_ELSE, i.e. the branch is taken when the condition
     is false.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx_insn *prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
	  && BLOCK_FOR_INSN (prev_insn)
	     == BLOCK_FOR_INSN (if_info->cond_earliest)
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (CONST_INT_P (src))
	    {
	      /* Substitute the constant for whichever operand that
		 SET defines.  */
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Keep the constant in the second operand position, as
		 canonical for comparisons.  */
	      if (CONST_INT_P (op_a))
		{
		  std::swap (op_a, op_b);
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (CONST_INT_P (op_b))
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  /* Each case undoes an off-by-one canonicalization, guarding
	     against wraparound at the extreme values.  */
	  switch (code)
	    {
	    case LT:
	      if (desired_val != HOST_WIDE_INT_MAX
		  && actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (desired_val != HOST_WIDE_INT_MIN
		  && actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (desired_val != HOST_WIDE_INT_MIN
		  && actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (desired_val != HOST_WIDE_INT_MAX
		  && actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, have_cbranchcc4, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
2452 
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.
   Returns TRUE and replaces the conditional with a MIN/MAX expression
   emitted before the jump on success, FALSE otherwise.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, target;
  rtx_insn *earliest, *seq;
  enum rtx_code code, op;
  int unsignedp;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (if_info->x)
      || HONOR_NANS (if_info->x))
    return FALSE;

  /* Find a condition that mentions operand A directly.  */
  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  After this, CODE compares A against B.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  /* Record the (possibly adjusted) condition we actually used; the
     reversed form is no longer valid.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;
  if_info->rev_cond = NULL_RTX;
  if_info->transform_name = "noce_try_minmax";

  return TRUE;
}
2551 
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
   "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
   etc.  Returns TRUE and emits the replacement before the jump on
   success, FALSE otherwise.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, target, a, b, c;
  rtx_insn *earliest, *seq;
  int negate;
  bool one_cmpl = false;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* Reject modes with signed zeros.  */
  if (HONOR_SIGNED_ZEROS (if_info->x))
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.
     After this, B is the plain operand; NEGATE records whether the
     final result must be negated (or complemented, if ONE_CMPL).  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      std::swap (a, b);
      negate = 1;
    }
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
    {
      negate = 0;
      one_cmpl = true;
    }
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
    {
      std::swap (a, b);
      negate = 1;
      one_cmpl = true;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    {
      c = XEXP (cond, 0);
      negate = !negate;
    }
  else
    return FALSE;

  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
  if (REG_P (c))
    {
      rtx set;
      rtx_insn *insn = prev_nonnote_insn (earliest);
      if (insn
	  && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
	  && (set = single_set (insn))
	  && rtx_equal_p (SET_DEST (set), c))
	{
	  rtx note = find_reg_equal_equiv_note (insn);
	  if (note)
	    c = XEXP (note, 0);
	  else
	    c = SET_SRC (set);
	}
      else
	return FALSE;
    }
  /* A constant-pool load of the comparison value also counts.  */
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.
     The one_cmpl case is more complicated, as we want to handle
     only x < 0 ? ~x : x or x >= 0 ? x : ~x to one_cmpl_abs (x)
     and x < 0 ? x : ~x or x >= 0 ? ~x : x to ~one_cmpl_abs (x),
     but not other cases (x > -1 is equivalent of x >= 0).  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    {
      if (one_cmpl)
	return FALSE;
    }
  else if (c == CONST0_RTX (GET_MODE (b)))
    {
      if (one_cmpl
	  && GET_CODE (cond) != GE
	  && GET_CODE (cond) != LT)
	return FALSE;
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();
  if (one_cmpl)
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
                                         if_info->x);
  else
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    {
      if (one_cmpl)
        target = expand_simple_unop (GET_MODE (target), NOT, target,
                                     if_info->x, 0);
      else
        target = expand_simple_unop (GET_MODE (target), NEG, target,
                                     if_info->x, 0);
    }

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  /* Record the condition actually used; its reversed form is stale.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;
  if_info->rev_cond = NULL_RTX;
  if_info->transform_name = "noce_try_abs";

  return TRUE;
}
2719 
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".
   Returns TRUE and emits the sign-mask sequence before the jump on
   success, FALSE otherwise.  */

static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c;
  rtx_insn *seq;
  machine_mode mode;
  enum rtx_code code;
  bool t_unconditional;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  /* T is the arm that receives the non-zero value; the other arm must
     be a literal zero.  "m < 0" and "m <= -1" are equivalent, as are
     "m >= 0" and "m > -1".  */
  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
	  || (code == LE && c == constm1_rtx))
	t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
	  || (code == GT && c == constm1_rtx))
	t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

  /* This is only profitable if T is unconditionally executed/evaluated in the
     original insn sequence or T is cheap.  The former happens if B is the
     non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
     INSN_B which can happen for e.g. conditional stores to memory.  For the
     cost computation use the block TEST_BB where the evaluation will end up
     after the transformation.  */
  t_unconditional =
    (t == if_info->b
     && (if_info->insn_b == NULL_RTX
	 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
  if (!(t_unconditional
	|| (set_src_cost (t, mode, if_info->speed_p)
	    < COSTS_N_INSNS (2))))
    return FALSE;

  start_sequence ();
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
	: NULL_RTX;

  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
  if_info->transform_name = "noce_try_sign_mask";

  return TRUE;
}
2801 
2802 
/* Optimize away "if (x & C) x |= C" and similar bit manipulation
   transformations.  Returns TRUE (possibly without emitting anything,
   when the conditional operation is a no-op) on success, FALSE if the
   pattern is not matched.  */

static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result;
  rtx_insn *seq;
  scalar_int_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for an integer operation.  */
  if (!is_a <scalar_int_mode> (GET_MODE (x), &mode))
    return FALSE;

  if (!noce_simple_bbs (if_info))
    return FALSE;

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      /* Require a single-bit test of X itself at a constant position.  */
      if (XEXP (cond, 1) != const1_rtx
	  || !CONST_INT_P (XEXP (cond, 2))
	  || ! rtx_equal_p (x, XEXP (cond, 0)))
	return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      /* Normalize the bit position for big-endian bit numbering.  */
      if (BITS_BIG_ENDIAN)
	bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
	return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != HOST_WIDE_INT_1U << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
	result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
	{
	  /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
	  result = gen_int_mode (HOST_WIDE_INT_1 << bitnum, mode);
	  result = simplify_gen_binary (IOR, mode, x, result);
	}
      else
	{
	  /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
	  result = gen_int_mode (~(HOST_WIDE_INT_1 << bitnum), mode);
	  result = simplify_gen_binary (AND, mode, x, result);
	}
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
	  || !CONST_INT_P (XEXP (a, 1))
	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
	     != (~(HOST_WIDE_INT_1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  /* A NULL result means the conditional operation was a no-op: succeed
     without emitting anything, which deletes the conditional.  */
  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
			       INSN_LOCATION (if_info->insn_a));
    }
  if_info->transform_name = "noce_try_bitop";
  return TRUE;
}
2910 
2911 
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
   THEN block of the caller, and we have to reverse the condition.
   Returns the condition rtx, or NULL_RTX on failure; *EARLIEST is set
   to the earliest insn at which the condition holds.  */

static rtx
noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
{
  rtx cond, set, tmp;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));

  /* We may have to reverse because the caller's if block is not canonical,
     i.e. the THEN block isn't the fallthrough block for the TEST block
     (see find_if_header).  */
  if (then_else_reversed)
    reverse = !reverse;

  /* If the condition variable is a register and is MODE_INT, accept it.
     BImode registers are excluded on targets with small register
     classes for BImode, where copying them around freely is not safe.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
      && (GET_MODE (tmp) != BImode
          || !targetm.small_register_classes_for_mode_p (BImode)))
    {
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, earliest,
				NULL_RTX, have_cbranchcc4, true);

  /* We don't handle side-effects in the condition, like handling
     REG_INC notes and making sure no duplicate conditions are emitted.  */
  if (tmp != NULL_RTX && side_effects_p (tmp))
    return NULL_RTX;

  return tmp;
}
2968 
2969 /* Return true if OP is ok for if-then-else processing.  */
2970 
2971 static int
2972 noce_operand_ok (const_rtx op)
2973 {
2974   if (side_effects_p (op))
2975     return FALSE;
2976 
2977   /* We special-case memories, so handle any of them with
2978      no address side effects.  */
2979   if (MEM_P (op))
2980     return ! side_effects_p (XEXP (op, 0));
2981 
2982   return ! may_trap_p (op);
2983 }
2984 
2985 /* Return true iff basic block TEST_BB is valid for noce if-conversion.
2986    The condition used in this if-conversion is in COND.
2987    In practice, check that TEST_BB ends with a single set
2988    x := a and all previous computations
2989    in TEST_BB don't produce any values that are live after TEST_BB.
2990    In other words, all the insns in TEST_BB are there only
2991    to compute a value for x.  Add the rtx cost of the insns
2992    in TEST_BB to COST.  Record whether TEST_BB is a single simple
2993    set instruction in SIMPLE_P.  */
2994 
static bool
bb_valid_for_noce_process_p (basic_block test_bb, rtx cond,
			      unsigned int *cost, bool *simple_p)
{
  if (!test_bb)
    return false;

  /* The last active insn is the candidate "x := a" set that the
     conversion will keep; everything before it may exist only to feed
     that set.  */
  rtx_insn *last_insn = last_active_insn (test_bb, FALSE);
  rtx last_set = NULL_RTX;

  /* CC register used by COND, if any; insns clobbering it are rejected
     by insn_valid_noce_process_p.  */
  rtx cc = cc_in_cond (cond);

  if (!insn_valid_noce_process_p (last_insn, cc))
    return false;
  last_set = single_set (last_insn);

  rtx x = SET_DEST (last_set);
  rtx_insn *first_insn = first_active_insn (test_bb);
  rtx first_set = single_set (first_insn);

  if (!first_set)
    return false;

  /* We have a single simple set, that's okay.  */
  bool speed_p = optimize_bb_for_speed_p (test_bb);

  /* One-insn block: report it as "simple" if its destination is an
     acceptable operand, and account for its cost.  */
  if (first_insn == last_insn)
    {
      *simple_p = noce_operand_ok (SET_DEST (first_set));
      *cost += pattern_cost (first_set, speed_p);
      return *simple_p;
    }

  rtx_insn *prev_last_insn = PREV_INSN (last_insn);
  gcc_assert (prev_last_insn);

  /* For now, disallow setting x multiple times in test_bb.  */
  if (REG_P (x) && reg_set_between_p (x, first_insn, prev_last_insn))
    return false;

  /* Registers written by the intermediate insns; used below to verify
     none of them is live out of the block.  */
  bitmap test_bb_temps = BITMAP_ALLOC (&reg_obstack);

  /* The regs that are live out of test_bb.  */
  bitmap test_bb_live_out = df_get_live_out (test_bb);

  /* Accumulate the cost locally; *COST is only updated on success.  */
  int potential_cost = pattern_cost (last_set, speed_p);
  rtx_insn *insn;
  FOR_BB_INSNS (test_bb, insn)
    {
      if (insn != last_insn)
	{
	  if (!active_insn_p (insn))
	    continue;

	  if (!insn_valid_noce_process_p (insn, cc))
	    goto free_bitmap_and_fail;

	  rtx sset = single_set (insn);
	  gcc_assert (sset);

	  /* Intermediate insns must be register sets with no memory in
	     the source, and must not feed the condition itself.  */
	  if (contains_mem_rtx_p (SET_SRC (sset))
	      || !REG_P (SET_DEST (sset))
	      || reg_overlap_mentioned_p (SET_DEST (sset), cond))
	    goto free_bitmap_and_fail;

	  potential_cost += pattern_cost (sset, speed_p);
	  bitmap_set_bit (test_bb_temps, REGNO (SET_DEST (sset)));
	}
    }

  /* If any of the intermediate results in test_bb are live after test_bb
     then fail.  */
  if (bitmap_intersect_p (test_bb_live_out, test_bb_temps))
    goto free_bitmap_and_fail;

  BITMAP_FREE (test_bb_temps);
  *cost += potential_cost;
  *simple_p = false;
  return true;

 free_bitmap_and_fail:
  BITMAP_FREE (test_bb_temps);
  return false;
}
3079 
3080 /* We have something like:
3081 
3082      if (x > y)
3083        { i = a; j = b; k = c; }
3084 
3085    Make it:
3086 
3087      tmp_i = (x > y) ? a : i;
3088      tmp_j = (x > y) ? b : j;
3089      tmp_k = (x > y) ? c : k;
3090      i = tmp_i;
3091      j = tmp_j;
3092      k = tmp_k;
3093 
3094    Subsequent passes are expected to clean up the extra moves.
3095 
3096    Look for special cases such as writes to one register which are
3097    read back in another SET, as might occur in a swap idiom or
3098    similar.
3099 
3100    These look like:
3101 
3102    if (x > y)
3103      i = a;
3104      j = i;
3105 
3106    Which we want to rewrite to:
3107 
3108      tmp_i = (x > y) ? a : i;
3109      tmp_j = (x > y) ? tmp_i : j;
3110      i = tmp_i;
3111      j = tmp_j;
3112 
3113    We can catch these when looking at (SET x y) by keeping a list of the
3114    registers we would have targeted before if-conversion and looking back
3115    through it for an overlap with Y.  If we find one, we rewire the
3116    conditional set to use the temporary we introduced earlier.
3117 
3118    IF_INFO contains the useful information about the block structure and
3119    jump instructions.  */
3120 
static int
noce_convert_multiple_sets (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;
  basic_block then_bb = if_info->then_bb;
  basic_block join_bb = if_info->join_bb;
  rtx_insn *jump = if_info->jump;
  rtx_insn *cond_earliest;
  rtx_insn *insn;

  /* All new insns are emitted into a pending sequence; every failure
     path below must call end_sequence () before returning.  */
  start_sequence ();

  /* Decompose the condition attached to the jump.  */
  rtx cond = noce_get_condition (jump, &cond_earliest, false);
  rtx x = XEXP (cond, 0);
  rtx y = XEXP (cond, 1);
  rtx_code cond_code = GET_CODE (cond);

  /* The true targets for a conditional move.  */
  auto_vec<rtx> targets;
  /* The temporaries introduced to allow us to not consider register
     overlap.  */
  auto_vec<rtx> temporaries;
  /* The insns we've emitted.  */
  auto_vec<rtx_insn *> unmodified_insns;
  int count = 0;

  FOR_BB_INSNS (then_bb, insn)
    {
      /* Skip over non-insns.  */
      if (!active_insn_p (insn))
	continue;

      /* bb_ok_for_noce_convert_multiple_sets guarantees each active
	 insn is a single REG set.  */
      rtx set = single_set (insn);
      gcc_checking_assert (set);

      rtx target = SET_DEST (set);
      rtx temp = gen_reg_rtx (GET_MODE (target));
      rtx new_val = SET_SRC (set);
      rtx old_val = target;

      /* If we were supposed to read from an earlier write in this block,
	 we've changed the register allocation.  Rewire the read.  While
	 we are looking, also try to catch a swap idiom.  */
      for (int i = count - 1; i >= 0; --i)
	if (reg_overlap_mentioned_p (new_val, targets[i]))
	  {
	    /* Catch a "swap" style idiom.  */
	    if (find_reg_note (insn, REG_DEAD, new_val) != NULL_RTX)
	      /* The write to targets[i] is only live until the read
		 here.  As the condition codes match, we can propagate
		 the set to here.  */
	      new_val = SET_SRC (single_set (unmodified_insns[i]));
	    else
	      new_val = temporaries[i];
	    break;
	  }

      /* If we had a non-canonical conditional jump (i.e. one where
	 the fallthrough is to the "else" case) we need to reverse
	 the conditional select.  */
      if (if_info->then_else_reversed)
	std::swap (old_val, new_val);


      /* We allow simple lowpart register subreg SET sources in
	 bb_ok_for_noce_convert_multiple_sets.  Be careful when processing
	 sequences like:
	 (set (reg:SI r1) (reg:SI r2))
	 (set (reg:HI r3) (subreg:HI (r1)))
	 For the second insn new_val or old_val (r1 in this example) will be
	 taken from the temporaries and have the wider mode which will not
	 match with the mode of the other source of the conditional move, so
	 we'll end up trying to emit r4:HI = cond ? (r1:SI) : (r3:HI).
	 Wrap the two cmove operands into subregs if appropriate to prevent
	 that.  */
      if (GET_MODE (new_val) != GET_MODE (temp))
	{
	  machine_mode src_mode = GET_MODE (new_val);
	  machine_mode dst_mode = GET_MODE (temp);
	  /* Only narrowing (dst strictly smaller than src) is handled.  */
	  if (!partial_subreg_p (dst_mode, src_mode))
	    {
	      end_sequence ();
	      return FALSE;
	    }
	  new_val = lowpart_subreg (dst_mode, new_val, src_mode);
	}
      if (GET_MODE (old_val) != GET_MODE (temp))
	{
	  machine_mode src_mode = GET_MODE (old_val);
	  machine_mode dst_mode = GET_MODE (temp);
	  if (!partial_subreg_p (dst_mode, src_mode))
	    {
	      end_sequence ();
	      return FALSE;
	    }
	  old_val = lowpart_subreg (dst_mode, old_val, src_mode);
	}

      /* Actually emit the conditional move.  */
      rtx temp_dest = noce_emit_cmove (if_info, temp, cond_code,
				       x, y, new_val, old_val);

      /* If we failed to expand the conditional move, drop out and don't
	 try to continue.  */
      if (temp_dest == NULL_RTX)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* Bookkeeping.  */
      count++;
      targets.safe_push (target);
      temporaries.safe_push (temp_dest);
      unmodified_insns.safe_push (insn);
    }

  /* We must have seen some sort of insn to insert, otherwise we were
     given an empty BB to convert, and we can't handle that.  */
  gcc_assert (!unmodified_insns.is_empty ());

  /* Now fixup the assignments.  */
  for (int i = 0; i < count; i++)
    noce_emit_move_insn (targets[i], temporaries[i]);

  /* Actually emit the sequence if it isn't too expensive.  */
  rtx_insn *seq = get_insns ();

  if (!targetm.noce_conversion_profitable_p (seq, if_info))
    {
      end_sequence ();
      return FALSE;
    }

  for (insn = seq; insn; insn = NEXT_INSN (insn))
    set_used_flags (insn);

  /* Mark all our temporaries and targets as used.  */
  for (int i = 0; i < count; i++)
    {
      set_used_flags (temporaries[i]);
      set_used_flags (targets[i]);
    }

  set_used_flags (cond);
  set_used_flags (x);
  set_used_flags (y);

  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  if (!seq)
    return FALSE;

  /* Reject the sequence if anything in it is a jump or fails to match
     an insn pattern; recog_memoized returns -1 on no match.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	|| recog_memoized (insn) == -1)
      return FALSE;

  emit_insn_before_setloc (seq, if_info->jump,
			   INSN_LOCATION (unmodified_insns.last ()));

  /* Clean up THEN_BB and the edges in and out of it.  */
  remove_edge (find_edge (test_bb, join_bb));
  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  /* Maybe merge blocks now the jump is simple enough.  */
  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  if_info->transform_name = "noce_convert_multiple_sets";
  return TRUE;
}
3302 
3303 /* Return true iff basic block TEST_BB is comprised of only
3304    (SET (REG) (REG)) insns suitable for conversion to a series
3305    of conditional moves.  Also check that we have more than one set
3306    (other routines can handle a single set better than we would), and
3307    fewer than PARAM_MAX_RTL_IF_CONVERSION_INSNS sets.  */
3308 
3309 static bool
3310 bb_ok_for_noce_convert_multiple_sets (basic_block test_bb)
3311 {
3312   rtx_insn *insn;
3313   unsigned count = 0;
3314   unsigned param = PARAM_VALUE (PARAM_MAX_RTL_IF_CONVERSION_INSNS);
3315 
3316   FOR_BB_INSNS (test_bb, insn)
3317     {
3318       /* Skip over notes etc.  */
3319       if (!active_insn_p (insn))
3320 	continue;
3321 
3322       /* We only handle SET insns.  */
3323       rtx set = single_set (insn);
3324       if (set == NULL_RTX)
3325 	return false;
3326 
3327       rtx dest = SET_DEST (set);
3328       rtx src = SET_SRC (set);
3329 
3330       /* We can possibly relax this, but for now only handle REG to REG
3331 	 (including subreg) moves.  This avoids any issues that might come
3332 	 from introducing loads/stores that might violate data-race-freedom
3333 	 guarantees.  */
3334       if (!REG_P (dest))
3335 	return false;
3336 
3337       if (!(REG_P (src)
3338 	   || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
3339 	       && subreg_lowpart_p (src))))
3340 	return false;
3341 
3342       /* Destination must be appropriate for a conditional write.  */
3343       if (!noce_operand_ok (dest))
3344 	return false;
3345 
3346       /* We must be able to conditionally move in this mode.  */
3347       if (!can_conditionally_move_p (GET_MODE (dest)))
3348 	return false;
3349 
3350       count++;
3351     }
3352 
3353   /* If we would only put out one conditional move, the other strategies
3354      this pass tries are better optimized and will be more appropriate.
3355      Some targets want to strictly limit the number of conditional moves
3356      that are emitted, they set this through PARAM, we need to respect
3357      that.  */
3358   return count > 1 && count <= param;
3359 }
3360 
3361 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3362    it without using conditional execution.  Return TRUE if we were successful
3363    at converting the block.  */
3364 
static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;	/* test block */
  basic_block then_bb = if_info->then_bb;	/* THEN */
  basic_block else_bb = if_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;	/* JOIN */
  rtx_insn *jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx_insn *insn_a, *insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.
     (4) if (...) { x = a; y = b; z = c; }  // Like 3, for multiple SETS.
     The later patterns require jumps to be more expensive.
     For the if (...) x = a; else x = b; case we allow multiple insns
     inside the then and else blocks as long as their only effect is
     to calculate a value for x.
     ??? For future expansion, further expand the "multiple X" rules.  */

  /* First look for multiple SETS.  */
  if (!else_bb
      && HAVE_conditional_move
      && !HAVE_cc0
      && bb_ok_for_noce_convert_multiple_sets (then_bb))
    {
      if (noce_convert_multiple_sets (if_info))
	{
	  if (dump_file && if_info->transform_name)
	    fprintf (dump_file, "if-conversion succeeded through %s\n",
		     if_info->transform_name);
	  return TRUE;
	}
    }

  /* Validate the THEN (and ELSE, if present) blocks and accumulate
     their original costs for the profitability check.  */
  bool speed_p = optimize_bb_for_speed_p (test_bb);
  unsigned int then_cost = 0, else_cost = 0;
  if (!bb_valid_for_noce_process_p (then_bb, cond, &then_cost,
				    &if_info->then_simple))
    return false;

  if (else_bb
      && !bb_valid_for_noce_process_p (else_bb, cond, &else_cost,
				       &if_info->else_simple))
    return false;

  /* When optimizing for speed only one of THEN/ELSE executes per run,
     so charge the cheaper branch; for size both bodies count.  */
  if (else_bb == NULL)
    if_info->original_cost += then_cost;
  else if (speed_p)
    if_info->original_cost += MIN (then_cost, else_cost);
  else
    if_info->original_cost += then_cost + else_cost;

  insn_a = last_active_insn (then_bb, FALSE);
  set_a = single_set (insn_a);
  gcc_assert (set_a);

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      insn_b = last_active_insn (else_bb, FALSE);
      set_b = single_set (insn_b);
      gcc_assert (set_b);

      if (!rtx_interchangeable_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* No ELSE block: look above the condition for a prior set of X
	 (pattern (2) above).  Walk backwards within the test block.  */
      insn_b = if_info->cond_earliest;
      do
	insn_b = prev_nonnote_nondebug_insn (insn_b);
      while (insn_b
	     && (BLOCK_FOR_INSN (insn_b)
		 == BLOCK_FOR_INSN (if_info->cond_earliest))
	     && !modified_in_p (x, insn_b));

      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
	  || !NONJUMP_INSN_P (insn_b)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_interchangeable_p (x, SET_DEST (set_b))
	  || ! noce_operand_ok (SET_SRC (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b), insn_b, jump)
	  /* Avoid extending the lifetime of hard registers on small
	     register class machines.  */
	  || (REG_P (SET_SRC (set_b))
	      && HARD_REGISTER_P (SET_SRC (set_b))
	      && targetm.small_register_classes_for_mode_p
		   (GET_MODE (SET_SRC (set_b))))
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, insn_b, jump))
	{
	  /* Fall back to treating this as pattern (3): no prior set.  */
	  insn_b = NULL;
	  set_b = NULL_RTX;
	}
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if_info->orig_x = orig_x;
  if (!REG_P (x)
      || (HARD_REGISTER_P (x)
	  && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
    {
      if (GET_MODE (x) == BLKmode)
	return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
	  && (!CONST_INT_P (XEXP (x, 1))
	      || !CONST_INT_P (XEXP (x, 2))))
	return FALSE;

      /* Substitute a fresh pseudo for X; the store back to ORIG_X is
	 emitted at "success" below.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (noce_simple_bbs (if_info)
      && rtx_interchangeable_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  /* NOTE(review): else_bb is already known nonzero from the
	     enclosing condition; the repeated test is redundant but
	     harmless.  */
	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    /* We want to avoid store speculation to avoid cases like
	 if (pthread_mutex_trylock(mutex))
	   ++global_variable;
       Rather than go to much effort here, we rely on the SSA optimizers,
       which do a good enough job these days.  */
    return FALSE;

  /* Try each transformation in turn; the first that succeeds wins.  */
  if (noce_try_move (if_info))
    goto success;
  if (noce_try_ifelse_collapse (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (noce_try_inverse_constants (if_info))
    goto success;
  if (!targetm.have_conditional_execution ()
      && noce_try_store_flag_constants (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_addcc (if_info))
	goto success;
      if (noce_try_store_flag_mask (if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (if_info))
	goto success;
      if (noce_try_sign_mask (if_info))
	goto success;
    }

  /* If we have a block before the condition that set X (pattern (2)),
     retry once treating it as a plain "if (...) x = a;".  */
  if (!else_bb && set_b)
    {
      insn_b = NULL;
      set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:
  if (dump_file && if_info->transform_name)
    fprintf (dump_file, "if-conversion succeeded through %s\n",
	     if_info->transform_name);

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx_insn *seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
3661 
3662 /* Check whether a block is suitable for conditional move conversion.
3663    Every insn must be a simple set of a register to a constant or a
3664    register.  For each assignment, store the value in the pointer map
3665    VALS, keyed indexed by register pointer, then store the register
3666    pointer in REGS.  COND is the condition we will test.  */
3667 
static int
check_cond_move_block (basic_block bb,
		       hash_map<rtx, rtx> *vals,
		       vec<rtx> *regs,
		       rtx cond)
{
  rtx_insn *insn;
  /* CC register used by COND, if any.  */
  rtx cc = cc_in_cond (cond);

   /* We can only handle simple jumps at the end of the basic block.
      It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      set = single_set (insn);
      if (!set)
	return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      /* Destination must be a register, and not a hard register on
	 targets with small register classes.  */
      if (!REG_P (dest)
	  || (HARD_REGISTER_P (dest)
	      && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
	return FALSE;

      /* Source must be a constant or a register operand.  */
      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
	return FALSE;

      if (side_effects_p (src) || side_effects_p (dest))
	return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
	return FALSE;

      /* Don't try to handle this if the source register was
	 modified earlier in the block.  */
      if ((REG_P (src)
	   && vals->get (src))
	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
	      && vals->get (SUBREG_REG (src))))
	return FALSE;

      /* Don't try to handle this if the destination register was
	 modified earlier in the block.  */
      if (vals->get (dest))
	return FALSE;

      /* Don't try to handle this if the condition uses the
	 destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
	return FALSE;

      /* Don't try to handle this if the source register is modified
	 later in the block.  */
      if (!CONSTANT_P (src)
	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
	return FALSE;

      /* Skip it if the instruction to be moved might clobber CC.  */
      if (cc && set_of (cc, insn))
	return FALSE;

      /* Record the assignment: later iterations consult VALS to detect
	 earlier writes in this same block.  */
      vals->put (dest, src);

      regs->safe_push (dest);
    }

  return TRUE;
}
3744 
3745 /* Given a basic block BB suitable for conditional move conversion,
3746    a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
3747    the register values depending on COND, emit the insns in the block as
3748    conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
3749    processed.  The caller has started a sequence for the conversion.
3750    Return true if successful, false if something goes wrong.  */
3751 
static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
			    basic_block bb, rtx cond,
			    hash_map<rtx, rtx> *then_vals,
			    hash_map<rtx, rtx> *else_vals,
			    bool else_block_p)
{
  enum rtx_code code;
  rtx_insn *insn;
  rtx cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
	continue;
      /* check_cond_move_block already validated each insn as a single
	 set of a register.  */
      set = single_set (insn);
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);

      /* T and E are the values DEST takes in the THEN and ELSE arms,
	 or NULL_RTX if the corresponding block does not set DEST.  */
      rtx *then_slot = then_vals->get (dest);
      rtx *else_slot = else_vals->get (dest);
      t = then_slot ? *then_slot : NULL_RTX;
      e = else_slot ? *else_slot : NULL_RTX;

      if (else_block_p)
	{
	  /* If this register was set in the then block, we already
	     handled this case there.  */
	  if (t)
	    continue;
	  /* Set only in the ELSE arm: keep DEST's old value when the
	     condition is true.  */
	  t = dest;
	  gcc_assert (e);
	}
      else
	{
	  gcc_assert (t);
	  /* Set only in the THEN arm: keep DEST's old value when the
	     condition is false.  */
	  if (!e)
	    e = dest;
	}

      /* Emit DEST = COND ? T : E into the pending sequence.  */
      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
				t, e);
      if (!target)
	return false;

      if (target != dest)
	noce_emit_move_insn (dest, target);
    }

  return true;
}
3811 
3812 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
3813    it using only conditional moves.  Return TRUE if we were successful at
3814    converting the block.  */
3815 
static int
cond_move_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  basic_block join_bb = if_info->join_bb;
  rtx_insn *jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx_insn *seq, *loc_insn;
  rtx reg;
  /* Count of registers assigned in only one of the two arms; such
     assignments become unconditionally executed after conversion.  */
  int c;
  vec<rtx> then_regs = vNULL;
  vec<rtx> else_regs = vNULL;
  unsigned int i;
  int success_p = FALSE;
  int limit = PARAM_VALUE (PARAM_MAX_RTL_IF_CONVERSION_INSNS);

  /* Build a mapping for each block to the value used for each
     register.  */
  hash_map<rtx, rtx> then_vals;
  hash_map<rtx, rtx> else_vals;

  /* Make sure the blocks are suitable.  */
  if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
      || (else_bb
	  && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
    goto done;

  /* Make sure the blocks can be used together.  If the same register
     is set in both blocks, and is not set to a constant in both
     cases, then both blocks must set it to the same register.  We
     have already verified that if it is set to a register, that the
     source register does not change after the assignment.  Also count
     the number of registers set in only one of the blocks.  */
  c = 0;
  FOR_EACH_VEC_ELT (then_regs, i, reg)
    {
      rtx *then_slot = then_vals.get (reg);
      rtx *else_slot = else_vals.get (reg);

      gcc_checking_assert (then_slot);
      if (!else_slot)
	++c;
      else
	{
	  rtx then_val = *then_slot;
	  rtx else_val = *else_slot;
	  if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
	      && !rtx_equal_p (then_val, else_val))
	    goto done;
	}
    }

  /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
  FOR_EACH_VEC_ELT (else_regs, i, reg)
    {
      gcc_checking_assert (else_vals.get (reg));
      if (!then_vals.get (reg))
	++c;
    }

  /* Make sure it is reasonable to convert this block.  What matters
     is the number of assignments currently made in only one of the
     branches, since if we convert we are going to always execute
     them.  */
  if (c > MAX_CONDITIONAL_EXECUTE
      || c > limit)
    goto done;

  /* Try to emit the conditional moves.  First do the then block,
     then do anything left in the else blocks.  */
  start_sequence ();
  if (!cond_move_convert_if_block (if_info, then_bb, cond,
				   &then_vals, &else_vals, false)
      || (else_bb
	  && !cond_move_convert_if_block (if_info, else_bb, cond,
					  &then_vals, &else_vals, true)))
    {
      end_sequence ();
      goto done;
    }
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    goto done;

  /* Use the location of the first real insn of either arm for the
     emitted sequence.  */
  loc_insn = first_active_insn (then_bb);
  if (!loc_insn)
    {
      loc_insn = first_active_insn (else_bb);
      gcc_assert (loc_insn);
    }
  emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));

  /* Remove the converted blocks and rewire the CFG: the test block now
     falls straight through to the join block.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  success_p = TRUE;

done:
  /* Release the reg vectors on every exit path (success or failure).  */
  then_regs.release ();
  else_regs.release ();
  return success_p;
}
3937 
3938 
/* Determine if a given basic block heads a simple IF-THEN-JOIN or an
   IF-THEN-ELSE-JOIN block.

   If so, we'll try to convert the insns to not require the branch,
   using only transformations that do not require conditional execution.

   TEST_BB is the block ending in the conditional branch; THEN_EDGE and
   ELSE_EDGE are its two outgoing edges.  PASS is the current
   if-conversion pass number, used only for dump output.

   Return TRUE if we were successful at converting the block.  */

static int
noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
		    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  bool then_else_reversed = false;
  rtx_insn *jump;
  rtx cond;
  rtx_insn *cond_earliest;
  struct noce_if_info if_info;
  bool speed_p = optimize_bb_for_speed_p (test_bb);

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block.  Both the THEN and ELSE
     blocks must be diamonds: single pred (TEST_BB), single succ, and
     both successors must be the same block (the join).  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block: the ELSE edge goes straight to
     the join block.  */
  else if (single_pred_p (then_edge->dest)
	   && single_succ_p (then_edge->dest)
	   && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
	   && single_succ_p (else_edge->dest)
	   && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
	 To make this work, we have to invert the THEN and ELSE blocks
	 and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
	       (else_bb) ? "-ELSE" : "",
	       pass, test_bb->index, then_bb->index);

      if (else_bb)
	fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.  */
  cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  rtx_insn *rev_cond_earliest;
  /* Also record the reversed condition; noce_get_condition may fail
     for the reversed sense, in which case rev_cond is NULL_RTX.  */
  if_info.rev_cond = noce_get_condition (jump, &rev_cond_earliest,
					 !then_else_reversed);
  gcc_assert (if_info.rev_cond == NULL_RTX
	      || rev_cond_earliest == cond_earliest);
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  if_info.speed_p = speed_p;
  if_info.max_seq_cost
    = targetm.max_noce_ifcvt_seq_cost (then_edge);
  /* We'll add in the cost of THEN_BB and ELSE_BB later, when we check
     that they are valid to transform.  We can't easily get back to the insn
     for COND (and it may not exist if we had to canonicalize to get COND),
     and jump_insns are always given a cost of 1 by seq_cost, so treat
     both instructions as having cost COSTS_N_INSNS (1).  */
  if_info.original_cost = COSTS_N_INSNS (2);


  /* Do the real work.  Try the cheaper non-conditional-move
     transformations first, then fall back to conditional moves.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}
4076 
4077 
/* Merge the blocks and mark for local life update.

   CE_INFO describes the converted if-block: TEST_BB (the last test
   block), THEN_BB, an optional ELSE_BB, and JOIN_BB.  JOIN_BB may be
   NULL when the join block was not adjacent to the others.  All the
   blocks are merged into TEST_BB where possible.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      /* Walk the fallthru chain, folding each subtest block into the
	 combined block until the last test block is absorbed.  */
      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      /* If THEN_BB has no successors, then there's a BARRIER after it.
	 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
	 is no longer needed, and in fact it is incorrect to leave it in
	 the insn stream.  */
      if (EDGE_COUNT (then_bb->succs) == 0
	  && EDGE_COUNT (combo_bb->succs) > 1)
	{
	  rtx_insn *end = NEXT_INSN (BB_END (then_bb));
	  while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
	    end = NEXT_INSN (end);

	  if (end && BARRIER_P (end))
	    delete_insn (end);
	}
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      /* If ELSE_BB has no successors, then there's a BARRIER after it.
	 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
	 is no longer needed, and in fact it is incorrect to leave it in
	 the insn stream.  */
      if (EDGE_COUNT (else_bb->succs) == 0
	  && EDGE_COUNT (combo_bb->succs) > 1)
	{
	  rtx_insn *end = NEXT_INSN (BB_END (else_bb));
	  while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
	    end = NEXT_INSN (end);

	  if (end && BARRIER_P (end))
	    delete_insn (end);
	}
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx_insn *last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
		    || (NONJUMP_INSN_P (last)
			&& GET_CODE (PATTERN (last)) == TRAP_IF
			&& (TRAP_CONDITION (PATTERN (last))
			    == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
	gcc_assert (JUMP_P (last)
		    || (EDGE_SUCC (combo_bb, 0)->dest
			== EXIT_BLOCK_PTR_FOR_FN (cfun)
			&& CALL_P (last)
			&& SIBLING_CALL_P (last))
		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
			&& can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
	   && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* We can merge the JOIN cleanly and update the dataflow try
	 again on this pass.*/
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
		  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}
4217 
4218 /* Find a block ending in a simple IF condition and try to transform it
4219    in some way.  When converting a multi-block condition, put the new code
4220    in the first such block and delete the rest.  Return a pointer to this
4221    first block if some transformation was done.  Return NULL otherwise.  */
4222 
4223 static basic_block
4224 find_if_header (basic_block test_bb, int pass)
4225 {
4226   ce_if_block ce_info;
4227   edge then_edge;
4228   edge else_edge;
4229 
4230   /* The kind of block we're looking for has exactly two successors.  */
4231   if (EDGE_COUNT (test_bb->succs) != 2)
4232     return NULL;
4233 
4234   then_edge = EDGE_SUCC (test_bb, 0);
4235   else_edge = EDGE_SUCC (test_bb, 1);
4236 
4237   if (df_get_bb_dirty (then_edge->dest))
4238     return NULL;
4239   if (df_get_bb_dirty (else_edge->dest))
4240     return NULL;
4241 
4242   /* Neither edge should be abnormal.  */
4243   if ((then_edge->flags & EDGE_COMPLEX)
4244       || (else_edge->flags & EDGE_COMPLEX))
4245     return NULL;
4246 
4247   /* Nor exit the loop.  */
4248   if ((then_edge->flags & EDGE_LOOP_EXIT)
4249       || (else_edge->flags & EDGE_LOOP_EXIT))
4250     return NULL;
4251 
4252   /* The THEN edge is canonically the one that falls through.  */
4253   if (then_edge->flags & EDGE_FALLTHRU)
4254     ;
4255   else if (else_edge->flags & EDGE_FALLTHRU)
4256     std::swap (then_edge, else_edge);
4257   else
4258     /* Otherwise this must be a multiway branch of some sort.  */
4259     return NULL;
4260 
4261   memset (&ce_info, 0, sizeof (ce_info));
4262   ce_info.test_bb = test_bb;
4263   ce_info.then_bb = then_edge->dest;
4264   ce_info.else_bb = else_edge->dest;
4265   ce_info.pass = pass;
4266 
4267 #ifdef IFCVT_MACHDEP_INIT
4268   IFCVT_MACHDEP_INIT (&ce_info);
4269 #endif
4270 
4271   if (!reload_completed
4272       && noce_find_if_block (test_bb, then_edge, else_edge, pass))
4273     goto success;
4274 
4275   if (reload_completed
4276       && targetm.have_conditional_execution ()
4277       && cond_exec_find_if_block (&ce_info))
4278     goto success;
4279 
4280   if (targetm.have_trap ()
4281       && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
4282       && find_cond_trap (test_bb, then_edge, else_edge))
4283     goto success;
4284 
4285   if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
4286       && (reload_completed || !targetm.have_conditional_execution ()))
4287     {
4288       if (find_if_case_1 (test_bb, then_edge, else_edge))
4289 	goto success;
4290       if (find_if_case_2 (test_bb, then_edge, else_edge))
4291 	goto success;
4292     }
4293 
4294   return NULL;
4295 
4296  success:
4297   if (dump_file)
4298     fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
4299   /* Set this so we continue looking.  */
4300   cond_exec_changed_p = TRUE;
4301   return ce_info.test_bb;
4302 }
4303 
4304 /* Return true if a block has two edges, one of which falls through to the next
4305    block, and the other jumps to a specific block, so that we can tell if the
4306    block is part of an && test or an || test.  Returns either -1 or the number
4307    of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
4308 
4309 static int
4310 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
4311 {
4312   edge cur_edge;
4313   int fallthru_p = FALSE;
4314   int jump_p = FALSE;
4315   rtx_insn *insn;
4316   rtx_insn *end;
4317   int n_insns = 0;
4318   edge_iterator ei;
4319 
4320   if (!cur_bb || !target_bb)
4321     return -1;
4322 
4323   /* If no edges, obviously it doesn't jump or fallthru.  */
4324   if (EDGE_COUNT (cur_bb->succs) == 0)
4325     return FALSE;
4326 
4327   FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
4328     {
4329       if (cur_edge->flags & EDGE_COMPLEX)
4330 	/* Anything complex isn't what we want.  */
4331 	return -1;
4332 
4333       else if (cur_edge->flags & EDGE_FALLTHRU)
4334 	fallthru_p = TRUE;
4335 
4336       else if (cur_edge->dest == target_bb)
4337 	jump_p = TRUE;
4338 
4339       else
4340 	return -1;
4341     }
4342 
4343   if ((jump_p & fallthru_p) == 0)
4344     return -1;
4345 
4346   /* Don't allow calls in the block, since this is used to group && and ||
4347      together for conditional execution support.  ??? we should support
4348      conditional execution support across calls for IA-64 some day, but
4349      for now it makes the code simpler.  */
4350   end = BB_END (cur_bb);
4351   insn = BB_HEAD (cur_bb);
4352 
4353   while (insn != NULL_RTX)
4354     {
4355       if (CALL_P (insn))
4356 	return -1;
4357 
4358       if (INSN_P (insn)
4359 	  && !JUMP_P (insn)
4360 	  && !DEBUG_INSN_P (insn)
4361 	  && GET_CODE (PATTERN (insn)) != USE
4362 	  && GET_CODE (PATTERN (insn)) != CLOBBER)
4363 	n_insns++;
4364 
4365       if (insn == end)
4366 	break;
4367 
4368       insn = NEXT_INSN (insn);
4369     }
4370 
4371   return n_insns;
4372 }
4373 
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.

   On entry CE_INFO holds TEST_BB, THEN_BB and ELSE_BB as determined by
   the caller; this function fills in JOIN_BB, LAST_TEST_BB and the
   multiple-test-block counters before handing off to
   cond_exec_process_if_block.  Only used after reload on targets with
   conditional execution.  */

static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* We only ever should get here after reload,
     and if we have conditional execution.  */
  gcc_assert (reload_completed && targetm.have_conditional_execution ());

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk up
	     the fallthru-predecessor chain while each block keeps the
	     same jump target and the total insn count stays within
	     MAX_CONDITIONAL_EXECUTE.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (!single_pred_p (bb))
		break;

	      bb = single_pred (bb);
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
	  || (epilogue_completed
	      && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *last_insn = BB_END (then_bb);

	  while (last_insn
		 && NOTE_P (last_insn)
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  if (last_insn
	      && JUMP_P (last_insn)
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
	   && single_succ (then_bb) == single_succ (else_bb)
	   && single_pred_p (else_bb)
	   && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
	   && !(epilogue_completed
		&& tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb
      && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
	return TRUE;
    }

  return FALSE;
}
4608 
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.

   TEST_BB ends in a conditional jump with edges THEN_EDGE and ELSE_EDGE.
   Exactly one of the two destination blocks must consist of a single
   unconditional trap insn (see block_has_only_trap).  On success the
   trap becomes a conditional trap emitted before the comparison, the
   jump is deleted and the CFG is rewired.  Return TRUE on success.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx_insn *trap, *jump;
  rtx cond;
  rtx_insn *cond_earliest;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.
     Reverse the condition when the THEN edge is the one leading to
     the trap, so COND is the condition under which we trap.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, then_bb == trap_bb);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  Give up for returnjump_p,
     changing a conditional return followed by unconditional trap for
     conditional trap followed by unconditional return is likely not
     beneficial and harder to handle.  */
  if (! onlyjump_p (jump) || returnjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Attempt to generate the conditional trap.  */
  rtx_insn *seq = gen_cond_trap (GET_CODE (cond), copy_rtx (XEXP (cond, 0)),
				 copy_rtx (XEXP (cond, 1)),
				 TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* If that results in an invalid insn, back out.  */
  for (rtx_insn *x = seq; x; x = NEXT_INSN (x))
    if (recog_memoized (x) < 0)
      return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  In cfglayout mode marking the
     remaining edge fallthru is enough; otherwise, if the trap was on
     the THEN (jump) side, an explicit unconditional jump to the old
     jump target must be emitted to replace the conditional branch.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else if (trap_bb == then_bb)
    {
      rtx lab = JUMP_LABEL (jump);
      rtx_insn *seq = targetm.gen_jump (lab);
      rtx_jump_insn *newjump = emit_jump_insn_after (seq, jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
4706 
4707 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
4708    return it.  */
4709 
4710 static rtx_insn *
4711 block_has_only_trap (basic_block bb)
4712 {
4713   rtx_insn *trap;
4714 
4715   /* We're not the exit block.  */
4716   if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4717     return NULL;
4718 
4719   /* The block must have no successors.  */
4720   if (EDGE_COUNT (bb->succs) > 0)
4721     return NULL;
4722 
4723   /* The only instruction in the THEN block must be the trap.  */
4724   trap = first_active_insn (bb);
4725   if (! (trap == BB_END (bb)
4726 	 && GET_CODE (PATTERN (trap)) == TRAP_IF
4727          && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
4728     return NULL;
4729 
4730   return trap;
4731 }
4732 
4733 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
4734    transformable, but not necessarily the other.  There need be no
4735    JOIN block.
4736 
4737    Return TRUE if we were successful at converting the block.
4738 
4739    Cases we'd like to look at:
4740 
4741    (1)
4742 	if (test) goto over; // x not live
4743 	x = a;
4744 	goto label;
4745 	over:
4746 
4747    becomes
4748 
4749 	x = a;
4750 	if (! test) goto label;
4751 
4752    (2)
4753 	if (test) goto E; // x not live
4754 	x = big();
4755 	goto L;
4756 	E:
4757 	x = b;
4758 	goto M;
4759 
4760    becomes
4761 
4762 	x = b;
4763 	if (test) goto M;
4764 	x = big();
4765 	goto L;
4766 
4767    (3) // This one's really only interesting for targets that can do
4768        // multiway branching, e.g. IA-64 BBB bundles.  For other targets
4769        // it results in multiple branches on a cache line, which often
4770        // does not sit well with predictors.
4771 
4772 	if (test1) goto E; // predicted not taken
4773 	x = a;
4774 	if (test2) goto F;
4775 	...
4776 	E:
4777 	x = b;
4778 	J:
4779 
4780    becomes
4781 
4782 	x = a;
4783 	if (test1) goto E;
4784 	if (test2) goto F;
4785 
4786    Notes:
4787 
4788    (A) Don't do (2) if the branch is predicted against the block we're
4789    eliminating.  Do it anyway if we can eliminate a branch; this requires
4790    that the sole successor of the eliminated block postdominate the other
4791    side of the if.
4792 
4793    (B) With CE, on (3) we can steal from both sides of the if, creating
4794 
4795 	if (test1) x = a;
4796 	if (!test1) x = b;
4797 	if (test1) goto J;
4798 	if (test2) goto F;
4799 	...
4800 	J:
4801 
4802    Again, this is most useful if J postdominates.
4803 
4804    (C) CE substitutes for helpful life information.
4805 
4806    (D) These heuristics need a lot of work.  */
4807 
/* Tests for case 1 above.  TEST_BB ends in a conditional jump whose two
   outgoing edges are THEN_EDGE and ELSE_EDGE.  If the THEN block is a
   simple single-predecessor/single-successor block whose insns are cheap
   enough to speculate, hoist its body ahead of the branch (via
   dead_or_predicable) and delete THEN_BB, redirecting TEST_BB's
   fallthrough edge to the ELSE block.  Returns TRUE if the CFG was
   transformed, FALSE otherwise.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index;
  profile_probability then_prob;
  rtx else_target = NULL_RTX;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* Probability that the THEN edge is *not* taken; passed below to
     weight the cost of the insns we are about to speculate.  */
  then_prob = then_edge->probability.invert ();

  /* We're speculating from the THEN path, we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
				    predictable_edge_p (then_edge)))))
    return FALSE;

  /* If ELSE is the exit block, remember the existing jump's target label
     so the forced redirection below can reproduce it.  */
  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      rtx_insn *jump = BB_END (else_edge->src);
      gcc_assert (JUMP_P (jump));
      else_target = JUMP_LABEL (jump);
    }

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    single_succ_edge (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     through to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
					     else_bb, else_target);
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
					     else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* This should have been done above via force_nonfallthru_and_redirect
         (possibly called from redirect_edge_and_branch_force).  */
      gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
    }

  num_true_changes++;
  num_updated_if_blocks++;
  return TRUE;
}
4925 
/* Test for case 2 above.  Mirror image of case 1: here the ELSE block
   is the speculation candidate.  TEST_BB ends in a conditional jump
   with edges THEN_EDGE and ELSE_EDGE.  The ELSE block is hoisted and
   deleted when it is a simple cheap block and either the ELSE edge is
   the predicted direction or ELSE's successor postdominates THEN.
   Returns TRUE if the CFG was transformed, FALSE otherwise.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  profile_probability then_prob, else_prob;

  /* We do not want to speculate (empty) loop latches.  */
  if (current_loops
      && else_bb->loop_father->latch == else_bb)
    return FALSE;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && JUMP_P (BB_END (then_bb))
       && CROSSING_JUMP_P (BB_END (then_bb)))
      || (BB_END (test_bb)
	  && JUMP_P (BB_END (test_bb))
	  && CROSSING_JUMP_P (BB_END (test_bb)))
      || (BB_END (else_bb)
	  && JUMP_P (BB_END (else_bb))
	  && CROSSING_JUMP_P (BB_END (else_bb))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  else_prob = else_edge->probability;
  then_prob = else_prob.invert ();

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  if (else_prob > then_prob)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* We're speculating from the ELSE path, we want to make sure the cost
     of speculation is within reason.  */
  if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
	COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
				    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
5025 
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  DEST_EDGE is an
   edge representing a jump to the join block; after the conversion,
   TEST_BB should be branching to its destination.  OTHER_BB is
   TEST_BB's other successor (its live-in set is used when checking
   whether MERGE_BB's insns can be moved across the branch).
   REVERSEP is true if the sense of the branch should be reversed.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, edge dest_edge, int reversep)
{
  basic_block new_dest = dest_edge->dest;
  rtx_insn *head, *end, *jump;
  rtx_insn *earliest = NULL;
  rtx old_dest;
  bitmap merge_set = NULL;
  /* Number of pending changes.  */
  int n_validated_changes = 0;
  rtx new_dest_label = NULL_RTX;

  /* The conditional branch we will redirect, invert, or delete.  */
  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  /* Trim trailing debug insns off the candidate body.  */
  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insn's
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  /* Skip the block's label, leading debug insns, and a leading note
     (if the note is all there is, the block has no body).  */
  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
	head = NEXT_INSN (head);
    }

  /* If the block ends in a jump, exclude it (and any debug insns
     before it) from the body to be moved or predicated; only a simple
     jump is acceptable.  */
  if (JUMP_P (end))
    {
      if (!onlyjump_p (end))
	return FALSE;
      if (head == end)
	{
	  head = end = NULL;
	  goto no_body;
	}
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
	end = PREV_INSN (end);
    }

  /* Don't move frame-related insn across the conditional branch.  This
     can lead to one of the paths of the branch having wrong unwind info.  */
  if (epilogue_completed)
    {
      rtx_insn *insn = head;
      while (1)
	{
	  if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
	    return FALSE;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      /* Recover the branch probability from the REG_BR_PROB note, if
	 present, so the predicated insns can carry it.  */
      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      profile_probability prob_val
	  = (note ? profile_probability::from_reg_br_prob_note (XINT (note, 0))
	     : profile_probability::uninitialized ());

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			         XEXP (cond, 1));
	  prob_val = prob_val.invert ();
	}

      /* Tentatively predicate the body; keep the change group pending
	 if it verifies, otherwise drop it and fall back to the NCE
	 path below.  */
      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
	  && verify_changes (0))
	n_validated_changes = num_validated_changes ();
      else
	cancel_changes (0);

      earliest = jump;
    }
#endif

  /* If we allocated new pseudos (e.g. in the conditional move
     expander called from noce_emit_cmove), we must resize the
     array first.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      rtx cond;
      rtx_insn *insn;
      regset live;
      bool success;

      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      if (!any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (!cond)
	return FALSE;

      /* Compute the register liveness just before END and check that
	 the body can legally be hoisted above the branch.  */
      live = BITMAP_ALLOC (&reg_obstack);
      simulate_backwards_to_point (merge_bb, live, end);
      success = can_move_insns_across (head, end, earliest, jump,
				       merge_bb, live,
				       df_get_live_in (other_bb), NULL);
      BITMAP_FREE (live);
      if (!success)
	return FALSE;

      /* Collect the set of registers set in MERGE_BB.  */
      merge_set = BITMAP_ALLOC (&reg_obstack);

      FOR_BB_INSNS (merge_bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  df_simulate_find_defs (insn, merge_set);

      /* If shrink-wrapping, disable this optimization when test_bb is
	 the first basic block and merge_bb exits.  The idea is to not
	 move code setting up a return register as that may clobber a
	 register used to pass function parameters, which then must be
	 saved in caller-saved regs.  A caller-saved reg requires the
	 prologue, killing a shrink-wrap opportunity.  */
      if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
	  && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
	  && single_succ_p (new_dest)
	  && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
	{
	  regset return_regs;
	  unsigned int i;

	  return_regs = BITMAP_ALLOC (&reg_obstack);

	  /* Start off with the intersection of regs used to pass
	     params and regs used to return values.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (FUNCTION_ARG_REGNO_P (i)
		&& targetm.calls.function_value_regno_p (i))
	      bitmap_set_bit (return_regs, INCOMING_REGNO (i));

	  bitmap_and_into (return_regs,
			   df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  bitmap_and_into (return_regs,
			   df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
	  if (!bitmap_empty_p (return_regs))
	    {
	      FOR_BB_INSNS_REVERSE (new_dest, insn)
		if (NONDEBUG_INSN_P (insn))
		  {
		    df_ref def;

		    /* If this insn sets any reg in return_regs, add all
		       reg uses to the set of regs we're interested in.  */
		    FOR_EACH_INSN_DEF (def, insn)
		      if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
			{
			  df_simulate_uses (insn, return_regs);
			  break;
			}
		  }
	      if (bitmap_intersect_p (merge_set, return_regs))
		{
		  BITMAP_FREE (return_regs);
		  BITMAP_FREE (merge_set);
		  return FALSE;
		}
	    }
	  BITMAP_FREE (return_regs);
	}
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      if (!any_condjump_p (jump))
	goto cancel;

      /* Pick the label for the new destination: reuse the existing
	 jump's label, use ret_rtx for the exit block, or synthesize a
	 block label.  */
      if (JUMP_P (BB_END (dest_edge->src)))
	new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	new_dest_label = ret_rtx;
      else
	new_dest_label = block_label (new_dest);

      rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
      if (reversep
	  ? ! invert_jump_1 (jump_insn, new_dest_label)
	  : ! redirect_jump_1 (jump_insn, new_dest_label))
	goto cancel;
    }

  /* Commit the accumulated change group, or bail out entirely.  */
  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
		       0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  std::swap (BRANCH_EDGE (test_bb)->probability,
		     FALLTHRU_EDGE (test_bb)->probability);
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx_insn *insn;

      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
	 notes being moved might become invalid.  */
      insn = head;
      do
	{
	  rtx note;

	  if (! INSN_P (insn))
	    continue;
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (! note)
	    continue;
	  remove_note (insn, note);
	} while (insn != end && (insn = NEXT_INSN (insn)));

      /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
	 notes referring to the registers being set might become invalid.  */
      if (merge_set)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
	    remove_reg_equal_equiv_notes_for_regno (i);

	  BITMAP_FREE (merge_set);
	}

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);

  if (merge_set)
    BITMAP_FREE (merge_set);

  return FALSE;
}
5350 
/* Main entry point for all if-conversion.  AFTER_COMBINE is true if
   we are after combine pass.  Repeatedly walks every basic block
   looking for convertible if-headers until a pass makes no further
   changes, then emits statistics and verifies the flow graph.  */

static void
if_convert (bool after_combine)
{
  basic_block bb;
  int pass;

  /* At -O1 the df_live problem is not otherwise present; add it for the
     duration of this function (removed again at the end).  */
  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  /* Record whether we are after combine pass.  */
  ifcvt_after_combine = after_combine;
  have_cbranchcc4 = (direct_optab_handler (cbranch_optab, CCmode)
		     != CODE_FOR_nothing);
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  /* Briefly build loop structure just to mark loop-exit edges, then
     tear it down again; plain dominators are not needed afterwards.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      /* Follow chains of converted headers within one walk, stopping at
	 blocks whose df info has been dirtied by a transformation.  */
      FOR_EACH_BB_FN (bb, cfun)
	{
          basic_block new_bb;
          while (!df_get_bb_dirty (bb)
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
	print_rtl_with_bb (dump_file, get_insns (), dump_flags);
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

  /* Drop the df_live problem that was added above for -O1.  */
  if (optimize == 1)
    df_remove_problem (df_live);

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  checking_verify_flow_info ();
}
5455 
5456 /* If-conversion and CFG cleanup.  */
5457 static unsigned int
5458 rest_of_handle_if_conversion (void)
5459 {
5460   if (flag_if_conversion)
5461     {
5462       if (dump_file)
5463 	{
5464 	  dump_reg_info (dump_file);
5465 	  dump_flow_info (dump_file, dump_flags);
5466 	}
5467       cleanup_cfg (CLEANUP_EXPENSIVE);
5468       if_convert (false);
5469     }
5470 
5471   cleanup_cfg (0);
5472   return 0;
5473 }
5474 
namespace {

/* Pass descriptor for the first RTL if-conversion pass ("ce1").  */
const pass_data pass_data_rtl_ifcvt =
{
  RTL_PASS, /* type */
  "ce1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* The ce1 pass object: invokes rest_of_handle_if_conversion, which runs
   if_convert with AFTER_COMBINE false.  */
class pass_rtl_ifcvt : public rtl_opt_pass
{
public:
  pass_rtl_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing and the if_conversion debug counter
     allows it.  */
  virtual bool gate (function *)
    {
      return (optimize > 0) && dbg_cnt (if_conversion);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_if_conversion ();
    }

}; // class pass_rtl_ifcvt

} // anon namespace
5511 
5512 rtl_opt_pass *
5513 make_pass_rtl_ifcvt (gcc::context *ctxt)
5514 {
5515   return new pass_rtl_ifcvt (ctxt);
5516 }
5517 
5518 
5519 /* Rerun if-conversion, as combine may have simplified things enough
5520    to now meet sequence length restrictions.  */
5521 
namespace {

/* Pass descriptor for the post-combine if-conversion pass ("ce2").  */
const pass_data pass_data_if_after_combine =
{
  RTL_PASS, /* type */
  "ce2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* The ce2 pass object: reruns if_convert with AFTER_COMBINE true.  */
class pass_if_after_combine : public rtl_opt_pass
{
public:
  pass_if_after_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_combine, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing with -fif-conversion and the
     if_after_combine debug counter allows it.  */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion
	&& dbg_cnt (if_after_combine);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_combine

} // anon namespace
5560 
5561 rtl_opt_pass *
5562 make_pass_if_after_combine (gcc::context *ctxt)
5563 {
5564   return new pass_if_after_combine (ctxt);
5565 }
5566 
5567 
namespace {

/* Pass descriptor for the post-reload if-conversion pass ("ce3").  */
const pass_data pass_data_if_after_reload =
{
  RTL_PASS, /* type */
  "ce3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IFCVT2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* The ce3 pass object: reruns if_convert with AFTER_COMBINE true,
   gated on -fif-conversion2.  */
class pass_if_after_reload : public rtl_opt_pass
{
public:
  pass_if_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_if_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing with -fif-conversion2 and the
     if_after_reload debug counter allows it.  */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_if_conversion2
	&& dbg_cnt (if_after_reload);
    }

  virtual unsigned int execute (function *)
    {
      if_convert (true);
      return 0;
    }

}; // class pass_if_after_reload

} // anon namespace
5606 
5607 rtl_opt_pass *
5608 make_pass_if_after_reload (gcc::context *ctxt)
5609 {
5610   return new pass_if_after_reload (ctxt);
5611 }
5612