/* Combining of if-expressions on trees.
   Copyright (C) 2007-2019 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "params.h"

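/* If the target does not provide its own definition of
   LOGICAL_OP_NON_SHORT_CIRCUIT, assume that evaluating both operands
   of a logical operation (and thereby giving up short-circuit
   evaluation) is worthwhile when BRANCH_COST rates branches as at
   least moderately expensive (>= 2).  */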
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent a logical AND or a logical OR of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent a logical AND or a logical OR of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
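
/* As an illustration (a hypothetical source fragment, not taken from
   any testcase), a chain of single bit tests such as

     if (flags & (1 << a))
       if (flags & (1 << b))
	 foo ();

   is rewritten so that the inner condition tests both bits at once,
   (flags & T) == T with T = (1 << a) | (1 << b), while the outer
   condition is replaced by a constant and later removed by CFG
   cleanup.  */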


/* Recognize an if-then-else CFG pattern starting at the basic-block
   COND_BB containing the COND_EXPR.  The recognized then and else
   blocks are stored in *THEN_BB and *ELSE_BB.  If *THEN_BB and/or
   *ELSE_BB are already set, they are required to match the then and
   else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
			basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Return true if the basic block BB has no side effects; return
   false otherwise.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_has_side_effects (stmt)
	  || gimple_uses_undefined_value_p (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt)
	  /* const calls don't match any of the above, yet they could
	     still have some side-effects - they could contain
	     gimple_could_trap_p statements, like floating point
	     exceptions or integer division by zero.  See PR70586.
	     FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
	     should handle this.  */
	  || is_gimple_call (stmt))
	return false;
    }

  return true;
}

/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
	 && single_succ_p (bb)
	 && single_succ (bb) == to_bb;
}

/* Return true if, for every PHI node in DEST, the arguments on the
   edge from BB1 and on the edge from BB2 are the same.  This makes
   the CFG merge point free from side-effects.  Return false
   otherwise.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
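
/* For example (using made-up SSA names), if CANDIDATE is _2 in

     _2 = (int) c_1(D);
     _3 = _2 & 1;

   where the conversion does not widen (c_1(D) has at least the
   precision of _2, e.g. an unsigned int converted to int) and _2 has
   no other use, get_name_for_bit_test returns c_1(D) so that the bit
   test can be expressed on the unconverted value.  */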

/* Recognize a single bit test pattern in the GIMPLE_COND COND and its
   defining statements.  Store the name being tested in *NAME and the
   bit in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT); if INV
   is true the condition is inverted, i.e. COND compares that value
   against zero with EQ_EXPR instead of NE_EXPR.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
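
/* For instance (made-up SSA names again), for

     _1 = a_2(D) & 12;
     if (_1 != 0)

   recognize_bits_test with INV == false stores a_2(D) in *NAME and
   the constant 12 in *BITS.  */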


/* Update the profile after the code in OUTER_COND_BB was adjusted so
   that OUTER_COND_BB no longer has a real condition.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
				basic_block outer_cond_bb)
{
  edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
		 ? EDGE_SUCC (outer_cond_bb, 1)
		 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  /* In the following we assume that inner_cond_bb has a single
     predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  /* The path outer_cond_bb->(outer2) needs to be merged into the path
     outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
     and the probability of inner_not_taken updated.  */

  inner_cond_bb->count = outer_cond_bb->count;

  /* Handle the special case where inner_taken's probability is always.
     In this case we know that the overall outcome will be always as
     well, but combining the probabilities would be conservative because
     it does not know that outer2->probability is the inverse of
     outer_to_inner->probability.  */
  if (inner_taken->probability == profile_probability::always ())
    ;
  else
    inner_taken->probability = outer2->probability
			       + outer_to_inner->probability
				 * inner_taken->probability;
  inner_not_taken->probability = profile_probability::always ()
				 - inner_taken->probability;

  outer_to_inner->probability = profile_probability::always ();
  outer2->probability = profile_probability::never ();
}
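
/* A small worked example of the arithmetic above (numbers invented
   for illustration): if outer_to_inner has probability 0.6 (hence
   outer2 has 0.4) and inner_taken initially has probability 0.5, the
   merged inner_taken edge gets 0.4 + 0.6 * 0.5 = 0.7 and
   inner_not_taken the remaining 0.3, while the now unconditional
   outer_to_inner edge becomes always and outer2 never.  */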

/* If-convert an AND pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   INNER_INV, OUTER_INV and RESULT_INV indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple *inner_stmt, *outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & ((1 << bit1) | (1 << bit2)) == ((1 << bit1) | (1 << bit2)).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  std::swap (name2, bits2);
	  std::swap (name1, bits1);
	}
      else if (name1 == bits2)
	std::swap (name2, bits2);
      else if (bits1 == name2)
	std::swap (name1, bits1);
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested, make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

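      /* maybe_fold_and_comparisons tries to express the conjunction of
	 the two (possibly inverted) tests as a single comparison and
	 returns NULL_TREE when it cannot.  As an illustrative example
	 (not an exhaustive description of what it handles), two tests
	 of the same value against constants, say a_1 > 5 from the
	 inner condition and a_1 > 3 from the outer one, may fold to
	 the single test a_1 > 5.  */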
      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
	  if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
	    logical_op_non_short_circuit
	      = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
	  if (!logical_op_non_short_circuit || flag_sanitize_coverage)
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL,
					  true, GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}

/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergeable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergeable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block whose entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is an
	     empty forwarder block to then_bb.  Compared to the above
	     simpler forms this can be treated as if then_bb and else_bb
	     were swapped, and the corresponding inner_cond_bb not
	     inverted because of that.  For same_phi_args_p we look at
	     equality of arguments between the edge from outer_cond_bb
	     and the edge from the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is an
	     empty forwarder block to else_bb.  Compared to the above
	     simpler forms this can be treated as if then_bb and else_bb
	     were swapped, and the corresponding inner_cond_bb not
	     inverted because of that.  For same_phi_args_p we look at
	     equality of arguments between the edge from outer_cond_bb
	     and the edge from the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}

/* Main entry for the tree if-combining pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple *stmt = last_stmt (bb);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	if (tree_ssa_ifcombine_bb (bb))
	  {
	    /* Clear range info from all stmts in BB, which is now
	       executed conditional on an always true/false condition.  */
	    reset_flow_sensitive_info_in_bb (bb);
	    cfg_changed |= true;
	  }
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}