1 /* Preamble and helpers for the autogenerated gimple-match.c file.
2    Copyright (C) 2014-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-fold.h"
35 #include "calls.h"
36 #include "tree-dfa.h"
37 #include "builtins.h"
38 #include "gimple-match.h"
39 #include "tree-pass.h"
40 #include "internal-fn.h"
41 #include "case-cfn-macros.h"
42 #include "gimplify.h"
43 #include "optabs-tree.h"
44 #include "tree-eh.h"
45 #include "dbgcnt.h"
46 
47 /* Forward declarations of the private auto-generated matchers.
48    They expect valueized operands in canonical order and do not
49    perform simplification of all-constant operands.  */
50 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
51 			     code_helper, tree, tree);
52 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
53 			     code_helper, tree, tree, tree);
54 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
55 			     code_helper, tree, tree, tree, tree);
56 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
57 			     code_helper, tree, tree, tree, tree, tree);
58 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
59 			     code_helper, tree, tree, tree, tree, tree, tree);
60 static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
61 				tree (*)(tree));
62 static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
63 				tree (*)(tree));
64 static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
65 				tree (*)(tree));
66 static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
67 				tree (*)(tree));
68 static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
69 				tree (*)(tree));
70 
71 const unsigned int gimple_match_op::MAX_NUM_OPS;
72 
73 /* Return whether T is a constant that we'll dispatch to fold to
74    evaluate fully constant expressions.  */
75 
76 static inline bool
77 constant_for_folding (tree t)
78 {
79   return (CONSTANT_CLASS_P (t)
80 	  /* The following is only interesting to string builtins.  */
81 	  || (TREE_CODE (t) == ADDR_EXPR
82 	      && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
83 }
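
/* For example, the address of a string literal (an ADDR_EXPR wrapping a
   STRING_CST) is accepted above even though it is not CONSTANT_CLASS_P,
   so that calls such as strlen ("foo") can still be dispatched to the
   constant folders.  */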
84 
85 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
86    operation.  Return true on success, storing the new operation in NEW_OP.  */
87 
88 static bool
89 convert_conditional_op (gimple_match_op *orig_op,
90 			gimple_match_op *new_op)
91 {
92   internal_fn ifn;
93   if (orig_op->code.is_tree_code ())
94     ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
95   else
96     {
97       combined_fn cfn = orig_op->code;
98       if (!internal_fn_p (cfn))
99 	return false;
100       ifn = get_conditional_internal_fn (as_internal_fn (cfn));
101     }
102   if (ifn == IFN_LAST)
103     return false;
104   unsigned int num_ops = orig_op->num_ops;
105   new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
106   new_op->ops[0] = orig_op->cond.cond;
107   for (unsigned int i = 0; i < num_ops; ++i)
108     new_op->ops[i + 1] = orig_op->ops[i];
109   tree else_value = orig_op->cond.else_value;
110   if (!else_value)
111     else_value = targetm.preferred_else_value (ifn, orig_op->type,
112 					       num_ops, orig_op->ops);
113   new_op->ops[num_ops + 1] = else_value;
114   return true;
115 }
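
/* As an illustration (not part of the original sources): a conditional
   match of (plus A B) with condition COND and else-value ELSE is turned
   by the function above into IFN_COND_ADD (COND, A, B, ELSE).  Whether
   the target can actually expand that internal function is only checked
   later, when the call statement is built.  */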
116 
117 /* RES_OP is the result of a simplification.  If it is conditional,
118    try to replace it with the equivalent UNCOND form, such as an
119    IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
120    result of the replacement if appropriate, adding any new statements to
121    SEQ and using VALUEIZE as the valueization function.  Return true if
122    this resimplification occurred and resulted in at least one change.  */
123 
124 static bool
125 maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
126 				 tree (*valueize) (tree))
127 {
128   if (!res_op->cond.cond)
129     return false;
130 
131   if (!res_op->cond.else_value
132       && res_op->code.is_tree_code ())
133     {
134       /* The "else" value doesn't matter.  If the "then" value is a
135 	 gimple value, just use it unconditionally.  This isn't a
136 	 simplification in itself, since there was no operation to
137 	 build in the first place.  */
138       if (gimple_simplified_result_is_gimple_val (res_op))
139 	{
140 	  res_op->cond.cond = NULL_TREE;
141 	  return false;
142 	}
143 
144       /* Likewise if the operation would not trap.  */
145       bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
146 			  && TYPE_OVERFLOW_TRAPS (res_op->type));
147       tree_code op_code = (tree_code) res_op->code;
148       bool op_could_trap;
149 
150       /* COND_EXPR and VEC_COND_EXPR will trap if, and only if, the condition
151 	 traps and hence we have to check this.  For all other operations, we
152 	 don't need to consider the operands.  */
153       if (op_code == COND_EXPR || op_code == VEC_COND_EXPR)
154 	op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
155       else
156 	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
157 						FLOAT_TYPE_P (res_op->type),
158 						honor_trapv,
159 						res_op->op_or_null (1));
160 
161       if (!op_could_trap)
162 	{
163 	  res_op->cond.cond = NULL_TREE;
164 	  return false;
165 	}
166     }
167 
168   /* If the "then" value is a gimple value and the "else" value matters,
169      create a VEC_COND_EXPR between them, then see if it can be further
170      simplified.  */
171   gimple_match_op new_op;
172   if (res_op->cond.else_value
173       && VECTOR_TYPE_P (res_op->type)
174       && gimple_simplified_result_is_gimple_val (res_op))
175     {
176       new_op.set_op (VEC_COND_EXPR, res_op->type,
177 		     res_op->cond.cond, res_op->ops[0],
178 		     res_op->cond.else_value);
179       *res_op = new_op;
180       return gimple_resimplify3 (seq, res_op, valueize);
181     }
182 
183   /* Otherwise try rewriting the operation as an IFN_COND_* call.
184      Again, this isn't a simplification in itself, since it's what
185      RES_OP already described.  */
186   if (convert_conditional_op (res_op, &new_op))
187     *res_op = new_op;
188 
189   return false;
190 }
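
/* For instance, if a conditional vector (plus A B) with condition COND
   and else-value ELSE simplifies to a plain gimple value V, the code
   above rewrites it as VEC_COND_EXPR <COND, V, ELSE> and resimplifies
   that; otherwise it falls back to the IFN_COND_* form produced by
   convert_conditional_op.  */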
191 
192 /* Helper that matches and simplifies the toplevel result from
193    a gimple_simplify run (where we don't want to build
194    a stmt in case it's used in in-place folding).  Replaces
195    RES_OP with a simplified and/or canonicalized result and
196    returns whether any change was made.  */
197 
198 static bool
199 gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
200 		    tree (*valueize)(tree))
201 {
202   if (constant_for_folding (res_op->ops[0]))
203     {
204       tree tem = NULL_TREE;
205       if (res_op->code.is_tree_code ())
206 	{
207 	  tree_code code = res_op->code;
208 	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
209 	      && TREE_CODE_LENGTH (code) == 1)
210 	    tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
211 	}
212       else
213 	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
214 			       res_op->ops[0]);
215       if (tem != NULL_TREE
216 	  && CONSTANT_CLASS_P (tem))
217 	{
218 	  if (TREE_OVERFLOW_P (tem))
219 	    tem = drop_tree_overflow (tem);
220 	  res_op->set_value (tem);
221 	  maybe_resimplify_conditional_op (seq, res_op, valueize);
222 	  return true;
223 	}
224     }
225 
226   /* Limit recursion, there are cases like PR80887 and others, for
227      example when value-numbering presents us with unfolded expressions
228      that we are really not prepared to handle without eventual
229      oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
230      itself as available expression.  */
231   static unsigned depth;
232   if (depth > 10)
233     {
234       if (dump_file && (dump_flags & TDF_FOLDING))
235 	fprintf (dump_file, "Aborting expression simplification due to "
236 		 "deep recursion\n");
237       return false;
238     }
239 
240   ++depth;
241   gimple_match_op res_op2 (*res_op);
242   if (gimple_simplify (&res_op2, seq, valueize,
243 		       res_op->code, res_op->type, res_op->ops[0]))
244     {
245       --depth;
246       *res_op = res_op2;
247       return true;
248     }
249   --depth;
250 
251   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
252     return true;
253 
254   return false;
255 }
256 
257 /* Helper that matches and simplifies the toplevel result from
258    a gimple_simplify run (where we don't want to build
259    a stmt in case it's used in in-place folding).  Replaces
260    RES_OP with a simplified and/or canonicalized result and
261    returns whether any change was made.  */
262 
263 static bool
264 gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
265 		    tree (*valueize)(tree))
266 {
267   if (constant_for_folding (res_op->ops[0])
268       && constant_for_folding (res_op->ops[1]))
269     {
270       tree tem = NULL_TREE;
271       if (res_op->code.is_tree_code ())
272 	{
273 	  tree_code code = res_op->code;
274 	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
275 	      && TREE_CODE_LENGTH (code) == 2)
276 	    tem = const_binop (res_op->code, res_op->type,
277 			       res_op->ops[0], res_op->ops[1]);
278 	}
279       else
280 	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
281 			       res_op->ops[0], res_op->ops[1]);
282       if (tem != NULL_TREE
283 	  && CONSTANT_CLASS_P (tem))
284 	{
285 	  if (TREE_OVERFLOW_P (tem))
286 	    tem = drop_tree_overflow (tem);
287 	  res_op->set_value (tem);
288 	  maybe_resimplify_conditional_op (seq, res_op, valueize);
289 	  return true;
290 	}
291     }
292 
293   /* Canonicalize operand order.  */
294   bool canonicalized = false;
295   if (res_op->code.is_tree_code ()
296       && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
297 	  || commutative_tree_code (res_op->code))
298       && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
299     {
300       std::swap (res_op->ops[0], res_op->ops[1]);
301       if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
302 	res_op->code = swap_tree_comparison (res_op->code);
303       canonicalized = true;
304     }
305 
306   /* Limit recursion, see gimple_resimplify1.  */
307   static unsigned depth;
308   if (depth > 10)
309     {
310       if (dump_file && (dump_flags & TDF_FOLDING))
311 	fprintf (dump_file, "Aborting expression simplification due to "
312 		 "deep recursion\n");
313       return false;
314     }
315 
316   ++depth;
317   gimple_match_op res_op2 (*res_op);
318   if (gimple_simplify (&res_op2, seq, valueize,
319 		       res_op->code, res_op->type,
320 		       res_op->ops[0], res_op->ops[1]))
321     {
322       --depth;
323       *res_op = res_op2;
324       return true;
325     }
326   --depth;
327 
328   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
329     return true;
330 
331   return canonicalized;
332 }
333 
334 /* Helper that matches and simplifies the toplevel result from
335    a gimple_simplify run (where we don't want to build
336    a stmt in case it's used in in-place folding).  Replaces
337    RES_OP with a simplified and/or canonicalized result and
338    returns whether any change was made.  */
339 
340 static bool
341 gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
342 		    tree (*valueize)(tree))
343 {
344   if (constant_for_folding (res_op->ops[0])
345       && constant_for_folding (res_op->ops[1])
346       && constant_for_folding (res_op->ops[2]))
347     {
348       tree tem = NULL_TREE;
349       if (res_op->code.is_tree_code ())
350 	{
351 	  tree_code code = res_op->code;
352 	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
353 	      && TREE_CODE_LENGTH (code) == 3)
354 	    tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
355 						res_op->ops[0], res_op->ops[1],
356 						res_op->ops[2]);
357 	}
358       else
359 	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
360 			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
361       if (tem != NULL_TREE
362 	  && CONSTANT_CLASS_P (tem))
363 	{
364 	  if (TREE_OVERFLOW_P (tem))
365 	    tem = drop_tree_overflow (tem);
366 	  res_op->set_value (tem);
367 	  maybe_resimplify_conditional_op (seq, res_op, valueize);
368 	  return true;
369 	}
370     }
371 
372   /* Canonicalize operand order.  */
373   bool canonicalized = false;
374   if (res_op->code.is_tree_code ()
375       && commutative_ternary_tree_code (res_op->code)
376       && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
377     {
378       std::swap (res_op->ops[0], res_op->ops[1]);
379       canonicalized = true;
380     }
381 
382   /* Limit recursion, see gimple_resimplify1.  */
383   static unsigned depth;
384   if (depth > 10)
385     {
386       if (dump_file && (dump_flags & TDF_FOLDING))
387 	fprintf (dump_file, "Aborting expression simplification due to "
388 		 "deep recursion\n");
389       return false;
390     }
391 
392   ++depth;
393   gimple_match_op res_op2 (*res_op);
394   if (gimple_simplify (&res_op2, seq, valueize,
395 		       res_op->code, res_op->type,
396 		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
397     {
398       --depth;
399       *res_op = res_op2;
400       return true;
401     }
402   --depth;
403 
404   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
405     return true;
406 
407   return canonicalized;
408 }
409 
410 /* Helper that matches and simplifies the toplevel result from
411    a gimple_simplify run (where we don't want to build
412    a stmt in case it's used in in-place folding).  Replaces
413    RES_OP with a simplified and/or canonicalized result and
414    returns whether any change was made.  */
415 
416 static bool
417 gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
418 		    tree (*valueize)(tree))
419 {
420   /* No constant folding is defined for four-operand functions.  */
421 
422   /* Limit recursion, see gimple_resimplify1.  */
423   static unsigned depth;
424   if (depth > 10)
425     {
426       if (dump_file && (dump_flags & TDF_FOLDING))
427 	fprintf (dump_file, "Aborting expression simplification due to "
428 		 "deep recursion\n");
429       return false;
430     }
431 
432   ++depth;
433   gimple_match_op res_op2 (*res_op);
434   if (gimple_simplify (&res_op2, seq, valueize,
435 		       res_op->code, res_op->type,
436 		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
437 		       res_op->ops[3]))
438     {
439       --depth;
440       *res_op = res_op2;
441       return true;
442     }
443   --depth;
444 
445   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
446     return true;
447 
448   return false;
449 }
450 
451 /* Helper that matches and simplifies the toplevel result from
452    a gimple_simplify run (where we don't want to build
453    a stmt in case it's used in in-place folding).  Replaces
454    RES_OP with a simplified and/or canonicalized result and
455    returns whether any change was made.  */
456 
457 static bool
458 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
459 		    tree (*valueize)(tree))
460 {
461   /* No constant folding is defined for five-operand functions.  */
462 
463   gimple_match_op res_op2 (*res_op);
464   if (gimple_simplify (&res_op2, seq, valueize,
465 		       res_op->code, res_op->type,
466 		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
467 		       res_op->ops[3], res_op->ops[4]))
468     {
469       *res_op = res_op2;
470       return true;
471     }
472 
473   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
474     return true;
475 
476   return false;
477 }
478 
479 /* Match and simplify the toplevel valueized operation THIS.
480    Replaces THIS with a simplified and/or canonicalized result and
481    returns whether any change was made.  */
482 
483 bool
484 gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
485 {
486   switch (num_ops)
487     {
488     case 1:
489       return gimple_resimplify1 (seq, this, valueize);
490     case 2:
491       return gimple_resimplify2 (seq, this, valueize);
492     case 3:
493       return gimple_resimplify3 (seq, this, valueize);
494     case 4:
495       return gimple_resimplify4 (seq, this, valueize);
496     case 5:
497       return gimple_resimplify5 (seq, this, valueize);
498     default:
499       gcc_unreachable ();
500     }
501 }
502 
503 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
504    build a GENERIC tree for that expression and update RES_OP accordingly.  */
505 
506 void
507 maybe_build_generic_op (gimple_match_op *res_op)
508 {
509   tree_code code = (tree_code) res_op->code;
510   tree val;
511   switch (code)
512     {
513     case REALPART_EXPR:
514     case IMAGPART_EXPR:
515     case VIEW_CONVERT_EXPR:
516       val = build1 (code, res_op->type, res_op->ops[0]);
517       res_op->set_value (val);
518       break;
519     case BIT_FIELD_REF:
520       val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
521 		    res_op->ops[2]);
522       REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
523       res_op->set_value (val);
524       break;
525     default:;
526     }
527 }
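
/* REALPART_EXPR, IMAGPART_EXPR, VIEW_CONVERT_EXPR and BIT_FIELD_REF are
   single-rhs codes in GIMPLE, so e.g. a match result (REALPART_EXPR, _1)
   is wrapped above into the GENERIC tree REALPART_EXPR <_1> and stored
   back as a single value before a statement is built from it.  */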
528 
529 tree (*mprts_hook) (gimple_match_op *);
530 
531 /* Try to build RES_OP, which is known to be a call to FN.  Return null
532    if the target doesn't support the function.  */
533 
534 static gcall *
535 build_call_internal (internal_fn fn, gimple_match_op *res_op)
536 {
537   if (direct_internal_fn_p (fn))
538     {
539       tree_pair types = direct_internal_fn_types (fn, res_op->type,
540 						  res_op->ops);
541       if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
542 	return NULL;
543     }
544   return gimple_build_call_internal (fn, res_op->num_ops,
545 				     res_op->op_or_null (0),
546 				     res_op->op_or_null (1),
547 				     res_op->op_or_null (2),
548 				     res_op->op_or_null (3),
549 				     res_op->op_or_null (4));
550 }
551 
552 /* Push the exploded expression described by RES_OP as a statement to
553    SEQ if necessary and return a gimple value denoting the value of the
554    expression.  If RES is not NULL then the result will always be RES
555    and even gimple values are pushed to SEQ.  */
556 
557 tree
558 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
559 {
560   tree *ops = res_op->ops;
561   unsigned num_ops = res_op->num_ops;
562 
563   /* The caller should have converted conditional operations into an UNCOND
564      form and resimplified as appropriate.  The conditional form only
565      survives this far if that conversion failed.  */
566   if (res_op->cond.cond)
567     return NULL_TREE;
568 
569   if (res_op->code.is_tree_code ())
570     {
571       if (!res
572 	  && gimple_simplified_result_is_gimple_val (res_op))
573 	return ops[0];
574       if (mprts_hook)
575 	{
576 	  tree tem = mprts_hook (res_op);
577 	  if (tem)
578 	    return tem;
579 	}
580     }
581 
582   if (!seq)
583     return NULL_TREE;
584 
585   /* Play safe and do not allow abnormals to be mentioned in
586      newly created statements.  */
587   for (unsigned int i = 0; i < num_ops; ++i)
588     if (TREE_CODE (ops[i]) == SSA_NAME
589 	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
590       return NULL_TREE;
591 
592   if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
593     for (unsigned int i = 0; i < 2; ++i)
594       if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
595 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
596 	return NULL_TREE;
597 
598   if (res_op->code.is_tree_code ())
599     {
600       if (!res)
601 	{
602 	  if (gimple_in_ssa_p (cfun))
603 	    res = make_ssa_name (res_op->type);
604 	  else
605 	    res = create_tmp_reg (res_op->type);
606 	}
607       maybe_build_generic_op (res_op);
608       gimple *new_stmt = gimple_build_assign (res, res_op->code,
609 					      res_op->op_or_null (0),
610 					      res_op->op_or_null (1),
611 					      res_op->op_or_null (2));
612       gimple_seq_add_stmt_without_update (seq, new_stmt);
613       return res;
614     }
615   else
616     {
617       gcc_assert (num_ops != 0);
618       combined_fn fn = res_op->code;
619       gcall *new_stmt = NULL;
620       if (internal_fn_p (fn))
621 	{
622 	  /* Generate the given function if we can.  */
623 	  internal_fn ifn = as_internal_fn (fn);
624 	  new_stmt = build_call_internal (ifn, res_op);
625 	  if (!new_stmt)
626 	    return NULL_TREE;
627 	}
628       else
629 	{
630 	  /* Find the function we want to call.  */
631 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
632 	  if (!decl)
633 	    return NULL;
634 
635 	  /* We can't and should not emit calls to non-const functions.  */
636 	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
637 	    return NULL;
638 
639 	  new_stmt = gimple_build_call (decl, num_ops,
640 					res_op->op_or_null (0),
641 					res_op->op_or_null (1),
642 					res_op->op_or_null (2),
643 					res_op->op_or_null (3),
644 					res_op->op_or_null (4));
645 	}
646       if (!res)
647 	{
648 	  if (gimple_in_ssa_p (cfun))
649 	    res = make_ssa_name (res_op->type);
650 	  else
651 	    res = create_tmp_reg (res_op->type);
652 	}
653       gimple_call_set_lhs (new_stmt, res);
654       gimple_seq_add_stmt_without_update (seq, new_stmt);
655       return res;
656     }
657 }
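
/* A minimal usage sketch (illustrative only; VALUEIZE stands for
   whatever valueization hook the caller uses, or NULL):

     gimple_match_op op;
     gimple_seq stmts = NULL;
     tree val = NULL_TREE;
     if (gimple_simplify (stmt, &op, &stmts, valueize, valueize))
       val = maybe_push_res_to_seq (&op, &stmts, NULL_TREE);

   If VAL ends up non-NULL it denotes the simplified value, and STMTS
   holds the statements that had to be created to compute it, which the
   caller must insert before the point of use.  */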
658 
659 
660 /* Public API overloads follow for operation being tree_code or
661    built_in_function and for one to three operands or arguments.
662    They return NULL_TREE if nothing could be simplified or
663    the resulting simplified value with parts pushed to SEQ.
664    If SEQ is NULL then if the simplification needs to create
665    new stmts it will fail.  If VALUEIZE is non-NULL then all
666    SSA names will be valueized using that hook prior to
667    applying simplifications.  */
668 
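/* For example (an illustrative sketch), a pass holding operands OP0 and
   OP1 of type TYPE could try

     gimple_seq stmts = NULL;
     tree val = gimple_simplify (BIT_AND_EXPR, type, op0, op1,
				 &stmts, valueize);

   and insert STMTS before the use of VAL when VAL is non-NULL; passing
   a NULL SEQ instead makes the call fail whenever new statements would
   have to be created.  */
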
669 /* Unary ops.  */
670 
671 tree
672 gimple_simplify (enum tree_code code, tree type,
673 		 tree op0,
674 		 gimple_seq *seq, tree (*valueize)(tree))
675 {
676   if (constant_for_folding (op0))
677     {
678       tree res = const_unop (code, type, op0);
679       if (res != NULL_TREE
680 	  && CONSTANT_CLASS_P (res))
681 	return res;
682     }
683 
684   gimple_match_op res_op;
685   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
686     return NULL_TREE;
687   return maybe_push_res_to_seq (&res_op, seq);
688 }
689 
690 /* Binary ops.  */
691 
692 tree
693 gimple_simplify (enum tree_code code, tree type,
694 		 tree op0, tree op1,
695 		 gimple_seq *seq, tree (*valueize)(tree))
696 {
697   if (constant_for_folding (op0) && constant_for_folding (op1))
698     {
699       tree res = const_binop (code, type, op0, op1);
700       if (res != NULL_TREE
701 	  && CONSTANT_CLASS_P (res))
702 	return res;
703     }
704 
705   /* Canonicalize operand order both for matching and fallback stmt
706      generation.  */
707   if ((commutative_tree_code (code)
708        || TREE_CODE_CLASS (code) == tcc_comparison)
709       && tree_swap_operands_p (op0, op1))
710     {
711       std::swap (op0, op1);
712       if (TREE_CODE_CLASS (code) == tcc_comparison)
713 	code = swap_tree_comparison (code);
714     }
715 
716   gimple_match_op res_op;
717   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
718     return NULL_TREE;
719   return maybe_push_res_to_seq (&res_op, seq);
720 }
721 
722 /* Ternary ops.  */
723 
724 tree
725 gimple_simplify (enum tree_code code, tree type,
726 		 tree op0, tree op1, tree op2,
727 		 gimple_seq *seq, tree (*valueize)(tree))
728 {
729   if (constant_for_folding (op0) && constant_for_folding (op1)
730       && constant_for_folding (op2))
731     {
732       tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
733       if (res != NULL_TREE
734 	  && CONSTANT_CLASS_P (res))
735 	return res;
736     }
737 
738   /* Canonicalize operand order both for matching and fallback stmt
739      generation.  */
740   if (commutative_ternary_tree_code (code)
741       && tree_swap_operands_p (op0, op1))
742     std::swap (op0, op1);
743 
744   gimple_match_op res_op;
745   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
746     return NULL_TREE;
747   return maybe_push_res_to_seq (&res_op, seq);
748 }
749 
750 /* Builtin or internal function with one argument.  */
751 
752 tree
753 gimple_simplify (combined_fn fn, tree type,
754 		 tree arg0,
755 		 gimple_seq *seq, tree (*valueize)(tree))
756 {
757   if (constant_for_folding (arg0))
758     {
759       tree res = fold_const_call (fn, type, arg0);
760       if (res && CONSTANT_CLASS_P (res))
761 	return res;
762     }
763 
764   gimple_match_op res_op;
765   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
766     return NULL_TREE;
767   return maybe_push_res_to_seq (&res_op, seq);
768 }
769 
770 /* Builtin or internal function with two arguments.  */
771 
772 tree
773 gimple_simplify (combined_fn fn, tree type,
774 		 tree arg0, tree arg1,
775 		 gimple_seq *seq, tree (*valueize)(tree))
776 {
777   if (constant_for_folding (arg0)
778       && constant_for_folding (arg1))
779     {
780       tree res = fold_const_call (fn, type, arg0, arg1);
781       if (res && CONSTANT_CLASS_P (res))
782 	return res;
783     }
784 
785   gimple_match_op res_op;
786   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
787     return NULL_TREE;
788   return maybe_push_res_to_seq (&res_op, seq);
789 }
790 
791 /* Builtin or internal function with three arguments.  */
792 
793 tree
794 gimple_simplify (combined_fn fn, tree type,
795 		 tree arg0, tree arg1, tree arg2,
796 		 gimple_seq *seq, tree (*valueize)(tree))
797 {
798   if (constant_for_folding (arg0)
799       && constant_for_folding (arg1)
800       && constant_for_folding (arg2))
801     {
802       tree res = fold_const_call (fn, type, arg0, arg1, arg2);
803       if (res && CONSTANT_CLASS_P (res))
804 	return res;
805     }
806 
807   gimple_match_op res_op;
808   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
809     return NULL_TREE;
810   return maybe_push_res_to_seq (&res_op, seq);
811 }
812 
813 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
814    VALUEIZED to true if valueization changed OP.  */
815 
816 static inline tree
817 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
818 {
819   if (valueize && TREE_CODE (op) == SSA_NAME)
820     {
821       tree tem = valueize (op);
822       if (tem && tem != op)
823 	{
824 	  op = tem;
825 	  valueized = true;
826 	}
827     }
828   return op;
829 }
830 
831 /* If RES_OP is a call to a conditional internal function, try simplifying
832    the associated unconditional operation and using the result to build
833    a new conditional operation.  For example, if RES_OP is:
834 
835      IFN_COND_ADD (COND, A, B, ELSE)
836 
837    try simplifying (plus A B) and using the result to build a replacement
838    for the whole IFN_COND_ADD.
839 
840    Return true if this approach led to a simplification, otherwise leave
841    RES_OP unchanged (and so suitable for other simplifications).  When
842    returning true, add any new statements to SEQ and use VALUEIZE as the
843    valueization function.
844 
845    RES_OP is known to be a call to IFN.  */
846 
847 static bool
848 try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
849 				gimple_seq *seq, tree (*valueize) (tree))
850 {
851   code_helper op;
852   tree_code code = conditional_internal_fn_code (ifn);
853   if (code != ERROR_MARK)
854     op = code;
855   else
856     {
857       ifn = get_unconditional_internal_fn (ifn);
858       if (ifn == IFN_LAST)
859 	return false;
860       op = as_combined_fn (ifn);
861     }
862 
863   unsigned int num_ops = res_op->num_ops;
864   gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
865 					      res_op->ops[num_ops - 1]),
866 			   op, res_op->type, num_ops - 2);
867 
868   memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
869   switch (num_ops - 2)
870     {
871     case 2:
872       if (!gimple_resimplify2 (seq, &cond_op, valueize))
873 	return false;
874       break;
875     case 3:
876       if (!gimple_resimplify3 (seq, &cond_op, valueize))
877 	return false;
878       break;
879     default:
880       gcc_unreachable ();
881     }
882   *res_op = cond_op;
883   maybe_resimplify_conditional_op (seq, res_op, valueize);
884   return true;
885 }
886 
887 /* The main STMT based simplification entry.  It is used by the fold_stmt
888    and the fold_stmt_to_constant APIs.  */
889 
890 bool
891 gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
892 		 tree (*valueize)(tree), tree (*top_valueize)(tree))
893 {
894   switch (gimple_code (stmt))
895     {
896     case GIMPLE_ASSIGN:
897       {
898 	enum tree_code code = gimple_assign_rhs_code (stmt);
899 	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
900 	switch (gimple_assign_rhs_class (stmt))
901 	  {
902 	  case GIMPLE_SINGLE_RHS:
903 	    if (code == REALPART_EXPR
904 		|| code == IMAGPART_EXPR
905 		|| code == VIEW_CONVERT_EXPR)
906 	      {
907 		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
908 		bool valueized = false;
909 		op0 = do_valueize (op0, top_valueize, valueized);
910 		res_op->set_op (code, type, op0);
911 		return (gimple_resimplify1 (seq, res_op, valueize)
912 			|| valueized);
913 	      }
914 	    else if (code == BIT_FIELD_REF)
915 	      {
916 		tree rhs1 = gimple_assign_rhs1 (stmt);
917 		tree op0 = TREE_OPERAND (rhs1, 0);
918 		bool valueized = false;
919 		op0 = do_valueize (op0, top_valueize, valueized);
920 		res_op->set_op (code, type, op0,
921 				TREE_OPERAND (rhs1, 1),
922 				TREE_OPERAND (rhs1, 2),
923 				REF_REVERSE_STORAGE_ORDER (rhs1));
924 		if (res_op->reverse)
925 		  return valueized;
926 		return (gimple_resimplify3 (seq, res_op, valueize)
927 			|| valueized);
928 	      }
929 	    else if (code == SSA_NAME
930 		     && top_valueize)
931 	      {
932 		tree op0 = gimple_assign_rhs1 (stmt);
933 		tree valueized = top_valueize (op0);
934 		if (!valueized || op0 == valueized)
935 		  return false;
936 		res_op->set_op (TREE_CODE (op0), type, valueized);
937 		return true;
938 	      }
939 	    break;
940 	  case GIMPLE_UNARY_RHS:
941 	    {
942 	      tree rhs1 = gimple_assign_rhs1 (stmt);
943 	      bool valueized = false;
944 	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
945 	      res_op->set_op (code, type, rhs1);
946 	      return (gimple_resimplify1 (seq, res_op, valueize)
947 		      || valueized);
948 	    }
949 	  case GIMPLE_BINARY_RHS:
950 	    {
951 	      tree rhs1 = gimple_assign_rhs1 (stmt);
952 	      tree rhs2 = gimple_assign_rhs2 (stmt);
953 	      bool valueized = false;
954 	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
955 	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
956 	      res_op->set_op (code, type, rhs1, rhs2);
957 	      return (gimple_resimplify2 (seq, res_op, valueize)
958 		      || valueized);
959 	    }
960 	  case GIMPLE_TERNARY_RHS:
961 	    {
962 	      bool valueized = false;
963 	      tree rhs1 = gimple_assign_rhs1 (stmt);
964 	      /* If this is a [VEC_]COND_EXPR first try to simplify an
965 		 embedded GENERIC condition.  */
966 	      if (code == COND_EXPR
967 		  || code == VEC_COND_EXPR)
968 		{
969 		  if (COMPARISON_CLASS_P (rhs1))
970 		    {
971 		      tree lhs = TREE_OPERAND (rhs1, 0);
972 		      tree rhs = TREE_OPERAND (rhs1, 1);
973 		      lhs = do_valueize (lhs, top_valueize, valueized);
974 		      rhs = do_valueize (rhs, top_valueize, valueized);
975 		      gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
976 					       TREE_TYPE (rhs1), lhs, rhs);
977 		      if ((gimple_resimplify2 (seq, &res_op2, valueize)
978 			   || valueized)
979 			  && res_op2.code.is_tree_code ())
980 			{
981 			  valueized = true;
982 			  if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
983 			      == tcc_comparison)
984 			    rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
985 					   res_op2.ops[0], res_op2.ops[1]);
986 			  else if (res_op2.code == SSA_NAME
987 				   || res_op2.code == INTEGER_CST
988 				   || res_op2.code == VECTOR_CST)
989 			    rhs1 = res_op2.ops[0];
990 			  else
991 			    valueized = false;
992 			}
993 		    }
994 		}
995 	      tree rhs2 = gimple_assign_rhs2 (stmt);
996 	      tree rhs3 = gimple_assign_rhs3 (stmt);
997 	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
998 	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
999 	      rhs3 = do_valueize (rhs3, top_valueize, valueized);
1000 	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
1001 	      return (gimple_resimplify3 (seq, res_op, valueize)
1002 		      || valueized);
1003 	    }
1004 	  default:
1005 	    gcc_unreachable ();
1006 	  }
1007 	break;
1008       }
1009 
1010     case GIMPLE_CALL:
1011       /* ???  This way we can't simplify calls with side-effects.  */
1012       if (gimple_call_lhs (stmt) != NULL_TREE
1013 	  && gimple_call_num_args (stmt) >= 1
1014 	  && gimple_call_num_args (stmt) <= 5)
1015 	{
1016 	  bool valueized = false;
1017 	  combined_fn cfn;
1018 	  if (gimple_call_internal_p (stmt))
1019 	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
1020 	  else
1021 	    {
1022 	      tree fn = gimple_call_fn (stmt);
1023 	      if (!fn)
1024 		return false;
1025 
1026 	      fn = do_valueize (fn, top_valueize, valueized);
1027 	      if (TREE_CODE (fn) != ADDR_EXPR
1028 		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
1029 		return false;
1030 
1031 	      tree decl = TREE_OPERAND (fn, 0);
1032 	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
1033 		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
1034 		return false;
1035 
1036 	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
1037 	    }
1038 
1039 	  unsigned int num_args = gimple_call_num_args (stmt);
1040 	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
1041 	  for (unsigned i = 0; i < num_args; ++i)
1042 	    {
1043 	      tree arg = gimple_call_arg (stmt, i);
1044 	      res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
1045 	    }
1046 	  if (internal_fn_p (cfn)
1047 	      && try_conditional_simplification (as_internal_fn (cfn),
1048 						 res_op, seq, valueize))
1049 	    return true;
1050 	  switch (num_args)
1051 	    {
1052 	    case 1:
1053 	      return (gimple_resimplify1 (seq, res_op, valueize)
1054 		      || valueized);
1055 	    case 2:
1056 	      return (gimple_resimplify2 (seq, res_op, valueize)
1057 		      || valueized);
1058 	    case 3:
1059 	      return (gimple_resimplify3 (seq, res_op, valueize)
1060 		      || valueized);
1061 	    case 4:
1062 	      return (gimple_resimplify4 (seq, res_op, valueize)
1063 		      || valueized);
1064 	    case 5:
1065 	      return (gimple_resimplify5 (seq, res_op, valueize)
1066 		      || valueized);
1067 	    default:
1068 	     gcc_unreachable ();
1069 	    }
1070 	}
1071       break;
1072 
1073     case GIMPLE_COND:
1074       {
1075 	tree lhs = gimple_cond_lhs (stmt);
1076 	tree rhs = gimple_cond_rhs (stmt);
1077 	bool valueized = false;
1078 	lhs = do_valueize (lhs, top_valueize, valueized);
1079 	rhs = do_valueize (rhs, top_valueize, valueized);
1080 	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
1081 	return (gimple_resimplify2 (seq, res_op, valueize)
1082 		|| valueized);
1083       }
1084 
1085     default:
1086       break;
1087     }
1088 
1089   return false;
1090 }
1091 
1092 
1093 /* Helper for the autogenerated code, valueize OP.  */
1094 
1095 inline tree
1096 do_valueize (tree (*valueize)(tree), tree op)
1097 {
1098   if (valueize && TREE_CODE (op) == SSA_NAME)
1099     {
1100       tree tem = valueize (op);
1101       if (tem)
1102 	return tem;
1103     }
1104   return op;
1105 }
1106 
1107 /* Helper for the autogenerated code, get at the definition of NAME when
1108    VALUEIZE allows that.  */
1109 
1110 inline gimple *
1111 get_def (tree (*valueize)(tree), tree name)
1112 {
1113   if (valueize && ! valueize (name))
1114     return NULL;
1115   return SSA_NAME_DEF_STMT (name);
1116 }
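
/* The autogenerated matchers typically combine the two helpers above
   roughly like this (a paraphrase of generated code, not a verbatim
   quote):

     if (TREE_CODE (op) == SSA_NAME)
       if (gimple *def = get_def (valueize, op))
	 if (gassign *a = dyn_cast <gassign *> (def))
	   {
	     tree rhs1 = do_valueize (valueize, gimple_assign_rhs1 (a));
	     ... match against gimple_assign_rhs_code (a) and RHS1 ...
	   }

   before dispatching to the per-pattern simplification routines.  */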
1117 
1118 /* Routine to determine if the types T1 and T2 are effectively
1119    the same for GIMPLE.  If T1 or T2 is not a type, the test
1120    applies to their TREE_TYPE.  */
1121 
1122 static inline bool
1123 types_match (tree t1, tree t2)
1124 {
1125   if (!TYPE_P (t1))
1126     t1 = TREE_TYPE (t1);
1127   if (!TYPE_P (t2))
1128     t2 = TREE_TYPE (t2);
1129 
1130   return types_compatible_p (t1, t2);
1131 }
1132 
1133 /* Return true if T has a single use.  For GIMPLE, we also allow any
1134    non-SSA_NAME (i.e. constants) and zero uses to cope with uses
1135    that aren't linked up yet.  */
1136 
1137 static inline bool
1138 single_use (tree t)
1139 {
1140   return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
1141 }
1142 
1143 /* Return true if math operations should be canonicalized,
1144    e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */
1145 
1146 static inline bool
1147 canonicalize_math_p ()
1148 {
1149   return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1150 }
1151 
1152 /* Return true if math operations that are beneficial only after
1153    vectorization should be canonicalized.  */
1154 
1155 static inline bool
1156 canonicalize_math_after_vectorization_p ()
1157 {
1158   return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1159 }
1160 
1161 /* Return true if we can still perform transformations that may introduce
1162    vector operations that are not supported by the target. Vector lowering
1163    normally handles those, but after that pass, it becomes unsafe.  */
1164 
1165 static inline bool
1166 optimize_vectors_before_lowering_p ()
1167 {
1168   return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
1169 }
1170 
1171 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1172    As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1173    is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1174    where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1175    will likely be exact, while exp (log (arg0) * arg1) might be not.
1176    Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */
1177 
1178 static bool
1179 optimize_pow_to_exp (tree arg0, tree arg1)
1180 {
1181   gcc_assert (TREE_CODE (arg0) == REAL_CST);
1182   if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
1183     return true;
1184 
1185   if (TREE_CODE (arg1) != SSA_NAME)
1186     return true;
1187 
1188   gimple *def = SSA_NAME_DEF_STMT (arg1);
1189   gphi *phi = dyn_cast <gphi *> (def);
1190   tree cst1 = NULL_TREE;
1191   enum tree_code code = ERROR_MARK;
1192   if (!phi)
1193     {
1194       if (!is_gimple_assign (def))
1195 	return true;
1196       code = gimple_assign_rhs_code (def);
1197       switch (code)
1198 	{
1199 	case PLUS_EXPR:
1200 	case MINUS_EXPR:
1201 	  break;
1202 	default:
1203 	  return true;
1204 	}
1205       if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
1206 	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
1207 	return true;
1208 
1209       cst1 = gimple_assign_rhs2 (def);
1210 
1211       phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
1212       if (!phi)
1213 	return true;
1214     }
1215 
1216   tree cst2 = NULL_TREE;
1217   int n = gimple_phi_num_args (phi);
1218   for (int i = 0; i < n; i++)
1219     {
1220       tree arg = PHI_ARG_DEF (phi, i);
1221       if (TREE_CODE (arg) != REAL_CST)
1222 	continue;
1223       else if (cst2 == NULL_TREE)
1224 	cst2 = arg;
1225       else if (!operand_equal_p (cst2, arg, 0))
1226 	return true;
1227     }
1228 
1229   if (cst1 && cst2)
1230     cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
1231   if (cst2
1232       && TREE_CODE (cst2) == REAL_CST
1233       && real_isinteger (TREE_REAL_CST_PTR (cst2),
1234 			 TYPE_MODE (TREE_TYPE (cst2))))
1235     return false;
1236   return true;
1237 }
1238 
1239 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1240    is another division can be optimized.  Don't optimize if INNER_DIV
1241    is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */
1242 
1243 static bool
1244 optimize_successive_divisions_p (tree divisor, tree inner_div)
1245 {
1246   if (!gimple_in_ssa_p (cfun))
1247     return false;
1248 
1249   imm_use_iterator imm_iter;
1250   use_operand_p use_p;
1251   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
1252     {
1253       gimple *use_stmt = USE_STMT (use_p);
1254       if (!is_gimple_assign (use_stmt)
1255 	  || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
1256 	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
1257 	continue;
1258       return false;
1259     }
1260   return true;
1261 }
1262