/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"


/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */
static bool gimple_simplify (code_helper *, tree *,
			     gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
			     gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
			     gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree);


/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
	  /* The following is only interesting to string builtins.  */
	  || (TREE_CODE (t) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
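
/* For example, INTEGER_CST, REAL_CST and VECTOR_CST operands qualify
   directly, and the address of a string literal such as &"foo" (an
   ADDR_EXPR of a STRING_CST) qualifies as well, which is what allows
   calls like strlen ("foo") to be folded to a constant below.  */
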


/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify1 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = const_unop (*res_code, type, res_ops[0]);
      else
	tem = fold_const_call (combined_fn (*res_code), type, res_ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  /* Limit recursion.  There are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation, like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as the available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type, res_ops[0]))
    {
      --depth;
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }
  --depth;

  return false;
}
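
/* Worked example: if *RES_CODE is NEGATE_EXPR and RES_OPS[0] is the
   INTEGER_CST 5, the constant path above folds this via const_unop to
   the INTEGER_CST -5, stores it in RES_OPS[0] and sets *RES_CODE to
   INTEGER_CST, so the caller sees a fully constant result without any
   statement being built.  */
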

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify2 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = const_binop (*res_code, type, res_ops[0], res_ops[1]);
      else
	tem = fold_const_call (combined_fn (*res_code), type,
			       res_ops[0], res_ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison
	  || commutative_tree_code (*res_code))
      && tree_swap_operands_p (res_ops[0], res_ops[1]))
    {
      std::swap (res_ops[0], res_ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison)
	*res_code = swap_tree_comparison (*res_code);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type, res_ops[0], res_ops[1]))
    {
      --depth;
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }
  --depth;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify3 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
					    res_ops[1], res_ops[2]);
      else
	tem = fold_const_call (combined_fn (*res_code), type,
			       res_ops[0], res_ops[1], res_ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1]))
    {
      std::swap (res_ops[0], res_ops[1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type,
		       res_ops[0], res_ops[1], res_ops[2]))
    {
      --depth;
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }
  --depth;

  return canonicalized;
}


/* If expressions with CODE appear as a single-rhs in GIMPLE, build
   a GENERIC tree for that expression and store it in OPS[0].  */

void
maybe_build_generic_op (enum tree_code code, tree type, tree *ops)
{
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      ops[0] = build1 (code, type, ops[0]);
      break;
    case BIT_FIELD_REF:
      ops[0] = build3 (code, type, ops[0], ops[1], ops[2]);
      ops[1] = ops[2] = NULL_TREE;
      break;
    default:;
    }
}

tree (*mprts_hook) (code_helper, tree, tree *);

/* Try to build a call to FN with return type TYPE and the NARGS
   arguments given in OPS.  Return null if the target doesn't support
   the function.  */

static gcall *
build_call_internal (internal_fn fn, tree type, unsigned int nargs, tree *ops)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, type, ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
	return NULL;
    }
  return gimple_build_call_internal (fn, nargs, ops[0], ops[1], ops[2]);
}

/* Push the exploded expression described by RCODE, TYPE and OPS
   as a statement to SEQ if necessary and return a gimple value
   denoting the value of the expression.  If RES is not NULL
   then the result will always be RES and even gimple values are
   pushed to SEQ.  */

tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
		       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
	  && gimple_simplified_result_is_gimple_val (rcode, ops))
	return ops[0];
      if (mprts_hook)
	{
	  tree tem = mprts_hook (rcode, type, ops);
	  if (tem)
	    return tem;
	}
      if (!seq)
	return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
	  || (ops[1]
	      && TREE_CODE (ops[1]) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
	  || (ops[2]
	      && TREE_CODE (ops[2]) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]))
	  || (COMPARISON_CLASS_P (ops[0])
	      && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
		   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
								     0)))
		  || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
									1))))))
	return NULL_TREE;
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	}
      maybe_build_generic_op (rcode, type, ops);
      gimple *new_stmt = gimple_build_assign (res, rcode,
					     ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
	return NULL_TREE;
      combined_fn fn = rcode;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      unsigned nargs;
      for (nargs = 0; nargs < 3; ++nargs)
	{
	  if (!ops[nargs])
	    break;
	  if (TREE_CODE (ops[nargs]) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
	    return NULL_TREE;
	}
      gcc_assert (nargs != 0);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
	{
	  /* Generate the given function if we can.  */
	  internal_fn ifn = as_internal_fn (fn);
	  new_stmt = build_call_internal (ifn, type, nargs, ops);
	  if (!new_stmt)
	    return NULL_TREE;
	}
      else
	{
	  /* Find the function we want to call.  */
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  if (!decl)
	    return NULL;

	  /* We can't and should not emit calls to non-const functions.  */
	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
	    return NULL;

	  new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
	}
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	}
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
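
/* Usage sketch (illustrative only; GSI is a hypothetical iterator at the
   statement being folded): a caller that obtained RCODE/OPS from one of
   the gimple_simplify entry points can materialize the result like so:

     gimple_seq stmts = NULL;
     tree val = maybe_push_res_to_seq (rcode, type, ops, &stmts);
     if (val)
       gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If the simplified result is already a gimple value, no statement is
   pushed and STMTS stays empty.  */
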


/* Public API overloads follow, for the operation being a tree_code or
   a built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified, otherwise the
   simplified value with any required statements pushed to SEQ.
   If SEQ is NULL, the simplification fails whenever it would need to
   create new stmts.  If VALUEIZE is non-NULL then all SSA names will
   be valueized using that hook prior to applying simplifications.  */
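
/* For example (a sketch, with X a hypothetical SSA name of integer
   type), simplifying x + 0 through the binary overload:

     gimple_seq stmts = NULL;
     tree res = gimple_simplify (PLUS_EXPR, TREE_TYPE (x), x,
				 build_zero_cst (TREE_TYPE (x)),
				 &stmts, NULL);

   yields X itself without pushing any statements, since the match.pd
   pattern for x + 0 simplifies to a gimple value directly.  */
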

/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	code = swap_tree_comparison (code);
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1, tree op2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with one argument.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
		 tree arg0,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			as_combined_fn (fn), type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with two arguments.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
		 tree arg0, tree arg1,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			as_combined_fn (fn), type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with three arguments.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
		 tree arg0, tree arg1, tree arg2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			as_combined_fn (fn), type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
	{
	  op = tem;
	  valueized = true;
	}
    }
  return op;
}

/* The main STMT-based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt,
		 code_helper *rcode, tree *ops,
		 gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		*rcode = code;
		ops[0] = op0;
		return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
			|| valueized);
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = TREE_OPERAND (rhs1, 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		*rcode = code;
		ops[0] = op0;
		ops[1] = TREE_OPERAND (rhs1, 1);
		ops[2] = TREE_OPERAND (rhs1, 2);
		return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
			|| valueized);
	      }
	    else if (code == SSA_NAME
		     && top_valueize)
	      {
		tree op0 = gimple_assign_rhs1 (stmt);
		tree valueized = top_valueize (op0);
		if (!valueized || op0 == valueized)
		  return false;
		ops[0] = valueized;
		*rcode = TREE_CODE (op0);
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      bool valueized = false;
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      /* If this is a [VEC_]COND_EXPR first try to simplify an
		 embedded GENERIC condition.  */
	      if (code == COND_EXPR
		  || code == VEC_COND_EXPR)
		{
		  if (COMPARISON_CLASS_P (rhs1))
		    {
		      tree lhs = TREE_OPERAND (rhs1, 0);
		      tree rhs = TREE_OPERAND (rhs1, 1);
		      lhs = do_valueize (lhs, top_valueize, valueized);
		      rhs = do_valueize (rhs, top_valueize, valueized);
		      code_helper rcode2 = TREE_CODE (rhs1);
		      tree ops2[3] = {};
		      ops2[0] = lhs;
		      ops2[1] = rhs;
		      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
					       ops2, valueize)
			   || valueized)
			  && rcode2.is_tree_code ())
			{
			  valueized = true;
			  if (TREE_CODE_CLASS ((enum tree_code)rcode2)
			      == tcc_comparison)
			    rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
					   ops2[0], ops2[1]);
			  else if (rcode2 == SSA_NAME
				   || rcode2 == INTEGER_CST
				   || rcode2 == VECTOR_CST)
			    rhs1 = ops2[0];
			  else
			    valueized = false;
			}
		    }
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      tree rhs3 = gimple_assign_rhs3 (stmt);
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      rhs3 = do_valueize (rhs3, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      ops[2] = rhs3;
	      return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 3)
	{
	  bool valueized = false;
	  if (gimple_call_internal_p (stmt))
	    *rcode = as_combined_fn (gimple_call_internal_fn (stmt));
	  else
	    {
	      tree fn = gimple_call_fn (stmt);
	      if (!fn)
		return false;

	      fn = do_valueize (fn, top_valueize, valueized);
	      if (TREE_CODE (fn) != ADDR_EXPR
		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
		return false;

	      tree decl = TREE_OPERAND (fn, 0);
	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
		return false;

	      *rcode = as_combined_fn (DECL_FUNCTION_CODE (decl));
	    }

	  tree type = TREE_TYPE (gimple_call_lhs (stmt));
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      ops[i] = do_valueize (arg, top_valueize, valueized);
	    }
	  switch (gimple_call_num_args (stmt))
	    {
	    case 1:
	      return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
		      || valueized);
	    case 2:
	      return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
		      || valueized);
	    case 3:
	      return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
		      || valueized);
	    default:
	      gcc_unreachable ();
	    }
	}
      break;

    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	bool valueized = false;
	lhs = do_valueize (lhs, top_valueize, valueized);
	rhs = do_valueize (rhs, top_valueize, valueized);
	*rcode = gimple_cond_code (stmt);
	ops[0] = lhs;
	ops[1] = rhs;
	return (gimple_resimplify2 (seq, rcode,
				    boolean_type_node, ops, valueize)
		|| valueized);
      }

    default:
      break;
    }

  return false;
}
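
/* Usage sketch (illustrative; the valueization callback MY_VALUEIZE is
   hypothetical): a folder typically calls this entry point and then
   either rewrites the statement in place or materializes the result:

     code_helper rcode;
     tree ops[3] = {};
     gimple_seq stmts = NULL;
     if (gimple_simplify (stmt, &rcode, ops, &stmts,
			  my_valueize, my_valueize))
       {
	 tree val = maybe_push_res_to_seq (rcode,
					   TREE_TYPE (gimple_get_lhs (stmt)),
					   ops, &stmts);
	 ...
       }

   This mirrors how the fold_stmt and fold_stmt_to_constant APIs
   mentioned above consume the RCODE/OPS result.  */
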


/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
	return tem;
    }
  return op;
}

/* Helper for the autogenerated code, get at the definition of NAME when
   VALUEIZE allows that.  */

inline gimple *
get_def (tree (*valueize)(tree), tree name)
{
  if (valueize && ! valueize (name))
    return NULL;
  return SSA_NAME_DEF_STMT (name);
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (i.e. constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}

/* Return true if math operations that are beneficial only after
   vectorization should be canonicalized.  */

static inline bool
canonicalize_math_after_vectorization_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
}
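
/* Both predicates above are meant for the conditions of match.pd
   patterns.  A rough sketch of how a pattern restricts itself to the
   early canonicalization phase (simplified from the real sqrt(sqrt(x))
   pattern; the exact guards in match.pd differ):

     (simplify
      (SQRT (SQRT @0))
      (if (flag_unsafe_math_optimizations && canonicalize_math_p ())
       (POW @0 { build_real (type, dconst_quarter ()); })))  */
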

/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might not be.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}