xref: /dragonfly/contrib/gcc-8.0/gcc/gimple-fold.c (revision 335b9e93)
1 /* Statement simplification on GIMPLE.
2    Copyright (C) 2010-2018 Free Software Foundation, Inc.
3    Split out from tree-ssa-ccp.c.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
69 
70 /* Return true when DECL can be referenced from current unit.
71    FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
72    We can get declarations that are not possible to reference for various
73    reasons:
74 
75      1) When analyzing C++ virtual tables.
76 	C++ virtual tables do have known constructors even
77 	when they are keyed to other compilation unit.
78 	Those tables can contain pointers to methods and vars
79 	in other units.  Those methods have both STATIC and EXTERNAL
80 	set.
81      2) In WHOPR mode devirtualization might lead to reference
82 	to method that was partitioned elsewhere.
83 	In this case we have static VAR_DECL or FUNCTION_DECL
84 	that has no corresponding callgraph/varpool node
85 	declaring the body.
86      3) COMDAT functions referred by external vtables that
87         we devirtualize only during final compilation stage.
88         At this time we already decided that we will not output
89         the function body and thus we can't reference the symbol
90         directly.  */
91 
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract declarations (debug-only origins) never have a body that
     could be referenced.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      /* No symtab node or no definition means the body is already gone.  */
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined into its caller has no standalone body
	 left to reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized. */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  /* Reject when there is no usable definition here, unless the symbol
     lives in another partition and is forced to be output anyway.  */
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
169 
170 /* Create a temporary for TYPE for a statement STMT.  If the current function
171    is in SSA form, a SSA name is created.  Otherwise a temporary register
172    is made.  */
173 
174 tree
175 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 {
177   if (gimple_in_ssa_p (cfun))
178     return make_ssa_name (type, stmt);
179   else
180     return create_tmp_reg (type);
181 }
182 
183 /* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
184    acceptable form for is_gimple_min_invariant.
185    FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */
186 
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn "ptr p+ offset" with constant offset into "&MEM[ptr, offset]"
     so the result is an ADDR_EXPR handled by the code below.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl in place.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the referenced decl cannot be referenced from the
	 current compilation unit (see can_refer_decl_in_current_unit_p).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Restore the type the caller originally saw, if the fixup above
	 (or STRIP_NOPS) changed it.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
244 
245 /* If SYM is a constant variable with known value, return the value.
246    NULL_TREE is returned otherwise.  */
247 
248 tree
249 get_symbol_constant_value (tree sym)
250 {
251   tree val = ctor_for_folding (sym);
252   if (val != error_mark_node)
253     {
254       if (val)
255 	{
256 	  val = canonicalize_constructor_val (unshare_expr (val), sym);
257 	  if (val && is_gimple_min_invariant (val))
258 	    return val;
259 	  else
260 	    return NULL_TREE;
261 	}
262       /* Variables declared 'const' without an initializer
263 	 have zero as the initializer if they may not be
264 	 overridden at link or run time.  */
265       if (!val
266           && is_gimple_reg_type (TREE_TYPE (sym)))
267 	return build_zero_cst (TREE_TYPE (sym));
268     }
269 
270   return NULL_TREE;
271 }
272 
273 
274 
275 /* Subroutine of fold_stmt.  We perform several simplifications of the
276    memory reference tree EXPR and make sure to re-gimplify them properly
277    after propagation of constant addresses.  IS_LHS is true if the
278    reference is supposed to be an lvalue.  */
279 
280 static tree
281 maybe_fold_reference (tree expr, bool is_lhs)
282 {
283   tree result;
284 
285   if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
286        || TREE_CODE (expr) == REALPART_EXPR
287        || TREE_CODE (expr) == IMAGPART_EXPR)
288       && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
289     return fold_unary_loc (EXPR_LOCATION (expr),
290 			   TREE_CODE (expr),
291 			   TREE_TYPE (expr),
292 			   TREE_OPERAND (expr, 0));
293   else if (TREE_CODE (expr) == BIT_FIELD_REF
294 	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
295     return fold_ternary_loc (EXPR_LOCATION (expr),
296 			     TREE_CODE (expr),
297 			     TREE_TYPE (expr),
298 			     TREE_OPERAND (expr, 0),
299 			     TREE_OPERAND (expr, 1),
300 			     TREE_OPERAND (expr, 2));
301 
302   if (!is_lhs
303       && (result = fold_const_aggregate_ref (expr))
304       && is_gimple_min_invariant (result))
305     return result;
306 
307   return NULL_TREE;
308 }
309 
310 
311 /* Attempt to fold an assignment statement pointed-to by SI.  Returns a
312    replacement rhs for the statement or NULL_TREE if no simplification
313    could be made.  It is assumed that the operands have been previously
314    folded.  */
315 
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are markers, not values; nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: if the set of possible targets of the
	       virtual call is known and small enough, replace the
	       OBJ_TYPE_REF by the target's address.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] folds to p (converted to the right type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
	  /* Only hand back forms that are valid as a GIMPLE rhs.  */
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
450 
451 
452 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
453    adjusting the replacement stmts location and virtual operands.
454    If the statement has a lhs the last stmt in the sequence is expected
455    to assign to that lhs.  */
456 
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store (assignment to a
	 non-register) or a call that may clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the original
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
523 
524 /* Convert EXPR into a GIMPLE value suitable for substitution on the
525    RHS of an assignment.  Insert the necessary statements before
526    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
527    is replaced.  If the call is expected to produce a result, then it
528    is replaced by an assignment of the new RHS to the result variable.
529    If the result is to be ignored, then the call is replaced by a
530    GIMPLE_NOP.  A proper VDEF chain is retained by making the first
531    VUSE and the last VDEF of the whole sequence be the same as the replaced
532    statement and using new SSA names for stores in between.  */
533 
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result is used: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  /* The call had no effect at all; replace it with a no-op.  */
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a value and assign it to the original LHS
	 as the last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, fixing up virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
579 
580 
581 /* Replace the call at *GSI with the gimple value VAL.  */
582 
583 void
584 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 {
586   gimple *stmt = gsi_stmt (*gsi);
587   tree lhs = gimple_call_lhs (stmt);
588   gimple *repl;
589   if (lhs)
590     {
591       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
592 	val = fold_convert (TREE_TYPE (lhs), val);
593       repl = gimple_build_assign (lhs, val);
594     }
595   else
596     repl = gimple_build_nop ();
597   tree vdef = gimple_vdef (stmt);
598   if (vdef && TREE_CODE (vdef) == SSA_NAME)
599     {
600       unlink_stmt_vdef (stmt);
601       release_ssa_name (vdef);
602     }
603   gsi_replace (gsi, repl, false);
604 }
605 
606 /* Replace the call at *GSI with the new call REPL and fold that
607    again.  */
608 
609 static void
610 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 {
612   gimple *stmt = gsi_stmt (*gsi);
613   gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
614   gimple_set_location (repl, gimple_location (stmt));
615   if (gimple_vdef (stmt)
616       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617     {
618       gimple_set_vdef (repl, gimple_vdef (stmt));
619       SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620     }
621   if (gimple_vuse (stmt))
622     gimple_set_vuse (repl, gimple_vuse (stmt));
623   gsi_replace (gsi, repl, false);
624   fold_stmt (gsi);
625 }
626 
627 /* Return true if VAR is a VAR_DECL or a component thereof.  */
628 
629 static bool
630 var_decl_component_p (tree var)
631 {
632   tree inner = var;
633   while (handled_component_p (inner))
634     inner = TREE_OPERAND (inner, 0);
635   return SSA_VAR_P (inner);
636 }
637 
638 /* If the SIZE argument representing the size of an object is in a range
639    of values of which exactly one is valid (and that is zero), return
640    true, otherwise false.  */
641 
642 static bool
643 size_must_be_zero_p (tree size)
644 {
645   if (integer_zerop (size))
646     return true;
647 
648   if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649     return false;
650 
651   wide_int min, max;
652   enum value_range_type rtype = get_range_info (size, &min, &max);
653   if (rtype != VR_ANTI_RANGE)
654     return false;
655 
656   tree type = TREE_TYPE (size);
657   int prec = TYPE_PRECISION (type);
658 
659   wide_int wone = wi::one (prec);
660 
661   /* Compute the value of SSIZE_MAX, the largest positive value that
662      can be stored in ssize_t, the signed counterpart of size_t.  */
663   wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664 
665   return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
666 }
667 
668 /* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
669    diagnose (otherwise undefined) overlapping copies without preventing
670    folding.  When folded, GCC guarantees that overlapping memcpy has
671    the same semantics as memmove.  Call to the library memcpy need not
672    provide the same guarantee.  Return false if no simplification can
673    be made.  */
674 
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 			       tree dest, tree src, int endp)
678 {
679   gimple *stmt = gsi_stmt (*gsi);
680   tree lhs = gimple_call_lhs (stmt);
681   tree len = gimple_call_arg (stmt, 2);
682   tree destvar, srcvar;
683   location_t loc = gimple_location (stmt);
684 
685   bool nowarn = gimple_no_warning_p (stmt);
686 
687   /* If the LEN parameter is a constant zero or in range where
688      the only valid value is zero, return DEST.  */
689   if (size_must_be_zero_p (len))
690     {
691       gimple *repl;
692       if (gimple_call_lhs (stmt))
693 	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694       else
695 	repl = gimple_build_nop ();
696       tree vdef = gimple_vdef (stmt);
697       if (vdef && TREE_CODE (vdef) == SSA_NAME)
698 	{
699 	  unlink_stmt_vdef (stmt);
700 	  release_ssa_name (vdef);
701 	}
702       gsi_replace (gsi, repl, false);
703       return true;
704     }
705 
706   /* If SRC and DEST are the same (and not volatile), return
707      DEST{,+LEN,+LEN-1}.  */
708   if (operand_equal_p (src, dest, 0))
709     {
710       /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 	 It's safe and may even be emitted by GCC itself (see bug
712 	 32667).  */
713       unlink_stmt_vdef (stmt);
714       if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 	release_ssa_name (gimple_vdef (stmt));
716       if (!lhs)
717 	{
718 	  gsi_replace (gsi, gimple_build_nop (), false);
719 	  return true;
720 	}
721       goto done;
722     }
723   else
724     {
725       tree srctype, desttype;
726       unsigned int src_align, dest_align;
727       tree off0;
728 
729       /* Inlining of memcpy/memmove may cause bounds lost (if we copy
730 	 pointers as wide integer) and also may result in huge function
731 	 size because of inlined bounds copy.  Thus don't inline for
732 	 functions we want to instrument.  */
733       if (flag_check_pointer_bounds
734 	  && chkp_instrumentable_p (cfun->decl)
735 	  /* Even if data may contain pointers we can inline if copy
736 	     less than a pointer size.  */
737 	  && (!tree_fits_uhwi_p (len)
738 	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
739 	return false;
740 
741       /* Build accesses at offset zero with a ref-all character type.  */
742       off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
743 							 ptr_mode, true), 0);
744 
745       /* If we can perform the copy efficiently with first doing all loads
746          and then all stores inline it that way.  Currently efficiently
747 	 means that we can load all the memory into a single integer
748 	 register which is what MOVE_MAX gives us.  */
749       src_align = get_pointer_alignment (src);
750       dest_align = get_pointer_alignment (dest);
751       if (tree_fits_uhwi_p (len)
752 	  && compare_tree_int (len, MOVE_MAX) <= 0
753 	  /* ???  Don't transform copies from strings with known length this
754 	     confuses the tree-ssa-strlen.c.  This doesn't handle
755 	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
756 	     reason.  */
757 	  && !c_strlen (src, 2))
758 	{
759 	  unsigned ilen = tree_to_uhwi (len);
760 	  if (pow2p_hwi (ilen))
761 	    {
762 	      /* Detect invalid bounds and overlapping copies and issue
763 		 either -Warray-bounds or -Wrestrict.  */
764 	      if (!nowarn
765 		  && check_bounds_or_overlap (as_a <gcall *>(stmt),
766 					      dest, src, len, len))
767 	      	gimple_set_no_warning (stmt, true);
768 
769 	      scalar_int_mode mode;
770 	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
771 	      if (type
772 		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
773 		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
774 		  /* If the destination pointer is not aligned we must be able
775 		     to emit an unaligned store.  */
776 		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
777 		      || !targetm.slow_unaligned_access (mode, dest_align)
778 		      || (optab_handler (movmisalign_optab, mode)
779 			  != CODE_FOR_nothing)))
780 		{
781 		  tree srctype = type;
782 		  tree desttype = type;
783 		  if (src_align < GET_MODE_ALIGNMENT (mode))
784 		    srctype = build_aligned_type (type, src_align);
785 		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
786 		  tree tem = fold_const_aggregate_ref (srcmem);
787 		  if (tem)
788 		    srcmem = tem;
789 		  else if (src_align < GET_MODE_ALIGNMENT (mode)
790 			   && targetm.slow_unaligned_access (mode, src_align)
791 			   && (optab_handler (movmisalign_optab, mode)
792 			       == CODE_FOR_nothing))
793 		    srcmem = NULL_TREE;
794 		  if (srcmem)
795 		    {
796 		      gimple *new_stmt;
797 		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
798 			{
799 			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
800 			  srcmem
801 			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
802 							  new_stmt);
803 			  gimple_assign_set_lhs (new_stmt, srcmem);
804 			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
805 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
806 			}
807 		      if (dest_align < GET_MODE_ALIGNMENT (mode))
808 			desttype = build_aligned_type (type, dest_align);
809 		      new_stmt
810 			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
811 							    dest, off0),
812 					       srcmem);
813 		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
815 		      if (gimple_vdef (new_stmt)
816 			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
817 			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
818 		      if (!lhs)
819 			{
820 			  gsi_replace (gsi, new_stmt, false);
821 			  return true;
822 			}
823 		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
824 		      goto done;
825 		    }
826 		}
827 	    }
828 	}
829 
830       if (endp == 3)
831 	{
832 	  /* Both DEST and SRC must be pointer types.
833 	     ??? This is what old code did.  Is the testing for pointer types
834 	     really mandatory?
835 
836 	     If either SRC is readonly or length is 1, we can use memcpy.  */
837 	  if (!dest_align || !src_align)
838 	    return false;
839 	  if (readonly_data_expr (src)
840 	      || (tree_fits_uhwi_p (len)
841 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
842 		      >= tree_to_uhwi (len))))
843 	    {
844 	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
845 	      if (!fn)
846 		return false;
847 	      gimple_call_set_fndecl (stmt, fn);
848 	      gimple_call_set_arg (stmt, 0, dest);
849 	      gimple_call_set_arg (stmt, 1, src);
850 	      fold_stmt (gsi);
851 	      return true;
852 	    }
853 
854 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
855 	  if (TREE_CODE (src) == ADDR_EXPR
856 	      && TREE_CODE (dest) == ADDR_EXPR)
857 	    {
858 	      tree src_base, dest_base, fn;
859 	      poly_int64 src_offset = 0, dest_offset = 0;
860 	      poly_uint64 maxsize;
861 
862 	      srcvar = TREE_OPERAND (src, 0);
863 	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
864 	      if (src_base == NULL)
865 		src_base = srcvar;
866 	      destvar = TREE_OPERAND (dest, 0);
867 	      dest_base = get_addr_base_and_unit_offset (destvar,
868 							 &dest_offset);
869 	      if (dest_base == NULL)
870 		dest_base = destvar;
871 	      if (!poly_int_tree_p (len, &maxsize))
872 		maxsize = -1;
873 	      if (SSA_VAR_P (src_base)
874 		  && SSA_VAR_P (dest_base))
875 		{
876 		  if (operand_equal_p (src_base, dest_base, 0)
877 		      && ranges_maybe_overlap_p (src_offset, maxsize,
878 						 dest_offset, maxsize))
879 		    return false;
880 		}
881 	      else if (TREE_CODE (src_base) == MEM_REF
882 		       && TREE_CODE (dest_base) == MEM_REF)
883 		{
884 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
885 					 TREE_OPERAND (dest_base, 0), 0))
886 		    return false;
887 		  poly_offset_int full_src_offset
888 		    = mem_ref_offset (src_base) + src_offset;
889 		  poly_offset_int full_dest_offset
890 		    = mem_ref_offset (dest_base) + dest_offset;
891 		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
892 					      full_dest_offset, maxsize))
893 		    return false;
894 		}
895 	      else
896 		return false;
897 
898 	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
899 	      if (!fn)
900 		return false;
901 	      gimple_call_set_fndecl (stmt, fn);
902 	      gimple_call_set_arg (stmt, 0, dest);
903 	      gimple_call_set_arg (stmt, 1, src);
904 	      fold_stmt (gsi);
905 	      return true;
906 	    }
907 
908 	  /* If the destination and source do not alias optimize into
909 	     memcpy as well.  */
910 	  if ((is_gimple_min_invariant (dest)
911 	       || TREE_CODE (dest) == SSA_NAME)
912 	      && (is_gimple_min_invariant (src)
913 		  || TREE_CODE (src) == SSA_NAME))
914 	    {
915 	      ao_ref destr, srcr;
916 	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
917 	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
918 	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
919 		{
920 		  tree fn;
921 		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
922 		  if (!fn)
923 		    return false;
924 		  gimple_call_set_fndecl (stmt, fn);
925 		  gimple_call_set_arg (stmt, 0, dest);
926 		  gimple_call_set_arg (stmt, 1, src);
927 		  fold_stmt (gsi);
928 		  return true;
929 		}
930 	    }
931 
932 	  return false;
933 	}
934 
935       if (!tree_fits_shwi_p (len))
936 	return false;
937       if (!POINTER_TYPE_P (TREE_TYPE (src))
938 	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
939 	return false;
940       /* In the following try to find a type that is most natural to be
941 	 used for the memcpy source and destination and that allows
942 	 the most optimization when memcpy is turned into a plain assignment
943 	 using that type.  In theory we could always use a char[len] type
944 	 but that only gains us that the destination and source possibly
945 	 no longer will have their address taken.  */
946       srctype = TREE_TYPE (TREE_TYPE (src));
947       if (TREE_CODE (srctype) == ARRAY_TYPE
948 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
949 	srctype = TREE_TYPE (srctype);
950       desttype = TREE_TYPE (TREE_TYPE (dest));
951       if (TREE_CODE (desttype) == ARRAY_TYPE
952 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
953 	desttype = TREE_TYPE (desttype);
954       if (TREE_ADDRESSABLE (srctype)
955 	  || TREE_ADDRESSABLE (desttype))
956 	return false;
957 
958       /* Make sure we are not copying using a floating-point mode or
959          a type whose size possibly does not match its precision.  */
960       if (FLOAT_MODE_P (TYPE_MODE (desttype))
961 	  || TREE_CODE (desttype) == BOOLEAN_TYPE
962 	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
963 	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
964       if (FLOAT_MODE_P (TYPE_MODE (srctype))
965 	  || TREE_CODE (srctype) == BOOLEAN_TYPE
966 	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
967 	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
968       if (!srctype)
969 	srctype = desttype;
970       if (!desttype)
971 	desttype = srctype;
972       if (!srctype)
973 	return false;
974 
975       src_align = get_pointer_alignment (src);
976       dest_align = get_pointer_alignment (dest);
977       if (dest_align < TYPE_ALIGN (desttype)
978 	  || src_align < TYPE_ALIGN (srctype))
979 	return false;
980 
981       destvar = NULL_TREE;
982       if (TREE_CODE (dest) == ADDR_EXPR
983 	  && var_decl_component_p (TREE_OPERAND (dest, 0))
984 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
985 	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
986 
987       srcvar = NULL_TREE;
988       if (TREE_CODE (src) == ADDR_EXPR
989 	  && var_decl_component_p (TREE_OPERAND (src, 0))
990 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
991 	{
992 	  if (!destvar
993 	      || src_align >= TYPE_ALIGN (desttype))
994 	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
995 				  src, off0);
996 	  else if (!STRICT_ALIGNMENT)
997 	    {
998 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
999 					    src_align);
1000 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1001 	    }
1002 	}
1003 
1004       if (srcvar == NULL_TREE && destvar == NULL_TREE)
1005 	return false;
1006 
1007       if (srcvar == NULL_TREE)
1008 	{
1009 	  if (src_align >= TYPE_ALIGN (desttype))
1010 	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1011 	  else
1012 	    {
1013 	      if (STRICT_ALIGNMENT)
1014 		return false;
1015 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1016 					    src_align);
1017 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1018 	    }
1019 	}
1020       else if (destvar == NULL_TREE)
1021 	{
1022 	  if (dest_align >= TYPE_ALIGN (srctype))
1023 	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1024 	  else
1025 	    {
1026 	      if (STRICT_ALIGNMENT)
1027 		return false;
1028 	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1029 					     dest_align);
1030 	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1031 	    }
1032 	}
1033 
1034       /* Detect invalid bounds and overlapping copies and issue either
1035 	 -Warray-bounds or -Wrestrict.  */
1036       if (!nowarn)
1037 	check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1038 
1039       gimple *new_stmt;
1040       if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1041 	{
1042 	  tree tem = fold_const_aggregate_ref (srcvar);
1043 	  if (tem)
1044 	    srcvar = tem;
1045 	  if (! is_gimple_min_invariant (srcvar))
1046 	    {
1047 	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1048 	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1049 						   new_stmt);
1050 	      gimple_assign_set_lhs (new_stmt, srcvar);
1051 	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1052 	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1053 	    }
1054 	  new_stmt = gimple_build_assign (destvar, srcvar);
1055 	  goto set_vop_and_replace;
1056 	}
1057 
1058       /* We get an aggregate copy.  Use an unsigned char[] type to
1059 	 perform the copying to preserve padding and to avoid any issues
1060 	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
1061       desttype = build_array_type_nelts (unsigned_char_type_node,
1062 					 tree_to_uhwi (len));
1063       srctype = desttype;
1064       if (src_align > TYPE_ALIGN (srctype))
1065 	srctype = build_aligned_type (srctype, src_align);
1066       if (dest_align > TYPE_ALIGN (desttype))
1067 	desttype = build_aligned_type (desttype, dest_align);
1068       new_stmt
1069 	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1070 			       fold_build2 (MEM_REF, srctype, src, off0));
1071 set_vop_and_replace:
1072       gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1073       gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1074       if (gimple_vdef (new_stmt)
1075 	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1076 	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1077       if (!lhs)
1078 	{
1079 	  gsi_replace (gsi, new_stmt, false);
1080 	  return true;
1081 	}
1082       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1083     }
1084 
1085 done:
1086   gimple_seq stmts = NULL;
1087   if (endp == 0 || endp == 3)
1088     len = NULL_TREE;
1089   else if (endp == 2)
1090     len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1091 			ssize_int (1));
1092   if (endp == 2 || endp == 1)
1093     {
1094       len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1095       dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1096 			   TREE_TYPE (dest), dest, len);
1097     }
1098 
1099   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1100   gimple *repl = gimple_build_assign (lhs, dest);
1101   gsi_replace (gsi, repl, false);
1102   return true;
1103 }
1104 
1105 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1106    to built-in memcmp (a, b, len).  */
1107 
1108 static bool
1109 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1110 {
1111   tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1112 
1113   if (!fn)
1114     return false;
1115 
1116   /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */
1117 
1118   gimple *stmt = gsi_stmt (*gsi);
1119   tree a = gimple_call_arg (stmt, 0);
1120   tree b = gimple_call_arg (stmt, 1);
1121   tree len = gimple_call_arg (stmt, 2);
1122 
1123   gimple *repl = gimple_build_call (fn, 3, a, b, len);
1124   replace_call_with_call_and_fold (gsi, repl);
1125 
1126   return true;
1127 }
1128 
1129 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1130    to built-in memmove (dest, src, len).  */
1131 
1132 static bool
1133 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1134 {
1135   tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1136 
1137   if (!fn)
1138     return false;
1139 
1140   /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1141      it's quivalent to memmove (not memcpy).  Transform bcopy (src, dest,
1142      len) into memmove (dest, src, len).  */
1143 
1144   gimple *stmt = gsi_stmt (*gsi);
1145   tree src = gimple_call_arg (stmt, 0);
1146   tree dest = gimple_call_arg (stmt, 1);
1147   tree len = gimple_call_arg (stmt, 2);
1148 
1149   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1150   gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1151   replace_call_with_call_and_fold (gsi, repl);
1152 
1153   return true;
1154 }
1155 
1156 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1157    to built-in memset (dest, 0, len).  */
1158 
1159 static bool
1160 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1161 {
1162   tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1163 
1164   if (!fn)
1165     return false;
1166 
1167   /* Transform bzero (dest, len) into memset (dest, 0, len).  */
1168 
1169   gimple *stmt = gsi_stmt (*gsi);
1170   tree dest = gimple_call_arg (stmt, 0);
1171   tree len = gimple_call_arg (stmt, 1);
1172 
1173   gimple_seq seq = NULL;
1174   gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1175   gimple_seq_add_stmt_without_update (&seq, repl);
1176   gsi_replace_with_seq_vops (gsi, seq);
1177   fold_stmt (gsi);
1178 
1179   return true;
1180 }
1181 
1182 /* Fold function call to builtin memset or bzero at *GSI setting the
1183    memory of size LEN to VAL.  Return whether a simplification was made.  */
1184 
1185 static bool
1186 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1187 {
1188   gimple *stmt = gsi_stmt (*gsi);
1189   tree etype;
1190   unsigned HOST_WIDE_INT length, cval;
1191 
1192   /* If the LEN parameter is zero, return DEST.  */
1193   if (integer_zerop (len))
1194     {
1195       replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1196       return true;
1197     }
1198 
1199   if (! tree_fits_uhwi_p (len))
1200     return false;
1201 
1202   if (TREE_CODE (c) != INTEGER_CST)
1203     return false;
1204 
1205   tree dest = gimple_call_arg (stmt, 0);
1206   tree var = dest;
1207   if (TREE_CODE (var) != ADDR_EXPR)
1208     return false;
1209 
1210   var = TREE_OPERAND (var, 0);
1211   if (TREE_THIS_VOLATILE (var))
1212     return false;
1213 
1214   etype = TREE_TYPE (var);
1215   if (TREE_CODE (etype) == ARRAY_TYPE)
1216     etype = TREE_TYPE (etype);
1217 
1218   if (!INTEGRAL_TYPE_P (etype)
1219       && !POINTER_TYPE_P (etype))
1220     return NULL_TREE;
1221 
1222   if (! var_decl_component_p (var))
1223     return NULL_TREE;
1224 
1225   length = tree_to_uhwi (len);
1226   if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1227       || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1228     return NULL_TREE;
1229 
1230   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1231     return NULL_TREE;
1232 
1233   if (integer_zerop (c))
1234     cval = 0;
1235   else
1236     {
1237       if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1238 	return NULL_TREE;
1239 
1240       cval = TREE_INT_CST_LOW (c);
1241       cval &= 0xff;
1242       cval |= cval << 8;
1243       cval |= cval << 16;
1244       cval |= (cval << 31) << 1;
1245     }
1246 
1247   var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1248   gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1249   gimple_set_vuse (store, gimple_vuse (stmt));
1250   tree vdef = gimple_vdef (stmt);
1251   if (vdef && TREE_CODE (vdef) == SSA_NAME)
1252     {
1253       gimple_set_vdef (store, gimple_vdef (stmt));
1254       SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1255     }
1256   gsi_insert_before (gsi, store, GSI_SAME_STMT);
1257   if (gimple_call_lhs (stmt))
1258     {
1259       gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1260       gsi_replace (gsi, asgn, false);
1261     }
1262   else
1263     {
1264       gimple_stmt_iterator gsi2 = *gsi;
1265       gsi_prev (gsi);
1266       gsi_remove (&gsi2, true);
1267     }
1268 
1269   return true;
1270 }
1271 
1272 
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically, if we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &(*p)[0]: recurse on the pointer P itself.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: the expression itself is the value; it must be
	     a non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	/* Otherwise try to compute a constant string length.  */
	val = c_strlen (arg, 1);

      /* In fuzzy mode, when the length cannot be computed, fall back
	 on the size of a character array the argument may refer to.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* Use size - 1 as the maximum: the longest string the
		 array can hold must leave room for the terminating
		 nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  /* Again, size - 1 to leave room for the nul.  */
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Lower the recorded minimum to VAL when VAL is smaller (only
	 meaningful for the range queries, TYPE > 0).  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  /* For TYPE > 0 keep the larger of the recorded maximum and
	     VAL; for TYPE == 0 any disagreement is a failure.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Merge the ranges of both arms of the COND_EXPR.  */
	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			    gimple_assign_rhs3 (def_stmt) };

	    for (unsigned int i = 0; i < 2; i++)
	      if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				     flexp))
		{
		  /* In optimistic mode (FUZZY == 2) an undetermined arm
		     merely caps the maximum at "unbounded" instead of
		     failing the whole query.  */
		  if (fuzzy == 2)
		    *maxlen = build_all_ones_cst (size_type_node);
		  else
		    return false;
		}
	    return true;
	  }
        return false;

      case GIMPLE_PHI:
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* Same optimistic-mode fallback as for COND_EXPR.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
          }
        return true;

      default:
        return false;
    }
}
1542 
1543 /* Determine the minimum and maximum value or string length that ARG
1544    refers to and store each in the first two elements of MINMAXLEN.
1545    For expressions that point to strings of unknown lengths that are
1546    character arrays, use the upper bound of the array as the maximum
1547    length.  For example, given an expression like 'x ? array : "xyz"'
1548    and array declared as 'char array[8]', MINMAXLEN[0] will be set
1549    to 0 and MINMAXLEN[1] to 7, the longest string that could be
1550    stored in array.
1551    Return true if the range of the string lengths has been obtained
1552    from the upper bound of an array at the end of a struct.  Such
1553    an array may hold a string that's longer than its upper bound
1554    due to it being used as a poor-man's flexible array member.
1555 
1556    STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1557    and false if PHIs and COND_EXPRs are to be handled optimistically,
1558    if we can determine string length minimum and maximum; it will use
1559    the minimum from the ones where it can be determined.
1560    STRICT false should be only used for warning code.  */
1561 
1562 bool
1563 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1564 {
1565   bitmap visited = NULL;
1566 
1567   minmaxlen[0] = NULL_TREE;
1568   minmaxlen[1] = NULL_TREE;
1569 
1570   bool flexarray = false;
1571   if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1572 			 &flexarray))
1573     {
1574       minmaxlen[0] = NULL_TREE;
1575       minmaxlen[1] = NULL_TREE;
1576     }
1577 
1578   if (visited)
1579     BITMAP_FREE (visited);
1580 
1581   return flexarray;
1582 }
1583 
1584 tree
1585 get_maxval_strlen (tree arg, int type)
1586 {
1587   bitmap visited = NULL;
1588   tree len[2] = { NULL_TREE, NULL_TREE };
1589 
1590   bool dummy;
1591   if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1592     len[1] = NULL_TREE;
1593   if (visited)
1594     BITMAP_FREE (visited);
1595 
1596   return len[1];
1597 }
1598 
1599 
1600 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1601    If LEN is not NULL, it represents the length of the string to be
1602    copied.  Return NULL_TREE if no simplification can be made.  */
1603 
1604 static bool
1605 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1606 			    tree dest, tree src)
1607 {
1608   gimple *stmt = gsi_stmt (*gsi);
1609   location_t loc = gimple_location (stmt);
1610   tree fn;
1611 
1612   /* If SRC and DEST are the same (and not volatile), return DEST.  */
1613   if (operand_equal_p (src, dest, 0))
1614     {
1615       /* Issue -Wrestrict unless the pointers are null (those do
1616 	 not point to objects and so do not indicate an overlap;
1617 	 such calls could be the result of sanitization and jump
1618 	 threading).  */
1619       if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1620 	{
1621 	  tree func = gimple_call_fndecl (stmt);
1622 
1623 	  warning_at (loc, OPT_Wrestrict,
1624 		      "%qD source argument is the same as destination",
1625 		      func);
1626 	}
1627 
1628       replace_call_with_value (gsi, dest);
1629       return true;
1630     }
1631 
1632   if (optimize_function_for_size_p (cfun))
1633     return false;
1634 
1635   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1636   if (!fn)
1637     return false;
1638 
1639   tree len = get_maxval_strlen (src, 0);
1640   if (!len)
1641     return false;
1642 
1643   len = fold_convert_loc (loc, size_type_node, len);
1644   len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1645   len = force_gimple_operand_gsi (gsi, len, true,
1646 				  NULL_TREE, true, GSI_SAME_STMT);
1647   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1648   replace_call_with_call_and_fold (gsi, repl);
1649   return true;
1650 }
1651 
1652 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1653    If SLEN is not NULL, it represents the length of the source string.
1654    Return NULL_TREE if no simplification can be made.  */
1655 
1656 static bool
1657 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1658 			     tree dest, tree src, tree len)
1659 {
1660   gimple *stmt = gsi_stmt (*gsi);
1661   location_t loc = gimple_location (stmt);
1662   bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1663 
1664   /* If the LEN parameter is zero, return DEST.  */
1665   if (integer_zerop (len))
1666     {
1667       /* Avoid warning if the destination refers to a an array/pointer
1668 	 decorate with attribute nonstring.  */
1669       if (!nonstring)
1670 	{
1671 	  tree fndecl = gimple_call_fndecl (stmt);
1672 	  gcall *call = as_a <gcall *> (stmt);
1673 
1674 	  /* Warn about the lack of nul termination: the result is not
1675 	     a (nul-terminated) string.  */
1676 	  tree slen = get_maxval_strlen (src, 0);
1677 	  if (slen && !integer_zerop (slen))
1678 	    warning_at (loc, OPT_Wstringop_truncation,
1679 			"%G%qD destination unchanged after copying no bytes "
1680 			"from a string of length %E",
1681 			call, fndecl, slen);
1682 	  else
1683 	    warning_at (loc, OPT_Wstringop_truncation,
1684 			"%G%qD destination unchanged after copying no bytes",
1685 			call, fndecl);
1686 	}
1687 
1688       replace_call_with_value (gsi, dest);
1689       return true;
1690     }
1691 
1692   /* We can't compare slen with len as constants below if len is not a
1693      constant.  */
1694   if (TREE_CODE (len) != INTEGER_CST)
1695     return false;
1696 
1697   /* Now, we must be passed a constant src ptr parameter.  */
1698   tree slen = get_maxval_strlen (src, 0);
1699   if (!slen || TREE_CODE (slen) != INTEGER_CST)
1700     return false;
1701 
1702   /* The size of the source string including the terminating nul.  */
1703   tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1704 
1705   /* We do not support simplification of this case, though we do
1706      support it when expanding trees into RTL.  */
1707   /* FIXME: generate a call to __builtin_memset.  */
1708   if (tree_int_cst_lt (ssize, len))
1709     return false;
1710 
1711   /* Diagnose truncation that leaves the copy unterminated.  */
1712   maybe_diag_stxncpy_trunc (*gsi, src, len);
1713 
1714   /* OK transform into builtin memcpy.  */
1715   tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1716   if (!fn)
1717     return false;
1718 
1719   len = fold_convert_loc (loc, size_type_node, len);
1720   len = force_gimple_operand_gsi (gsi, len, true,
1721 				  NULL_TREE, true, GSI_SAME_STMT);
1722   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1723   replace_call_with_call_and_fold (gsi, repl);
1724 
1725   return true;
1726 }
1727 
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Every transformation below produces a value for the lhs; with no
     lhs there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: do the search at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* No occurrence: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found at offset p1 - p: fold to STR p+ offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply when searching for the
     terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise transform str(r)chr (str, 0) into str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1811 
1812 /* Fold function call to builtin strstr.
1813    If both arguments are constant, evaluate and fold the result,
1814    additionally fold strstr (x, "") into x and strstr (x, "c")
1815    into strchr (x, 'c').  */
1816 static bool
1817 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1818 {
1819   gimple *stmt = gsi_stmt (*gsi);
1820   tree haystack = gimple_call_arg (stmt, 0);
1821   tree needle = gimple_call_arg (stmt, 1);
1822   const char *p, *q;
1823 
1824   if (!gimple_call_lhs (stmt))
1825     return false;
1826 
1827   q = c_getstr (needle);
1828   if (q == NULL)
1829     return false;
1830 
1831   if ((p = c_getstr (haystack)))
1832     {
1833       const char *r = strstr (p, q);
1834 
1835       if (r == NULL)
1836 	{
1837 	  replace_call_with_value (gsi, integer_zero_node);
1838 	  return true;
1839 	}
1840 
1841       tree len = build_int_cst (size_type_node, r - p);
1842       gimple_seq stmts = NULL;
1843       gimple *new_stmt
1844 	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1845 			       haystack, len);
1846       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1847       gsi_replace_with_seq_vops (gsi, stmts);
1848       return true;
1849     }
1850 
1851   /* For strstr (x, "") return x.  */
1852   if (q[0] == '\0')
1853     {
1854       replace_call_with_value (gsi, haystack);
1855       return true;
1856     }
1857 
1858   /* Transform strstr (x, "c") into strchr (x, 'c').  */
1859   if (q[1] == '\0')
1860     {
1861       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1862       if (strchr_fn)
1863 	{
1864 	  tree c = build_int_cst (integer_type_node, q[0]);
1865 	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1866 	  replace_call_with_call_and_fold (gsi, repl);
1867 	  return true;
1868 	}
1869     }
1870 
1871   return false;
1872 }
1873 
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Folds strcat (dst, "") into just DST.  Otherwise, when optimizing
   the block for speed and the length of SRC is a known constant,
   splits the call into strlen (dst) followed by a memcpy of SRC
   (including its terminating NUL) so the copy can later be expanded
   by pieces.

   Returns true if the call at *GSI was replaced, false if no
   simplification was made.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy split trades size for speed; keep the plain
     strcat call when this block is optimized for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL is copied as well.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; materialize that value
	 for the original call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1965 
1966 /* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
1967    are the arguments to the call.  */
1968 
1969 static bool
1970 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1971 {
1972   gimple *stmt = gsi_stmt (*gsi);
1973   tree dest = gimple_call_arg (stmt, 0);
1974   tree src = gimple_call_arg (stmt, 1);
1975   tree size = gimple_call_arg (stmt, 2);
1976   tree fn;
1977   const char *p;
1978 
1979 
1980   p = c_getstr (src);
1981   /* If the SRC parameter is "", return DEST.  */
1982   if (p && *p == '\0')
1983     {
1984       replace_call_with_value (gsi, dest);
1985       return true;
1986     }
1987 
1988   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1989     return false;
1990 
1991   /* If __builtin_strcat_chk is used, assume strcat is available.  */
1992   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1993   if (!fn)
1994     return false;
1995 
1996   gimple *repl = gimple_build_call (fn, 2, dest, src);
1997   replace_call_with_call_and_fold (gsi, repl);
1998   return true;
1999 }
2000 
/* Simplify a call to the strncat builtin.

   When the constant bound LEN is at least the constant length of the
   source string, diagnoses suspicious bounds (a bound equal to or
   larger than the destination size, or exactly the source length) and
   transforms the call into plain strcat, which copies the same bytes.
   Returns true if the call at *GSI was replaced.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Everything below needs both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress any further diagnostics for this statement once
	     the warning has been issued.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2087 
2088 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2089    LEN, and SIZE.  */
2090 
2091 static bool
2092 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2093 {
2094   gimple *stmt = gsi_stmt (*gsi);
2095   tree dest = gimple_call_arg (stmt, 0);
2096   tree src = gimple_call_arg (stmt, 1);
2097   tree len = gimple_call_arg (stmt, 2);
2098   tree size = gimple_call_arg (stmt, 3);
2099   tree fn;
2100   const char *p;
2101 
2102   p = c_getstr (src);
2103   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
2104   if ((p && *p == '\0')
2105       || integer_zerop (len))
2106     {
2107       replace_call_with_value (gsi, dest);
2108       return true;
2109     }
2110 
2111   if (! tree_fits_uhwi_p (size))
2112     return false;
2113 
2114   if (! integer_all_onesp (size))
2115     {
2116       tree src_len = c_strlen (src, 1);
2117       if (src_len
2118 	  && tree_fits_uhwi_p (src_len)
2119 	  && tree_fits_uhwi_p (len)
2120 	  && ! tree_int_cst_lt (len, src_len))
2121 	{
2122 	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
2123 	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2124 	  if (!fn)
2125 	    return false;
2126 
2127 	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2128 	  replace_call_with_call_and_fold (gsi, repl);
2129 	  return true;
2130 	}
2131       return false;
2132     }
2133 
2134   /* If __builtin_strncat_chk is used, assume strncat is available.  */
2135   fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2136   if (!fn)
2137     return false;
2138 
2139   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2140   replace_call_with_call_and_fold (gsi, repl);
2141   return true;
2142 }
2143 
2144 /* Build and append gimple statements to STMTS that would load a first
2145    character of a memory location identified by STR.  LOC is location
2146    of the statement.  */
2147 
2148 static tree
2149 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2150 {
2151   tree var;
2152 
2153   tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2154   tree cst_uchar_ptr_node
2155     = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2156   tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2157 
2158   tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2159   gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2160   var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2161 
2162   gimple_assign_set_lhs (stmt, var);
2163   gimple_seq_add_stmt_without_update (stmts, stmt);
2164 
2165   return var;
2166 }
2167 
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   FCODE is the name of the builtin.  Returns true if the call at *GSI
   was simplified, false otherwise.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Constant bound for the strn* variants; -1 when absent or not a
     constant that fits an unsigned HOST_WIDE_INT.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* Host strncmp can only prove bytewise equality here, and
	       bytewise-equal prefixes are also equal case-insensitively,
	       so only a zero result is folded.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is known to take part in the
     comparison (str*cmp have no bound, so always compare at least the
     terminating NUL).  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen the loaded byte to int before negating.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
          || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
        return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2339 
/* Fold a call to the memchr pointed by GSI iterator.  Folds calls with
   a zero length to null, and calls on a constant string with a constant
   length and character to a null pointer or a pointer offset into the
   first argument.  Returns true if the call at *GSI was replaced.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the searched-for character and the length must be constants,
     and the character must be representable on the target.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only search the bytes whose contents are known.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* C was not found in the known contents.  Fold to null only
	     when the search was confined to them; otherwise C might
	     occur in the bytes past the known string, so give up.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Fold to ARG1 plus the constant offset of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No result needed; replace the call with a no-op while
	       keeping the virtual operands consistent.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2401 
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the string and
   stream arguments to the call.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  The transformation only applies when the
   return value of the call is unused.  Returns true if the call was
   simplified (deleted, or turned into fputc or fwrite), false
   otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC))
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2478 
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the checked builtin.

   The checked call is folded into its unchecked mem* counterpart when
   SIZE is the all-ones "object size unknown" constant, or when the
   constant LEN (or its maximum possible value when LEN is not
   constant) is known not to exceed SIZE.  Returns true if the call at
   *GSI was replaced.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the result of the call is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; build that expression.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Keep the check when the copy might exceed the object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2579 
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the checked builtin.

   Folds the checked copy into plain st[rp]cpy when SIZE is the
   all-ones "object size unknown" constant or when the (maximum)
   length of SRC is known to be smaller than SIZE; falls back to
   __memcpy_chk or __strcpy_chk in the intermediate cases.  Returns
   true if the call at *GSI was replaced.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the result of the call is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Keep the check unless the string (plus its terminating NUL)
	 provably fits, i.e. MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2683 
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   checked builtin.

   The checked call is folded into its unchecked st{r,p}ncpy
   counterpart when SIZE is the all-ones "object size unknown"
   constant, or when the constant LEN (or its maximum possible value)
   is known not to exceed SIZE.  Returns true if the call at *GSI was
   replaced.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* True when the result of the call is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
       /* If return value of __stpncpy_chk is ignored,
          optimize into __strncpy_chk.  */
       fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
       if (fn)
	 {
	   gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	   replace_call_with_call_and_fold (gsi, repl);
	   return true;
	 }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Keep the check when the copy might exceed the object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2743 
/* Fold function call to builtin stpcpy with arguments DEST and SRC.

   If the result of the call is unused, degrade stpcpy to strcpy.
   Otherwise, when the length of SRC is a known constant, expand the
   call into memcpy (dest, src, len + 1) followed by dest p+ len
   (stpcpy returns a pointer to the terminating NUL in DEST).

   Returns true if the call at *GSI was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Compute len + 1 so the terminating NUL is copied as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Move the virtual memory operands from the original call onto the
     new memcpy so it takes over the original's memory effects.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2807 
/* Fold a call to __{,v}snprintf_chk at *GSI into an ordinary
   {,v}snprintf call when the object-size check can be discharged at
   compile time.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  Return true if the call was simplified,
   otherwise false.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     dest, len, flag, size, fmt and possibly trailing varargs.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* The object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means the object size is unknown; otherwise prove
     the destination can hold LEN (or its known upper bound) bytes.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift any remaining varargs down over the dropped FLAG and SIZE.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2888 
/* Fold a call to __{,v}sprintf_chk at *GSI into an ordinary
   {,v}sprintf call when it can be proved not to overflow the
   destination.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  Return true if the call was simplified,
   otherwise false.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     dest, flag, size, fmt and possibly trailing varargs.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* The object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN, if determined below, is the number of characters the call
     writes not counting the terminating NUL.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown; otherwise the
     output including the terminating NUL must fit, i.e. LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift any remaining varargs down over the dropped FLAG and SIZE.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2984 
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which for
	     a %-free format is simply its length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* A constant length for ORIG is only needed to materialize the
	 return value.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3107 
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; with no % directives that is the format's
	     length, which fits since LEN < DESTLEN here.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG must be a known constant to validate the
	 bound and to materialize the return value.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3244 
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fold to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with a %-free format is only OK for the
	 va_list variants, where it is the va_list itself.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3345 
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", arg) and a %-free format uniformly: determine
     the constant string STR that is actually printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3507 
3508 
3509 
/* Fold a call to __builtin_strlen at *GSI.  If the length of the
   argument is a known constant, replace the call with that constant;
   otherwise record the computed length range on the call's LHS.
   Return true if the call was replaced, otherwise false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  wide_int minlen;
  wide_int maxlen;

  tree lenrange[2];
  /* NOTE(review): the negation is intentional — a false return from
     get_range_strlen here appears to mean the computed bounds are
     reliable (see its definition elsewhere in this file); confirm
     against that function's contract.  */
  if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
      && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
      && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lenrange[0]);
      maxlen = wi::to_wide (lenrange[1]);
    }
  else
    {
      /* No usable range: fall back to [0, max object size - 2].  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  /* A degenerate range means the length is a known constant; fold the
     whole call to that constant.  */
  if (minlen == maxlen)
    {
      lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
					      true, GSI_SAME_STMT);
      replace_call_with_value (gsi, lenrange[0]);
      return true;
    }

  /* Otherwise attach the range to the result for later passes.  */
  if (tree lhs = gimple_call_lhs (stmt))
    if (TREE_CODE (lhs) == SSA_NAME
	&& INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
      set_range_info (lhs, VR_RANGE, minlen, maxlen);

  return false;
}
3556 
/* Fold a call to __builtin_acc_on_device with device argument ARG0 into
   a pair of integer comparisons:
     result = (arg0 == val_host) | (arg0 == val_dev)
   where the constants depend on whether this is the host or the
   accelerator compiler.  Return true if the call was folded.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  /* On the accelerator side the host constant is inverted and the
     device constant names the accelerator being compiled for.  */
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
3598 
3599 /* Fold realloc (0, n) -> malloc (n).  */
3600 
3601 static bool
3602 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3603 {
3604   gimple *stmt = gsi_stmt (*gsi);
3605   tree arg = gimple_call_arg (stmt, 0);
3606   tree size = gimple_call_arg (stmt, 1);
3607 
3608   if (operand_equal_p (arg, null_pointer_node, 0))
3609     {
3610       tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3611       if (fn_malloc)
3612 	{
3613 	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
3614 	  replace_call_with_call_and_fold (gsi, repl);
3615 	  return true;
3616 	}
3617     }
3618   return false;
3619 }
3620 
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  On success the statement at *GSI has been replaced or
   rewritten in place by the dedicated folder; on failure nothing was
   changed.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  /* N is consulted only by the printf/fprintf cases below, which accept
     a variable number of arguments.  */
  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to a dedicated folder per builtin; each returns true iff it
     changed the IL at *GSI.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The shared memory-op folder distinguishes the three semantics by
       its last argument: 0 = memcpy, 1 = mempcpy, 3 = memmove.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the historical BSD names for strchr/strrchr; the
       boolean selects forward (false) vs. reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* fprintf (fp, fmt[, arg]): only the forms with at most one
       conversion argument are handled.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    /* The _chk variants carry an extra flag argument between the stream
       and the format, hence the shifted argument indices.  */
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      /* If the folded result is not a valid GIMPLE rhs, gimplify it
	 into a sequence replacing the call.  */
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3799 
3800 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3801    function calls to constants, where possible.  */
3802 
3803 static tree
3804 fold_internal_goacc_dim (const gimple *call)
3805 {
3806   int axis = oacc_get_ifn_dim_arg (call);
3807   int size = oacc_get_fn_dim_size (current_function_decl, axis);
3808   tree result = NULL_TREE;
3809   tree type = TREE_TYPE (gimple_call_lhs (call));
3810 
3811   switch (gimple_call_internal_fn (call))
3812     {
3813     case IFN_GOACC_DIM_POS:
3814       /* If the size is 1, we know the answer.  */
3815       if (size == 1)
3816 	result = build_int_cst (type, 0);
3817       break;
3818     case IFN_GOACC_DIM_SIZE:
3819       /* If the size is not dynamic, we know the answer.  */
3820       if (size)
3821 	result = build_int_cst (type, size);
3822       break;
3823     default:
3824       break;
3825     }
3826 
3827   return result;
3828 }
3829 
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Only consider a real, optimized, non-sanitized 6-argument builtin
     call that has virtual operands (i.e. participates in memory SSA).
     The builtin_p check must come first: gimple_call_fndecl below is
     only meaningful for such calls.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The expected-value argument must be the address of a plain SSA-able
     variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  /* The variable must be a local register-type scalar whose precision
     fills its mode, so the VIEW_CONVERT_EXPRs used by the transform are
     lossless.  */
  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak flag must be a compile-time 0 or 1, since it is encoded
     into the replacement call's flag argument.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* The integral type operated on is the third parameter type of the
     builtin's prototype.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must provide a compare-and-swap pattern for this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
3895 
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   The caller must have checked optimize_atomic_compare_exchange_p.
   On return *GSI points at the load of E inserted before the call, so
   iteration continues over the newly built statements.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the integral type of the builtin's third parameter.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected value into an SSA name before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to restart iteration when we are done.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode the weak flag in bit 8 and the access size in the low byte,
     matching the w * 256 + N encoding documented above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands to the replacement call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw, the decomposition statements must
     go on the fall-through edge, after the (possibly throwing) call.  */
  if (stmt_can_throw_internal (stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>; possibly through a VIEW_CONVERT_EXPR back to
     the variable's type.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
3985 
3986 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3987    doesn't fit into TYPE.  The test for overflow should be regardless of
3988    -fwrapv, and even for unsigned types.  */
3989 
3990 bool
3991 arith_overflowed_p (enum tree_code code, const_tree type,
3992 		    const_tree arg0, const_tree arg1)
3993 {
3994   typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3995   typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3996     widest2_int_cst;
3997   widest2_int warg0 = widest2_int_cst (arg0);
3998   widest2_int warg1 = widest2_int_cst (arg1);
3999   widest2_int wres;
4000   switch (code)
4001     {
4002     case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4003     case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4004     case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4005     default: gcc_unreachable ();
4006     }
4007   signop sign = TYPE_SIGN (type);
4008   if (sign == UNSIGNED && wi::neg_p (wres))
4009     return true;
4010   return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4011 }
4012 
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value. Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   With INPLACE set, only transformations that rewrite the existing
   statement are performed; no new statements may be inserted.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Warn in the dump when the recorded target contradicts the
	     type-inheritance analysis before stripping OBJ_TYPE_REF.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  /* With a complete target list, a single target lets us make the
	     call direct; an empty list means the call site is
	     unreachable.  */
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
		 		   targets.length () == 1
		  		   ? targets[0]->name ()
		  		   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by giving it the
			     default definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may insert or replace statements, which is not
     allowed in inplace mode.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      /* CPLX_RESULT is true for the *_OVERFLOW internal fns, whose
	 result is a complex value pairing the arithmetic result with an
	 overflow flag.  */
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* Drop the check when the object size is unknown (all ones)
	       or the offset is provably within bounds.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset cannot wrap the pointer; the check is dead.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      /* Try to simplify the overflow-checking arithmetic above using
	 algebraic identities and constant folding.  */
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Pack the value and the overflow flag into the complex
		 result the *_OVERFLOW internal fns return.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4340 
4341 
4342 /* Return true whether NAME has a use on STMT.  */
4343 
4344 static bool
4345 has_use_on_stmt (tree name, gimple *stmt)
4346 {
4347   imm_use_iterator iter;
4348   use_operand_p use_p;
4349   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4350     if (USE_STMT (use_p) == stmt)
4351       return true;
4352   return false;
4353 }
4354 
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      /* A comparison result can go directly into the GIMPLE_COND...  */
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      /* ... an SSA name becomes a test against zero ...  */
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      /* ... and a constant decides the branch statically.  */
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result as a boolean SSA name and
	     compare that against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In place we can only rewrite the rhs if it does not need more
	 operand slots than the existing statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same function, simplified arguments: update them in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Build the replacement into SEQ, assigning the final value to
	     the original lhs, and swap the whole sequence in while
	     preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4505 
4506 /* Canonicalize MEM_REFs invariant address operand after propagation.  */
4507 
static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize the address
     operand itself.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element:
		     (index - low-bound) * element-size-in-bits.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  /* Only rewrite accesses that lie fully within the
		     vector.  */
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components so *t points at the base reference,
     which is where a [TARGET_]MEM_REF would sit.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand and take the address of the bare base.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4626 
4627 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
4628    distinguishes both cases.  */
4629 
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether warnings were suppressed on the original statement
     so fold_undefer_overflow_warnings below can honor that.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the lhs of the call.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  In-place folding can only be
     done for assignments and conditions, where the replacement fits
     in the existing statement.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	/* Fold the rhs; only install the result in-place when it does
	   not need more operand slots than the existing statement has.  */
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* An input is foldable as an lvalue only when its constraint
	       allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the referenced object and re-take its address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Replace a returned SSA name by its value if the valueization
	   callback provides one and copy propagation is legal.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4951 
/* Valueization callback that ends up not following SSA edges.  */
4953 
tree
no_follow_ssa_edges (tree)
{
  /* Valueize nothing; the unnamed argument is deliberately ignored.  */
  return NULL_TREE;
}
4959 
4960 /* Valueization callback that ends up following single-use SSA edges only.  */
4961 
4962 tree
4963 follow_single_use_edges (tree val)
4964 {
4965   if (TREE_CODE (val) == SSA_NAME
4966       && !has_single_use (val))
4967     return NULL_TREE;
4968   return val;
4969 }
4970 
4971 /* Fold the statement pointed to by GSI.  In some cases, this function may
4972    replace the whole statement with a new one.  Returns true iff folding
4973    makes any changes.
4974    The statement pointed to by GSI should be in valid gimple form but may
4975    be in unfolded state as resulting from for example constant propagation
4976    which can produce *&x = 0.  */
4977 
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without looking through SSA definitions.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
4983 
bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  /* Fold using the caller-supplied valueization callback.  */
  return fold_stmt_1 (gsi, false, valueize);
}
4989 
4990 /* Perform the minimal folding on statement *GSI.  Only operations like
4991    *&x created by constant propagation are handled.  The statement cannot
4992    be replaced with a new one.  Return true if the statement was
4993    changed, false otherwise.
4994    The statement *GSI should be in valid gimple form but may
4995    be in unfolded state as resulting from for example constant propagation
4996    which can produce *&x = 0.  */
4997 
4998 bool
4999 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5000 {
5001   gimple *stmt = gsi_stmt (*gsi);
5002   bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5003   gcc_assert (gsi_stmt (*gsi) == stmt);
5004   return changed;
5005 }
5006 
5007 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5008    if EXPR is null or we don't know how.
5009    If non-null, the result always has boolean type.  */
5010 
5011 static tree
5012 canonicalize_bool (tree expr, bool invert)
5013 {
5014   if (!expr)
5015     return NULL_TREE;
5016   else if (invert)
5017     {
5018       if (integer_nonzerop (expr))
5019 	return boolean_false_node;
5020       else if (integer_zerop (expr))
5021 	return boolean_true_node;
5022       else if (TREE_CODE (expr) == SSA_NAME)
5023 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5024 			    build_int_cst (TREE_TYPE (expr), 0));
5025       else if (COMPARISON_CLASS_P (expr))
5026 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5027 			    boolean_type_node,
5028 			    TREE_OPERAND (expr, 0),
5029 			    TREE_OPERAND (expr, 1));
5030       else
5031 	return NULL_TREE;
5032     }
5033   else
5034     {
5035       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5036 	return expr;
5037       if (integer_nonzerop (expr))
5038 	return boolean_true_node;
5039       else if (integer_zerop (expr))
5040 	return boolean_false_node;
5041       else if (TREE_CODE (expr) == SSA_NAME)
5042 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
5043 			    build_int_cst (TREE_TYPE (expr), 0));
5044       else if (COMPARISON_CLASS_P (expr))
5045 	return fold_build2 (TREE_CODE (expr),
5046 			    boolean_type_node,
5047 			    TREE_OPERAND (expr, 0),
5048 			    TREE_OPERAND (expr, 1));
5049       else
5050 	return NULL_TREE;
5051     }
5052 }
5053 
5054 /* Check to see if a boolean expression EXPR is logically equivalent to the
5055    comparison (OP1 CODE OP2).  Check for various identities involving
5056    SSA_NAMEs.  */
5057 
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME equals (NAME != 0) and (NAME == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise compare EXPR's defining assignment against the
	 comparison directly.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == nonzero) are name itself.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != nonzero) invert the comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5111 
5112 /* Check to see if two boolean expressions OP1 and OP2 are logically
5113    equivalent.  */
5114 
5115 static bool
5116 same_bool_result_p (const_tree op1, const_tree op2)
5117 {
5118   /* Simple cases first.  */
5119   if (operand_equal_p (op1, op2, 0))
5120     return true;
5121 
5122   /* Check the cases where at least one of the operands is a comparison.
5123      These are a bit smarter than operand_equal_p in that they apply some
5124      identifies on SSA_NAMEs.  */
5125   if (COMPARISON_CLASS_P (op2)
5126       && same_bool_comparison_p (op1, TREE_CODE (op2),
5127 				 TREE_OPERAND (op2, 0),
5128 				 TREE_OPERAND (op2, 1)))
5129     return true;
5130   if (COMPARISON_CLASS_P (op1)
5131       && same_bool_comparison_p (op2, TREE_CODE (op1),
5132 				 TREE_OPERAND (op1, 0),
5133 				 TREE_OPERAND (op1, 1)))
5134     return true;
5135 
5136   /* Default case.  */
5137   return false;
5138 }
5139 
5140 /* Forward declarations for some mutually recursive functions.  */
5141 
5142 static tree
5143 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5144 		   enum tree_code code2, tree op2a, tree op2b);
5145 static tree
5146 and_var_with_comparison (tree var, bool invert,
5147 			 enum tree_code code2, tree op2a, tree op2b);
5148 static tree
5149 and_var_with_comparison_1 (gimple *stmt,
5150 			   enum tree_code code2, tree op2a, tree op2b);
5151 static tree
5152 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5153 		  enum tree_code code2, tree op2a, tree op2b);
5154 static tree
5155 or_var_with_comparison (tree var, bool invert,
5156 			enum tree_code code2, tree op2a, tree op2b);
5157 static tree
5158 or_var_with_comparison_1 (gimple *stmt,
5159 			  enum tree_code code2, tree op2a, tree op2b);
5160 
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */
5165 
5166 static tree
5167 and_var_with_comparison (tree var, bool invert,
5168 			 enum tree_code code2, tree op2a, tree op2b)
5169 {
5170   tree t;
5171   gimple *stmt = SSA_NAME_DEF_STMT (var);
5172 
5173   /* We can only deal with variables whose definitions are assignments.  */
5174   if (!is_gimple_assign (stmt))
5175     return NULL_TREE;
5176 
5177   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5178      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5179      Then we only have to consider the simpler non-inverted cases.  */
5180   if (invert)
5181     t = or_var_with_comparison_1 (stmt,
5182 				  invert_tree_comparison (code2, false),
5183 				  op2a, op2b);
5184   else
5185     t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5186   return canonicalize_bool (t, invert);
5187 }
5188 
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
5192 
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a code2 op2b) tests op2a for true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a code2 op2b) tests op2a for false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5351 
5352 /* Try to simplify the AND of two comparisons defined by
5353    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5354    If this can be done without constructing an intermediate value,
5355    return the resulting tree; otherwise NULL_TREE is returned.
5356    This function is deliberately asymmetric as it recurses on SSA_DEFs
5357    in the first comparison but not the second.  */
5358 
5359 static tree
5360 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5361 		   enum tree_code code2, tree op2a, tree op2b)
5362 {
5363   tree truth_type = truth_type_for (TREE_TYPE (op1a));
5364 
5365   /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
5366   if (operand_equal_p (op1a, op2a, 0)
5367       && operand_equal_p (op1b, op2b, 0))
5368     {
5369       /* Result will be either NULL_TREE, or a combined comparison.  */
5370       tree t = combine_comparisons (UNKNOWN_LOCATION,
5371 				    TRUTH_ANDIF_EXPR, code1, code2,
5372 				    truth_type, op1a, op1b);
5373       if (t)
5374 	return t;
5375     }
5376 
5377   /* Likewise the swapped case of the above.  */
5378   if (operand_equal_p (op1a, op2b, 0)
5379       && operand_equal_p (op1b, op2a, 0))
5380     {
5381       /* Result will be either NULL_TREE, or a combined comparison.  */
5382       tree t = combine_comparisons (UNKNOWN_LOCATION,
5383 				    TRUTH_ANDIF_EXPR, code1,
5384 				    swap_tree_comparison (code2),
5385 				    truth_type, op1a, op1b);
5386       if (t)
5387 	return t;
5388     }
5389 
5390   /* If both comparisons are of the same value against constants, we might
5391      be able to merge them.  */
5392   if (operand_equal_p (op1a, op2a, 0)
5393       && TREE_CODE (op1b) == INTEGER_CST
5394       && TREE_CODE (op2b) == INTEGER_CST)
5395     {
5396       int cmp = tree_int_cst_compare (op1b, op2b);
5397 
5398       /* If we have (op1a == op1b), we should either be able to
5399 	 return that or FALSE, depending on whether the constant op1b
5400 	 also satisfies the other comparison against op2b.  */
5401       if (code1 == EQ_EXPR)
5402 	{
5403 	  bool done = true;
5404 	  bool val;
5405 	  switch (code2)
5406 	    {
5407 	    case EQ_EXPR: val = (cmp == 0); break;
5408 	    case NE_EXPR: val = (cmp != 0); break;
5409 	    case LT_EXPR: val = (cmp < 0); break;
5410 	    case GT_EXPR: val = (cmp > 0); break;
5411 	    case LE_EXPR: val = (cmp <= 0); break;
5412 	    case GE_EXPR: val = (cmp >= 0); break;
5413 	    default: done = false;
5414 	    }
5415 	  if (done)
5416 	    {
5417 	      if (val)
5418 		return fold_build2 (code1, boolean_type_node, op1a, op1b);
5419 	      else
5420 		return boolean_false_node;
5421 	    }
5422 	}
5423       /* Likewise if the second comparison is an == comparison.  */
5424       else if (code2 == EQ_EXPR)
5425 	{
5426 	  bool done = true;
5427 	  bool val;
5428 	  switch (code1)
5429 	    {
5430 	    case EQ_EXPR: val = (cmp == 0); break;
5431 	    case NE_EXPR: val = (cmp != 0); break;
5432 	    case LT_EXPR: val = (cmp > 0); break;
5433 	    case GT_EXPR: val = (cmp < 0); break;
5434 	    case LE_EXPR: val = (cmp >= 0); break;
5435 	    case GE_EXPR: val = (cmp <= 0); break;
5436 	    default: done = false;
5437 	    }
5438 	  if (done)
5439 	    {
5440 	      if (val)
5441 		return fold_build2 (code2, boolean_type_node, op2a, op2b);
5442 	      else
5443 		return boolean_false_node;
5444 	    }
5445 	}
5446 
5447       /* Same business with inequality tests.  */
5448       else if (code1 == NE_EXPR)
5449 	{
5450 	  bool val;
5451 	  switch (code2)
5452 	    {
5453 	    case EQ_EXPR: val = (cmp != 0); break;
5454 	    case NE_EXPR: val = (cmp == 0); break;
5455 	    case LT_EXPR: val = (cmp >= 0); break;
5456 	    case GT_EXPR: val = (cmp <= 0); break;
5457 	    case LE_EXPR: val = (cmp > 0); break;
5458 	    case GE_EXPR: val = (cmp < 0); break;
5459 	    default:
5460 	      val = false;
5461 	    }
5462 	  if (val)
5463 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
5464 	}
5465       else if (code2 == NE_EXPR)
5466 	{
5467 	  bool val;
5468 	  switch (code1)
5469 	    {
5470 	    case EQ_EXPR: val = (cmp == 0); break;
5471 	    case NE_EXPR: val = (cmp != 0); break;
5472 	    case LT_EXPR: val = (cmp <= 0); break;
5473 	    case GT_EXPR: val = (cmp >= 0); break;
5474 	    case LE_EXPR: val = (cmp < 0); break;
5475 	    case GE_EXPR: val = (cmp > 0); break;
5476 	    default:
5477 	      val = false;
5478 	    }
5479 	  if (val)
5480 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
5481 	}
5482 
5483       /* Chose the more restrictive of two < or <= comparisons.  */
5484       else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5485 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
5486 	{
5487 	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5488 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
5489 	  else
5490 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
5491 	}
5492 
5493       /* Likewise chose the more restrictive of two > or >= comparisons.  */
5494       else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5495 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
5496 	{
5497 	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5498 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
5499 	  else
5500 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
5501 	}
5502 
5503       /* Check for singleton ranges.  */
5504       else if (cmp == 0
5505 	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
5506 		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
5507 	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5508 
5509       /* Check for disjoint ranges. */
5510       else if (cmp <= 0
5511 	       && (code1 == LT_EXPR || code1 == LE_EXPR)
5512 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
5513 	return boolean_false_node;
5514       else if (cmp >= 0
5515 	       && (code1 == GT_EXPR || code1 == GE_EXPR)
5516 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
5517 	return boolean_false_node;
5518     }
5519 
5520   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5521      NAME's definition is a truth value.  See if there are any simplifications
5522      that can be done against the NAME's definition.  */
5523   if (TREE_CODE (op1a) == SSA_NAME
5524       && (code1 == NE_EXPR || code1 == EQ_EXPR)
5525       && (integer_zerop (op1b) || integer_onep (op1b)))
5526     {
5527       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5528 		     || (code1 == NE_EXPR && integer_onep (op1b)));
5529       gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5530       switch (gimple_code (stmt))
5531 	{
5532 	case GIMPLE_ASSIGN:
5533 	  /* Try to simplify by copy-propagating the definition.  */
5534 	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5535 
5536 	case GIMPLE_PHI:
5537 	  /* If every argument to the PHI produces the same result when
5538 	     ANDed with the second comparison, we win.
5539 	     Do not do this unless the type is bool since we need a bool
5540 	     result here anyway.  */
5541 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5542 	    {
5543 	      tree result = NULL_TREE;
5544 	      unsigned i;
5545 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
5546 		{
5547 		  tree arg = gimple_phi_arg_def (stmt, i);
5548 
5549 		  /* If this PHI has itself as an argument, ignore it.
5550 		     If all the other args produce the same result,
5551 		     we're still OK.  */
5552 		  if (arg == gimple_phi_result (stmt))
5553 		    continue;
5554 		  else if (TREE_CODE (arg) == INTEGER_CST)
5555 		    {
5556 		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5557 			{
5558 			  if (!result)
5559 			    result = boolean_false_node;
5560 			  else if (!integer_zerop (result))
5561 			    return NULL_TREE;
5562 			}
5563 		      else if (!result)
5564 			result = fold_build2 (code2, boolean_type_node,
5565 					      op2a, op2b);
5566 		      else if (!same_bool_comparison_p (result,
5567 							code2, op2a, op2b))
5568 			return NULL_TREE;
5569 		    }
5570 		  else if (TREE_CODE (arg) == SSA_NAME
5571 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
5572 		    {
5573 		      tree temp;
5574 		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5575 		      /* In simple cases we can look through PHI nodes,
5576 			 but we have to be careful with loops.
5577 			 See PR49073.  */
5578 		      if (! dom_info_available_p (CDI_DOMINATORS)
5579 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
5580 			  || dominated_by_p (CDI_DOMINATORS,
5581 					     gimple_bb (def_stmt),
5582 					     gimple_bb (stmt)))
5583 			return NULL_TREE;
5584 		      temp = and_var_with_comparison (arg, invert, code2,
5585 						      op2a, op2b);
5586 		      if (!temp)
5587 			return NULL_TREE;
5588 		      else if (!result)
5589 			result = temp;
5590 		      else if (!same_bool_result_p (result, temp))
5591 			return NULL_TREE;
5592 		    }
5593 		  else
5594 		    return NULL_TREE;
5595 		}
5596 	      return result;
5597 	    }
5598 
5599 	default:
5600 	  break;
5601 	}
5602     }
5603   return NULL_TREE;
5604 }
5605 
5606 /* Try to simplify the AND of two comparisons, specified by
5607    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5608    If this can be simplified to a single expression (without requiring
5609    introducing more SSA variables to hold intermediate values),
5610    return the resulting tree.  Otherwise return NULL_TREE.
5611    If the result expression is non-null, it has boolean type.  */
5612 
5613 tree
5614 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5615 			    enum tree_code code2, tree op2a, tree op2b)
5616 {
5617   tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5618   if (t)
5619     return t;
5620   else
5621     return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5622 }
5623 
5624 /* Helper function for or_comparisons_1:  try to simplify the OR of the
5625    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5626    If INVERT is true, invert the value of VAR before doing the OR.
5627    Return NULL_EXPR if we can't simplify this to a single expression.  */
5628 
5629 static tree
5630 or_var_with_comparison (tree var, bool invert,
5631 			enum tree_code code2, tree op2a, tree op2b)
5632 {
5633   tree t;
5634   gimple *stmt = SSA_NAME_DEF_STMT (var);
5635 
5636   /* We can only deal with variables whose definitions are assignments.  */
5637   if (!is_gimple_assign (stmt))
5638     return NULL_TREE;
5639 
5640   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5641      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5642      Then we only have to consider the simpler non-inverted cases.  */
5643   if (invert)
5644     t = and_var_with_comparison_1 (stmt,
5645 				   invert_tree_comparison (code2, false),
5646 				   op2a, op2b);
5647   else
5648     t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5649   return canonicalize_bool (t, invert);
5650 }
5651 
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* Record whether the second comparison is equivalent to op2a
	 itself (TRUE_TEST_VAR) or to its negation (FALSE_TEST_VAR);
	 these are reused by the reassociation identities below.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of (inner1 OR (op2a code2 op2b)), if any, saved
	 while we attempt to also simplify against inner2.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5815 
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative when op1b < op2b, zero when they are equal,
	 and positive when op1b > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests !NAME rather
	 than NAME (i.e. NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6069 
6070 /* Try to simplify the OR of two comparisons, specified by
6071    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6072    If this can be simplified to a single expression (without requiring
6073    introducing more SSA variables to hold intermediate values),
6074    return the resulting tree.  Otherwise return NULL_TREE.
6075    If the result expression is non-null, it has boolean type.  */
6076 
6077 tree
6078 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6079 			   enum tree_code code2, tree op2a, tree op2b)
6080 {
6081   tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6082   if (t)
6083     return t;
6084   else
6085     return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6086 }
6087 
6088 
6089 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6090 
6091    Either NULL_TREE, a simplified but non-constant or a constant
6092    is returned.
6093 
6094    ???  This should go into a gimple-fold-inline.h file to be eventually
6095    privatized with the single valueize function used in the various TUs
6096    to avoid the indirect function call overhead.  */
6097 
6098 tree
6099 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6100 				tree (*gvalueize) (tree))
6101 {
6102   code_helper rcode;
6103   tree ops[3] = {};
6104   /* ???  The SSA propagators do not correctly deal with following SSA use-def
6105      edges if there are intermediate VARYING defs.  For this reason
6106      do not follow SSA edges here even though SCCVN can technically
6107      just deal fine with that.  */
6108   if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6109     {
6110       tree res = NULL_TREE;
6111       if (gimple_simplified_result_is_gimple_val (rcode, ops))
6112 	res = ops[0];
6113       else if (mprts_hook)
6114 	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6115       if (res)
6116 	{
6117 	  if (dump_file && dump_flags & TDF_DETAILS)
6118 	    {
6119 	      fprintf (dump_file, "Match-and-simplified ");
6120 	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6121 	      fprintf (dump_file, " to ");
6122 	      print_generic_expr (dump_file, res);
6123 	      fprintf (dump_file, "\n");
6124 	    }
6125 	  return res;
6126 	}
6127     }
6128 
6129   location_t loc = gimple_location (stmt);
6130   switch (gimple_code (stmt))
6131     {
6132     case GIMPLE_ASSIGN:
6133       {
6134         enum tree_code subcode = gimple_assign_rhs_code (stmt);
6135 
6136         switch (get_gimple_rhs_class (subcode))
6137           {
6138           case GIMPLE_SINGLE_RHS:
6139             {
6140               tree rhs = gimple_assign_rhs1 (stmt);
6141               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6142 
6143               if (TREE_CODE (rhs) == SSA_NAME)
6144                 {
6145                   /* If the RHS is an SSA_NAME, return its known constant value,
6146                      if any.  */
6147                   return (*valueize) (rhs);
6148                 }
6149 	      /* Handle propagating invariant addresses into address
6150 		 operations.  */
6151 	      else if (TREE_CODE (rhs) == ADDR_EXPR
6152 		       && !is_gimple_min_invariant (rhs))
6153 		{
6154 		  poly_int64 offset = 0;
6155 		  tree base;
6156 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6157 							  &offset,
6158 							  valueize);
6159 		  if (base
6160 		      && (CONSTANT_CLASS_P (base)
6161 			  || decl_address_invariant_p (base)))
6162 		    return build_invariant_address (TREE_TYPE (rhs),
6163 						    base, offset);
6164 		}
6165 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
6166 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6167 		       && known_eq (CONSTRUCTOR_NELTS (rhs),
6168 				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6169 		{
6170 		  unsigned i, nelts;
6171 		  tree val;
6172 
6173 		  nelts = CONSTRUCTOR_NELTS (rhs);
6174 		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6175 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6176 		    {
6177 		      val = (*valueize) (val);
6178 		      if (TREE_CODE (val) == INTEGER_CST
6179 			  || TREE_CODE (val) == REAL_CST
6180 			  || TREE_CODE (val) == FIXED_CST)
6181 			vec.quick_push (val);
6182 		      else
6183 			return NULL_TREE;
6184 		    }
6185 
6186 		  return vec.build ();
6187 		}
6188 	      if (subcode == OBJ_TYPE_REF)
6189 		{
6190 		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6191 		  /* If callee is constant, we can fold away the wrapper.  */
6192 		  if (is_gimple_min_invariant (val))
6193 		    return val;
6194 		}
6195 
6196               if (kind == tcc_reference)
6197 		{
6198 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6199 		       || TREE_CODE (rhs) == REALPART_EXPR
6200 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
6201 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6202 		    {
6203 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6204 		      return fold_unary_loc (EXPR_LOCATION (rhs),
6205 					     TREE_CODE (rhs),
6206 					     TREE_TYPE (rhs), val);
6207 		    }
6208 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
6209 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6210 		    {
6211 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6212 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
6213 					       TREE_CODE (rhs),
6214 					       TREE_TYPE (rhs), val,
6215 					       TREE_OPERAND (rhs, 1),
6216 					       TREE_OPERAND (rhs, 2));
6217 		    }
6218 		  else if (TREE_CODE (rhs) == MEM_REF
6219 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6220 		    {
6221 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6222 		      if (TREE_CODE (val) == ADDR_EXPR
6223 			  && is_gimple_min_invariant (val))
6224 			{
6225 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6226 						  unshare_expr (val),
6227 						  TREE_OPERAND (rhs, 1));
6228 			  if (tem)
6229 			    rhs = tem;
6230 			}
6231 		    }
6232 		  return fold_const_aggregate_ref_1 (rhs, valueize);
6233 		}
6234               else if (kind == tcc_declaration)
6235                 return get_symbol_constant_value (rhs);
6236               return rhs;
6237             }
6238 
6239           case GIMPLE_UNARY_RHS:
6240 	    return NULL_TREE;
6241 
6242           case GIMPLE_BINARY_RHS:
6243 	    /* Translate &x + CST into an invariant form suitable for
6244 	       further propagation.  */
6245 	    if (subcode == POINTER_PLUS_EXPR)
6246 	      {
6247 		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6248 		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6249 		if (TREE_CODE (op0) == ADDR_EXPR
6250 		    && TREE_CODE (op1) == INTEGER_CST)
6251 		  {
6252 		    tree off = fold_convert (ptr_type_node, op1);
6253 		    return build_fold_addr_expr_loc
6254 			(loc,
6255 			 fold_build2 (MEM_REF,
6256 				      TREE_TYPE (TREE_TYPE (op0)),
6257 				      unshare_expr (op0), off));
6258 		  }
6259 	      }
6260 	    /* Canonicalize bool != 0 and bool == 0 appearing after
6261 	       valueization.  While gimple_simplify handles this
6262 	       it can get confused by the ~X == 1 -> X == 0 transform
6263 	       which we cant reduce to a SSA name or a constant
6264 	       (and we have no way to tell gimple_simplify to not
6265 	       consider those transforms in the first place).  */
6266 	    else if (subcode == EQ_EXPR
6267 		     || subcode == NE_EXPR)
6268 	      {
6269 		tree lhs = gimple_assign_lhs (stmt);
6270 		tree op0 = gimple_assign_rhs1 (stmt);
6271 		if (useless_type_conversion_p (TREE_TYPE (lhs),
6272 					       TREE_TYPE (op0)))
6273 		  {
6274 		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6275 		    op0 = (*valueize) (op0);
6276 		    if (TREE_CODE (op0) == INTEGER_CST)
6277 		      std::swap (op0, op1);
6278 		    if (TREE_CODE (op1) == INTEGER_CST
6279 			&& ((subcode == NE_EXPR && integer_zerop (op1))
6280 			    || (subcode == EQ_EXPR && integer_onep (op1))))
6281 		      return op0;
6282 		  }
6283 	      }
6284 	    return NULL_TREE;
6285 
6286           case GIMPLE_TERNARY_RHS:
6287             {
6288               /* Handle ternary operators that can appear in GIMPLE form.  */
6289               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6290               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6291               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6292               return fold_ternary_loc (loc, subcode,
6293 				       gimple_expr_type (stmt), op0, op1, op2);
6294             }
6295 
6296           default:
6297             gcc_unreachable ();
6298           }
6299       }
6300 
6301     case GIMPLE_CALL:
6302       {
6303 	tree fn;
6304 	gcall *call_stmt = as_a <gcall *> (stmt);
6305 
6306 	if (gimple_call_internal_p (stmt))
6307 	  {
6308 	    enum tree_code subcode = ERROR_MARK;
6309 	    switch (gimple_call_internal_fn (stmt))
6310 	      {
6311 	      case IFN_UBSAN_CHECK_ADD:
6312 		subcode = PLUS_EXPR;
6313 		break;
6314 	      case IFN_UBSAN_CHECK_SUB:
6315 		subcode = MINUS_EXPR;
6316 		break;
6317 	      case IFN_UBSAN_CHECK_MUL:
6318 		subcode = MULT_EXPR;
6319 		break;
6320 	      case IFN_BUILTIN_EXPECT:
6321 		  {
6322 		    tree arg0 = gimple_call_arg (stmt, 0);
6323 		    tree op0 = (*valueize) (arg0);
6324 		    if (TREE_CODE (op0) == INTEGER_CST)
6325 		      return op0;
6326 		    return NULL_TREE;
6327 		  }
6328 	      default:
6329 		return NULL_TREE;
6330 	      }
6331 	    tree arg0 = gimple_call_arg (stmt, 0);
6332 	    tree arg1 = gimple_call_arg (stmt, 1);
6333 	    tree op0 = (*valueize) (arg0);
6334 	    tree op1 = (*valueize) (arg1);
6335 
6336 	    if (TREE_CODE (op0) != INTEGER_CST
6337 		|| TREE_CODE (op1) != INTEGER_CST)
6338 	      {
6339 		switch (subcode)
6340 		  {
6341 		  case MULT_EXPR:
6342 		    /* x * 0 = 0 * x = 0 without overflow.  */
6343 		    if (integer_zerop (op0) || integer_zerop (op1))
6344 		      return build_zero_cst (TREE_TYPE (arg0));
6345 		    break;
6346 		  case MINUS_EXPR:
6347 		    /* y - y = 0 without overflow.  */
6348 		    if (operand_equal_p (op0, op1, 0))
6349 		      return build_zero_cst (TREE_TYPE (arg0));
6350 		    break;
6351 		  default:
6352 		    break;
6353 		  }
6354 	      }
6355 	    tree res
6356 	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6357 	    if (res
6358 		&& TREE_CODE (res) == INTEGER_CST
6359 		&& !TREE_OVERFLOW (res))
6360 	      return res;
6361 	    return NULL_TREE;
6362 	  }
6363 
6364 	fn = (*valueize) (gimple_call_fn (stmt));
6365 	if (TREE_CODE (fn) == ADDR_EXPR
6366 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6367 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6368 	    && gimple_builtin_call_types_compatible_p (stmt,
6369 						       TREE_OPERAND (fn, 0)))
6370 	  {
6371 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6372 	    tree retval;
6373 	    unsigned i;
6374 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
6375 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
6376 	    retval = fold_builtin_call_array (loc,
6377 					 gimple_call_return_type (call_stmt),
6378 					 fn, gimple_call_num_args (stmt), args);
6379 	    if (retval)
6380 	      {
6381 		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
6382 		STRIP_NOPS (retval);
6383 		retval = fold_convert (gimple_call_return_type (call_stmt),
6384 				       retval);
6385 	      }
6386 	    return retval;
6387 	  }
6388 	return NULL_TREE;
6389       }
6390 
6391     default:
6392       return NULL_TREE;
6393     }
6394 }
6395 
6396 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6397    Returns NULL_TREE if folding to a constant is not possible, otherwise
6398    returns a constant according to is_gimple_min_invariant.  */
6399 
6400 tree
6401 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6402 {
6403   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6404   if (res && is_gimple_min_invariant (res))
6405     return res;
6406   return NULL_TREE;
6407 }
6408 
6409 
6410 /* The following set of functions are supposed to fold references using
6411    their constant initializers.  */
6412 
6413 /* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
6415    base is array[40] and we do know constructor of array), then
6416    BIT_OFFSET is adjusted accordingly.
6417 
6418    As a special case, return error_mark_node when constructor
6419    is not explicitly available, but it is known to be zero
6420    such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET; give
	 up if the combined bit offset does not fit a poly_int64.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      /* Look through an SSA pointer to the address it was defined as.  */
      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the position; recurse on the
	 operand with the offset unchanged.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Strip the handled component and accumulate its constant bit
	 offset; only exactly-known-size accesses can be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6488 
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE (in bits) to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the
   declaration the constructor comes from.  Return the folded value or
   NULL_TREE on failure.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
6548 
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE (in bits) to the memory at bit
   OFFSET.  FROM_DECL, if non-NULL, is the declaration the constructor comes
   from.  Return the folded value or NULL_TREE on failure.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  /* The access starts before the field does; cannot fold.  */
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
6611 
/* CTOR is value initializing memory, fold reference of type TYPE and
   size POLY_SIZE (in bits) to the memory at bit POLY_OFFSET.  FROM_DECL,
   if non-NULL, is the declaration the constructor comes from.  Return the
   folded value or NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the aggregate kind; vectors use array-style
	 element lookup.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
6675 
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Sign-extend the zero-based index in the precision of the
		 index type.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex-valued operand first, then pick the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
			      TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6793 
6794 tree
6795 fold_const_aggregate_ref (tree t)
6796 {
6797   return fold_const_aggregate_ref_1 (t, NULL);
6798 }
6799 
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the slot selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array. */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6908 
6909 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6910    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6911    KNOWN_BINFO carries the binfo describing the true type of
6912    OBJ_TYPE_REF_OBJECT(REF).
6913    Set CAN_REFER if non-NULL to false if method
6914    is not referable or if the virtual table is ill-formed (such as rewriten
6915    by non-C++ produced symbol). Otherwise just return NULL in that calse.  */
6916 
6917 tree
6918 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6919 				  bool *can_refer)
6920 {
6921   unsigned HOST_WIDE_INT offset;
6922   tree v;
6923 
6924   v = BINFO_VTABLE (known_binfo);
6925   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
6926   if (!v)
6927     return NULL_TREE;
6928 
6929   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6930     {
6931       if (can_refer)
6932 	*can_refer = false;
6933       return NULL_TREE;
6934     }
6935   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6936 }
6937 
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Ref-all pointers may alias anything; do not fold through them.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold when the selected element lies inside the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7058 
7059 /* Return true if CODE is an operation that when operating on signed
7060    integer types involves undefined behavior on overflow and the
7061    operation can be expressed with unsigned arithmetic.  */
7062 
7063 bool
7064 arith_code_with_undefined_signed_overflow (tree_code code)
7065 {
7066   switch (code)
7067     {
7068     case PLUS_EXPR:
7069     case MINUS_EXPR:
7070     case MULT_EXPR:
7071     case NEGATE_EXPR:
7072     case POINTER_PLUS_EXPR:
7073       return true;
7074     default:
7075       return false;
7076     }
7077 }
7078 
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand to the corresponding unsigned type,
     collecting the conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-assigned by
     the conversion statement added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on the unsigned integer type, use a
     plain PLUS_EXPR instead.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7115 
7116 
7117 /* The valueization hook we use for the gimple_build API simplification.
7118    This makes us match fold_buildN behavior by only combining with
7119    statements in the sequence(s) we are currently building.  */
7120 
7121 static tree
7122 gimple_build_valueize (tree op)
7123 {
7124   if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7125     return op;
7126   return NULL_TREE;
7127 }
7128 
7129 /* Build the expression CODE OP0 of type TYPE with location LOC,
7130    simplifying it first if possible.  Returns the built
7131    expression value and appends statements possibly defining it
7132    to SEQ.  */
7133 
7134 tree
7135 gimple_build (gimple_seq *seq, location_t loc,
7136 	      enum tree_code code, tree type, tree op0)
7137 {
7138   tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7139   if (!res)
7140     {
7141       res = create_tmp_reg_or_ssa_name (type);
7142       gimple *stmt;
7143       if (code == REALPART_EXPR
7144 	  || code == IMAGPART_EXPR
7145 	  || code == VIEW_CONVERT_EXPR)
7146 	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7147       else
7148 	stmt = gimple_build_assign (res, code, op0);
7149       gimple_set_location (stmt, loc);
7150       gimple_seq_add_stmt_without_update (seq, stmt);
7151     }
7152   return res;
7153 }
7154 
7155 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7156    simplifying it first if possible.  Returns the built
7157    expression value and appends statements possibly defining it
7158    to SEQ.  */
7159 
7160 tree
7161 gimple_build (gimple_seq *seq, location_t loc,
7162 	      enum tree_code code, tree type, tree op0, tree op1)
7163 {
7164   tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7165   if (!res)
7166     {
7167       res = create_tmp_reg_or_ssa_name (type);
7168       gimple *stmt = gimple_build_assign (res, code, op0, op1);
7169       gimple_set_location (stmt, loc);
7170       gimple_seq_add_stmt_without_update (seq, stmt);
7171     }
7172   return res;
7173 }
7174 
7175 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7176    simplifying it first if possible.  Returns the built
7177    expression value and appends statements possibly defining it
7178    to SEQ.  */
7179 
7180 tree
7181 gimple_build (gimple_seq *seq, location_t loc,
7182 	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
7183 {
7184   tree res = gimple_simplify (code, type, op0, op1, op2,
7185 			      seq, gimple_build_valueize);
7186   if (!res)
7187     {
7188       res = create_tmp_reg_or_ssa_name (type);
7189       gimple *stmt;
7190       if (code == BIT_FIELD_REF)
7191 	stmt = gimple_build_assign (res, code,
7192 				    build3 (code, type, op0, op1, op2));
7193       else
7194 	stmt = gimple_build_assign (res, code, op0, op1, op2);
7195       gimple_set_location (stmt, loc);
7196       gimple_seq_add_stmt_without_update (seq, stmt);
7197     }
7198   return res;
7199 }
7200 
7201 /* Build the call FN (ARG0) with a result of type TYPE
7202    (or no result if TYPE is void) with location LOC,
7203    simplifying it first if possible.  Returns the built
7204    expression value (or NULL_TREE if TYPE is void) and appends
7205    statements possibly defining it to SEQ.  */
7206 
7207 tree
7208 gimple_build (gimple_seq *seq, location_t loc,
7209 	      enum built_in_function fn, tree type, tree arg0)
7210 {
7211   tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7212   if (!res)
7213     {
7214       tree decl = builtin_decl_implicit (fn);
7215       gimple *stmt = gimple_build_call (decl, 1, arg0);
7216       if (!VOID_TYPE_P (type))
7217 	{
7218 	  res = create_tmp_reg_or_ssa_name (type);
7219 	  gimple_call_set_lhs (stmt, res);
7220 	}
7221       gimple_set_location (stmt, loc);
7222       gimple_seq_add_stmt_without_update (seq, stmt);
7223     }
7224   return res;
7225 }
7226 
7227 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7228    (or no result if TYPE is void) with location LOC,
7229    simplifying it first if possible.  Returns the built
7230    expression value (or NULL_TREE if TYPE is void) and appends
7231    statements possibly defining it to SEQ.  */
7232 
7233 tree
7234 gimple_build (gimple_seq *seq, location_t loc,
7235 	      enum built_in_function fn, tree type, tree arg0, tree arg1)
7236 {
7237   tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7238   if (!res)
7239     {
7240       tree decl = builtin_decl_implicit (fn);
7241       gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7242       if (!VOID_TYPE_P (type))
7243 	{
7244 	  res = create_tmp_reg_or_ssa_name (type);
7245 	  gimple_call_set_lhs (stmt, res);
7246 	}
7247       gimple_set_location (stmt, loc);
7248       gimple_seq_add_stmt_without_update (seq, stmt);
7249     }
7250   return res;
7251 }
7252 
7253 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7254    (or no result if TYPE is void) with location LOC,
7255    simplifying it first if possible.  Returns the built
7256    expression value (or NULL_TREE if TYPE is void) and appends
7257    statements possibly defining it to SEQ.  */
7258 
7259 tree
7260 gimple_build (gimple_seq *seq, location_t loc,
7261 	      enum built_in_function fn, tree type,
7262 	      tree arg0, tree arg1, tree arg2)
7263 {
7264   tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7265 			      seq, gimple_build_valueize);
7266   if (!res)
7267     {
7268       tree decl = builtin_decl_implicit (fn);
7269       gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7270       if (!VOID_TYPE_P (type))
7271 	{
7272 	  res = create_tmp_reg_or_ssa_name (type);
7273 	  gimple_call_set_lhs (stmt, res);
7274 	}
7275       gimple_set_location (stmt, loc);
7276       gimple_seq_add_stmt_without_update (seq, stmt);
7277     }
7278   return res;
7279 }
7280 
7281 /* Build the conversion (TYPE) OP with a result of type TYPE
7282    with location LOC if such conversion is neccesary in GIMPLE,
7283    simplifying it first.
7284    Returns the built expression value and appends
7285    statements possibly defining it to SEQ.  */
7286 
7287 tree
7288 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7289 {
7290   if (useless_type_conversion_p (type, TREE_TYPE (op)))
7291     return op;
7292   return gimple_build (seq, loc, NOP_EXPR, type, op);
7293 }
7294 
7295 /* Build the conversion (ptrofftype) OP with a result of a type
7296    compatible with ptrofftype with location LOC if such conversion
7297    is neccesary in GIMPLE, simplifying it first.
7298    Returns the built expression value and appends
7299    statements possibly defining it to SEQ.  */
7300 
7301 tree
7302 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7303 {
7304   if (ptrofftype_p (TREE_TYPE (op)))
7305     return op;
7306   return gimple_convert (seq, loc, sizetype, op);
7307 }
7308 
7309 /* Build a vector of type TYPE in which each element has the value OP.
7310    Return a gimple value for the result, appending any new statements
7311    to SEQ.  */
7312 
7313 tree
7314 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7315 			      tree op)
7316 {
7317   if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7318       && !CONSTANT_CLASS_P (op))
7319     return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7320 
7321   tree res, vec = build_vector_from_val (type, op);
7322   if (is_gimple_val (vec))
7323     return vec;
7324   if (gimple_in_ssa_p (cfun))
7325     res = make_ssa_name (type);
7326   else
7327     res = create_tmp_reg (type);
7328   gimple *stmt = gimple_build_assign (res, vec);
7329   gimple_set_location (stmt, loc);
7330   gimple_seq_add_stmt_without_update (seq, stmt);
7331   return res;
7332 }
7333 
7334 /* Build a vector from BUILDER, handling the case in which some elements
7335    are non-constant.  Return a gimple value for the result, appending any
7336    new instructions to SEQ.
7337 
7338    BUILDER must not have a stepped encoding on entry.  This is because
7339    the function is not geared up to handle the arithmetic that would
7340    be needed in the variable case, and any code building a vector that
7341    is known to be constant should use BUILDER->build () directly.  */
7342 
7343 tree
7344 gimple_build_vector (gimple_seq *seq, location_t loc,
7345 		     tree_vector_builder *builder)
7346 {
7347   gcc_assert (builder->nelts_per_pattern () <= 2);
7348   unsigned int encoded_nelts = builder->encoded_nelts ();
7349   for (unsigned int i = 0; i < encoded_nelts; ++i)
7350     if (!TREE_CONSTANT ((*builder)[i]))
7351       {
7352 	tree type = builder->type ();
7353 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7354 	vec<constructor_elt, va_gc> *v;
7355 	vec_alloc (v, nelts);
7356 	for (i = 0; i < nelts; ++i)
7357 	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7358 
7359 	tree res;
7360 	if (gimple_in_ssa_p (cfun))
7361 	  res = make_ssa_name (type);
7362 	else
7363 	  res = create_tmp_reg (type);
7364 	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7365 	gimple_set_location (stmt, loc);
7366 	gimple_seq_add_stmt_without_update (seq, stmt);
7367 	return res;
7368       }
7369   return builder->build ();
7370 }
7371 
7372 /* Return true if the result of assignment STMT is known to be non-negative.
7373    If the return value is based on the assumption that signed overflow is
7374    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7375    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7376 
7377 static bool
7378 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7379 				   int depth)
7380 {
7381   enum tree_code code = gimple_assign_rhs_code (stmt);
7382   switch (get_gimple_rhs_class (code))
7383     {
7384     case GIMPLE_UNARY_RHS:
7385       return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7386 					     gimple_expr_type (stmt),
7387 					     gimple_assign_rhs1 (stmt),
7388 					     strict_overflow_p, depth);
7389     case GIMPLE_BINARY_RHS:
7390       return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7391 					      gimple_expr_type (stmt),
7392 					      gimple_assign_rhs1 (stmt),
7393 					      gimple_assign_rhs2 (stmt),
7394 					      strict_overflow_p, depth);
7395     case GIMPLE_TERNARY_RHS:
7396       return false;
7397     case GIMPLE_SINGLE_RHS:
7398       return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7399 					      strict_overflow_p, depth);
7400     case GIMPLE_INVALID_RHS:
7401       break;
7402     }
7403   gcc_unreachable ();
7404 }
7405 
7406 /* Return true if return value of call STMT is known to be non-negative.
7407    If the return value is based on the assumption that signed overflow is
7408    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7409    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7410 
7411 static bool
7412 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7413 				 int depth)
7414 {
7415   tree arg0 = gimple_call_num_args (stmt) > 0 ?
7416     gimple_call_arg (stmt, 0) : NULL_TREE;
7417   tree arg1 = gimple_call_num_args (stmt) > 1 ?
7418     gimple_call_arg (stmt, 1) : NULL_TREE;
7419 
7420   return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7421 					gimple_call_combined_fn (stmt),
7422 					arg0,
7423 					arg1,
7424 					strict_overflow_p, depth);
7425 }
7426 
/* Return true if the result of PHI STMT is known to be non-negative,
   i.e. if every incoming argument is known to be non-negative.
   (The original comment said "call STMT" — a copy-paste from the
   function above; this one handles GIMPLE_PHI.)
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      /* Recurse one level deeper for each PHI argument to bound the
	 overall cost of the query.  */
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
7444 
7445 /* Return true if STMT is known to compute a non-negative value.
7446    If the return value is based on the assumption that signed overflow is
7447    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7448    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7449 
7450 bool
7451 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7452 				 int depth)
7453 {
7454   switch (gimple_code (stmt))
7455     {
7456     case GIMPLE_ASSIGN:
7457       return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7458 						depth);
7459     case GIMPLE_CALL:
7460       return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7461 					      depth);
7462     case GIMPLE_PHI:
7463       return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7464 					     depth);
7465     default:
7466       return false;
7467     }
7468 }
7469 
7470 /* Return true if the floating-point value computed by assignment STMT
7471    is known to have an integer value.  We also allow +Inf, -Inf and NaN
7472    to be considered integer values. Return false for signaling NaN.
7473 
7474    DEPTH is the current nesting depth of the query.  */
7475 
7476 static bool
7477 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7478 {
7479   enum tree_code code = gimple_assign_rhs_code (stmt);
7480   switch (get_gimple_rhs_class (code))
7481     {
7482     case GIMPLE_UNARY_RHS:
7483       return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7484 					  gimple_assign_rhs1 (stmt), depth);
7485     case GIMPLE_BINARY_RHS:
7486       return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7487 					   gimple_assign_rhs1 (stmt),
7488 					   gimple_assign_rhs2 (stmt), depth);
7489     case GIMPLE_TERNARY_RHS:
7490       return false;
7491     case GIMPLE_SINGLE_RHS:
7492       return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7493     case GIMPLE_INVALID_RHS:
7494       break;
7495     }
7496   gcc_unreachable ();
7497 }
7498 
7499 /* Return true if the floating-point value computed by call STMT is known
7500    to have an integer value.  We also allow +Inf, -Inf and NaN to be
7501    considered integer values. Return false for signaling NaN.
7502 
7503    DEPTH is the current nesting depth of the query.  */
7504 
7505 static bool
7506 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7507 {
7508   tree arg0 = (gimple_call_num_args (stmt) > 0
7509 	       ? gimple_call_arg (stmt, 0)
7510 	       : NULL_TREE);
7511   tree arg1 = (gimple_call_num_args (stmt) > 1
7512 	       ? gimple_call_arg (stmt, 1)
7513 	       : NULL_TREE);
7514   return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7515 				     arg0, arg1, depth);
7516 }
7517 
7518 /* Return true if the floating-point result of phi STMT is known to have
7519    an integer value.  We also allow +Inf, -Inf and NaN to be considered
7520    integer values. Return false for signaling NaN.
7521 
7522    DEPTH is the current nesting depth of the query.  */
7523 
7524 static bool
7525 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7526 {
7527   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7528     {
7529       tree arg = gimple_phi_arg_def (stmt, i);
7530       if (!integer_valued_real_single_p (arg, depth + 1))
7531 	return false;
7532     }
7533   return true;
7534 }
7535 
7536 /* Return true if the floating-point value computed by STMT is known
7537    to have an integer value.  We also allow +Inf, -Inf and NaN to be
7538    considered integer values. Return false for signaling NaN.
7539 
7540    DEPTH is the current nesting depth of the query.  */
7541 
7542 bool
7543 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7544 {
7545   switch (gimple_code (stmt))
7546     {
7547     case GIMPLE_ASSIGN:
7548       return gimple_assign_integer_valued_real_p (stmt, depth);
7549     case GIMPLE_CALL:
7550       return gimple_call_integer_valued_real_p (stmt, depth);
7551     case GIMPLE_PHI:
7552       return gimple_phi_integer_valued_real_p (stmt, depth);
7553     default:
7554       return false;
7555     }
7556 }
7557