1 /* Statement simplification on GIMPLE.
2    Copyright (C) 2010-2020 Free Software Foundation, Inc.
3    Split out from tree-ssa-ccp.c.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68 #include "varasm.h"
69 
/* Kinds of string-length queries answered by get_range_strlen (forward
   declared below): each enumerator selects a different interpretation
   of the computed length/range.  */
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
84 
85 static bool
86 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87 
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (origin-only) decls never have a body to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An already-inlined function body cannot be referenced directly.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized. */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
190 
191 /* Create a temporary for TYPE for a statement STMT.  If the current function
192    is in SSA form, a SSA name is created.  Otherwise a temporary register
193    is made.  */
194 
195 tree
create_tmp_reg_or_ssa_name(tree type,gimple * stmt)196 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 {
198   if (gimple_in_ssa_p (cfun))
199     return make_ssa_name (type, stmt);
200   else
201     return create_tmp_reg (type);
202 }
203 
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  Returns the canonicalized value, NULL_TREE when the value cannot be
   referenced, or the original value when no transformation applies.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  /* Constants are already in canonical form.  */
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM_REF[ptr, CST] so the whole value
     becomes a single ADDR_EXPR that the code below can handle.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl,
	     rewriting the ADDR_EXPR operand in place.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the referenced decl is not available in this unit
	 (e.g. a COMDAT body that will be output elsewhere).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* Taking the address of BASE; it must be marked addressable.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
275 
276 /* If SYM is a constant variable with known value, return the value.
277    NULL_TREE is returned otherwise.  */
278 
279 tree
get_symbol_constant_value(tree sym)280 get_symbol_constant_value (tree sym)
281 {
282   tree val = ctor_for_folding (sym);
283   if (val != error_mark_node)
284     {
285       if (val)
286 	{
287 	  val = canonicalize_constructor_val (unshare_expr (val), sym);
288 	  if (val && is_gimple_min_invariant (val))
289 	    return val;
290 	  else
291 	    return NULL_TREE;
292 	}
293       /* Variables declared 'const' without an initializer
294 	 have zero as the initializer if they may not be
295 	 overridden at link or run time.  */
296       if (!val
297           && is_gimple_reg_type (TREE_TYPE (sym)))
298 	return build_zero_cst (TREE_TYPE (sym));
299     }
300 
301   return NULL_TREE;
302 }
303 
304 
305 
306 /* Subroutine of fold_stmt.  We perform several simplifications of the
307    memory reference tree EXPR and make sure to re-gimplify them properly
308    after propagation of constant addresses.  IS_LHS is true if the
309    reference is supposed to be an lvalue.  */
310 
311 static tree
maybe_fold_reference(tree expr,bool is_lhs)312 maybe_fold_reference (tree expr, bool is_lhs)
313 {
314   tree result;
315 
316   if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317        || TREE_CODE (expr) == REALPART_EXPR
318        || TREE_CODE (expr) == IMAGPART_EXPR)
319       && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
320     return fold_unary_loc (EXPR_LOCATION (expr),
321 			   TREE_CODE (expr),
322 			   TREE_TYPE (expr),
323 			   TREE_OPERAND (expr, 0));
324   else if (TREE_CODE (expr) == BIT_FIELD_REF
325 	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326     return fold_ternary_loc (EXPR_LOCATION (expr),
327 			     TREE_CODE (expr),
328 			     TREE_TYPE (expr),
329 			     TREE_OPERAND (expr, 0),
330 			     TREE_OPERAND (expr, 1),
331 			     TREE_OPERAND (expr, 2));
332 
333   if (!is_lhs
334       && (result = fold_const_aggregate_ref (expr))
335       && is_gimple_min_invariant (result))
336     return result;
337 
338   return NULL_TREE;
339 }
340 
341 
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to resolve the virtual function address to the address
	       of the unique possible target.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    /* All elements must be constants, otherwise give up.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
480 
481 
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A store is a single assignment to memory, or a call that may
	 clobber memory (not novops/pure/const/noreturn).  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the original
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
553 
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result wanted: gimplify EXPR for its side effects only.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR to a value and assign it to the call's lhs,
	 appending the assignment to the generated sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, threading virtual operands through it.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
609 
610 
611 /* Replace the call at *GSI with the gimple value VAL.  */
612 
613 void
replace_call_with_value(gimple_stmt_iterator * gsi,tree val)614 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
615 {
616   gimple *stmt = gsi_stmt (*gsi);
617   tree lhs = gimple_call_lhs (stmt);
618   gimple *repl;
619   if (lhs)
620     {
621       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
622 	val = fold_convert (TREE_TYPE (lhs), val);
623       repl = gimple_build_assign (lhs, val);
624     }
625   else
626     repl = gimple_build_nop ();
627   tree vdef = gimple_vdef (stmt);
628   if (vdef && TREE_CODE (vdef) == SSA_NAME)
629     {
630       unlink_stmt_vdef (stmt);
631       release_ssa_name (vdef);
632     }
633   gsi_replace (gsi, repl, false);
634 }
635 
636 /* Replace the call at *GSI with the new call REPL and fold that
637    again.  */
638 
639 static void
replace_call_with_call_and_fold(gimple_stmt_iterator * gsi,gimple * repl)640 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
641 {
642   gimple *stmt = gsi_stmt (*gsi);
643   gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
644   gimple_set_location (repl, gimple_location (stmt));
645   gimple_move_vops (repl, stmt);
646   gsi_replace (gsi, repl, false);
647   fold_stmt (gsi);
648 }
649 
650 /* Return true if VAR is a VAR_DECL or a component thereof.  */
651 
652 static bool
var_decl_component_p(tree var)653 var_decl_component_p (tree var)
654 {
655   tree inner = var;
656   while (handled_component_p (inner))
657     inner = TREE_OPERAND (inner, 0);
658   return (DECL_P (inner)
659 	  || (TREE_CODE (inner) == MEM_REF
660 	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
661 }
662 
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Range information is only available for integral SSA names.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* The only valid sizes are [0, SSIZE_MAX]; anything larger would be
     invalid, so if the intersection of SIZE's range with the valid
     range contains only zero, zero is the only possible value.  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
688 
689 /* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
690    diagnose (otherwise undefined) overlapping copies without preventing
691    folding.  When folded, GCC guarantees that overlapping memcpy has
692    the same semantics as memmove.  Call to the library memcpy need not
693    provide the same guarantee.  Return false if no simplification can
694    be made.  */
695 
696 static bool
gimple_fold_builtin_memory_op(gimple_stmt_iterator * gsi,tree dest,tree src,enum built_in_function code)697 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
698 			       tree dest, tree src, enum built_in_function code)
699 {
700   gimple *stmt = gsi_stmt (*gsi);
701   tree lhs = gimple_call_lhs (stmt);
702   tree len = gimple_call_arg (stmt, 2);
703   tree destvar, srcvar;
704   location_t loc = gimple_location (stmt);
705 
706   /* If the LEN parameter is a constant zero or in range where
707      the only valid value is zero, return DEST.  */
708   if (size_must_be_zero_p (len))
709     {
710       gimple *repl;
711       if (gimple_call_lhs (stmt))
712 	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
713       else
714 	repl = gimple_build_nop ();
715       tree vdef = gimple_vdef (stmt);
716       if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 	{
718 	  unlink_stmt_vdef (stmt);
719 	  release_ssa_name (vdef);
720 	}
721       gsi_replace (gsi, repl, false);
722       return true;
723     }
724 
725   /* If SRC and DEST are the same (and not volatile), return
726      DEST{,+LEN,+LEN-1}.  */
727   if (operand_equal_p (src, dest, 0))
728     {
729       /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
730 	 It's safe and may even be emitted by GCC itself (see bug
731 	 32667).  */
732       unlink_stmt_vdef (stmt);
733       if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
734 	release_ssa_name (gimple_vdef (stmt));
735       if (!lhs)
736 	{
737 	  gsi_replace (gsi, gimple_build_nop (), false);
738 	  return true;
739 	}
740       goto done;
741     }
742   else
743     {
744       tree srctype, desttype;
745       unsigned int src_align, dest_align;
746       tree off0;
747       const char *tmp_str;
748       unsigned HOST_WIDE_INT tmp_len;
749 
750       /* Build accesses at offset zero with a ref-all character type.  */
751       off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 							 ptr_mode, true), 0);
753 
754       /* If we can perform the copy efficiently with first doing all loads
755          and then all stores inline it that way.  Currently efficiently
756 	 means that we can load all the memory into a single integer
757 	 register which is what MOVE_MAX gives us.  */
758       src_align = get_pointer_alignment (src);
759       dest_align = get_pointer_alignment (dest);
760       if (tree_fits_uhwi_p (len)
761 	  && compare_tree_int (len, MOVE_MAX) <= 0
762 	  /* FIXME: Don't transform copies from strings with known length.
763 	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
764 	     from being handled, and the case was XFAILed for that reason.
765 	     Now that it is handled and the XFAIL removed, as soon as other
766 	     strlenopt tests that rely on it for passing are adjusted, this
767 	     hack can be removed.  */
768 	  && !c_strlen (src, 1)
769 	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
770 	       && memchr (tmp_str, 0, tmp_len) == NULL))
771 	{
772 	  unsigned ilen = tree_to_uhwi (len);
773 	  if (pow2p_hwi (ilen))
774 	    {
775 	      /* Detect out-of-bounds accesses without issuing warnings.
776 		 Avoid folding out-of-bounds copies but to avoid false
777 		 positives for unreachable code defer warning until after
778 		 DCE has worked its magic.
779 		 -Wrestrict is still diagnosed.  */
780 	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
781 							 dest, src, len, len,
782 							 false, false))
783 		if (warning != OPT_Wrestrict)
784 		  return false;
785 
786 	      scalar_int_mode mode;
787 	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
788 	      if (type
789 		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
790 		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
791 		  /* If the destination pointer is not aligned we must be able
792 		     to emit an unaligned store.  */
793 		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
794 		      || !targetm.slow_unaligned_access (mode, dest_align)
795 		      || (optab_handler (movmisalign_optab, mode)
796 			  != CODE_FOR_nothing)))
797 		{
798 		  tree srctype = type;
799 		  tree desttype = type;
800 		  if (src_align < GET_MODE_ALIGNMENT (mode))
801 		    srctype = build_aligned_type (type, src_align);
802 		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
803 		  tree tem = fold_const_aggregate_ref (srcmem);
804 		  if (tem)
805 		    srcmem = tem;
806 		  else if (src_align < GET_MODE_ALIGNMENT (mode)
807 			   && targetm.slow_unaligned_access (mode, src_align)
808 			   && (optab_handler (movmisalign_optab, mode)
809 			       == CODE_FOR_nothing))
810 		    srcmem = NULL_TREE;
811 		  if (srcmem)
812 		    {
813 		      gimple *new_stmt;
814 		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
815 			{
816 			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
817 			  srcmem
818 			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
819 							  new_stmt);
820 			  gimple_assign_set_lhs (new_stmt, srcmem);
821 			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
822 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
823 			}
824 		      if (dest_align < GET_MODE_ALIGNMENT (mode))
825 			desttype = build_aligned_type (type, dest_align);
826 		      new_stmt
827 			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
828 							    dest, off0),
829 					       srcmem);
830 		      gimple_move_vops (new_stmt, stmt);
831 		      if (!lhs)
832 			{
833 			  gsi_replace (gsi, new_stmt, false);
834 			  return true;
835 			}
836 		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
837 		      goto done;
838 		    }
839 		}
840 	    }
841 	}
842 
843       if (code == BUILT_IN_MEMMOVE)
844 	{
845 	  /* Both DEST and SRC must be pointer types.
846 	     ??? This is what old code did.  Is the testing for pointer types
847 	     really mandatory?
848 
849 	     If either SRC is readonly or length is 1, we can use memcpy.  */
850 	  if (!dest_align || !src_align)
851 	    return false;
852 	  if (readonly_data_expr (src)
853 	      || (tree_fits_uhwi_p (len)
854 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
855 		      >= tree_to_uhwi (len))))
856 	    {
857 	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
858 	      if (!fn)
859 		return false;
860 	      gimple_call_set_fndecl (stmt, fn);
861 	      gimple_call_set_arg (stmt, 0, dest);
862 	      gimple_call_set_arg (stmt, 1, src);
863 	      fold_stmt (gsi);
864 	      return true;
865 	    }
866 
867 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
868 	  if (TREE_CODE (src) == ADDR_EXPR
869 	      && TREE_CODE (dest) == ADDR_EXPR)
870 	    {
871 	      tree src_base, dest_base, fn;
872 	      poly_int64 src_offset = 0, dest_offset = 0;
873 	      poly_uint64 maxsize;
874 
875 	      srcvar = TREE_OPERAND (src, 0);
876 	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
877 	      if (src_base == NULL)
878 		src_base = srcvar;
879 	      destvar = TREE_OPERAND (dest, 0);
880 	      dest_base = get_addr_base_and_unit_offset (destvar,
881 							 &dest_offset);
882 	      if (dest_base == NULL)
883 		dest_base = destvar;
884 	      if (!poly_int_tree_p (len, &maxsize))
885 		maxsize = -1;
886 	      if (SSA_VAR_P (src_base)
887 		  && SSA_VAR_P (dest_base))
888 		{
889 		  if (operand_equal_p (src_base, dest_base, 0)
890 		      && ranges_maybe_overlap_p (src_offset, maxsize,
891 						 dest_offset, maxsize))
892 		    return false;
893 		}
894 	      else if (TREE_CODE (src_base) == MEM_REF
895 		       && TREE_CODE (dest_base) == MEM_REF)
896 		{
897 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
898 					 TREE_OPERAND (dest_base, 0), 0))
899 		    return false;
900 		  poly_offset_int full_src_offset
901 		    = mem_ref_offset (src_base) + src_offset;
902 		  poly_offset_int full_dest_offset
903 		    = mem_ref_offset (dest_base) + dest_offset;
904 		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
905 					      full_dest_offset, maxsize))
906 		    return false;
907 		}
908 	      else
909 		return false;
910 
911 	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
912 	      if (!fn)
913 		return false;
914 	      gimple_call_set_fndecl (stmt, fn);
915 	      gimple_call_set_arg (stmt, 0, dest);
916 	      gimple_call_set_arg (stmt, 1, src);
917 	      fold_stmt (gsi);
918 	      return true;
919 	    }
920 
921 	  /* If the destination and source do not alias optimize into
922 	     memcpy as well.  */
923 	  if ((is_gimple_min_invariant (dest)
924 	       || TREE_CODE (dest) == SSA_NAME)
925 	      && (is_gimple_min_invariant (src)
926 		  || TREE_CODE (src) == SSA_NAME))
927 	    {
928 	      ao_ref destr, srcr;
929 	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
930 	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
931 	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
932 		{
933 		  tree fn;
934 		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
935 		  if (!fn)
936 		    return false;
937 		  gimple_call_set_fndecl (stmt, fn);
938 		  gimple_call_set_arg (stmt, 0, dest);
939 		  gimple_call_set_arg (stmt, 1, src);
940 		  fold_stmt (gsi);
941 		  return true;
942 		}
943 	    }
944 
945 	  return false;
946 	}
947 
948       if (!tree_fits_shwi_p (len))
949 	return false;
950       if (!POINTER_TYPE_P (TREE_TYPE (src))
951 	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
952 	return false;
953       /* In the following try to find a type that is most natural to be
954 	 used for the memcpy source and destination and that allows
955 	 the most optimization when memcpy is turned into a plain assignment
956 	 using that type.  In theory we could always use a char[len] type
957 	 but that only gains us that the destination and source possibly
958 	 no longer will have their address taken.  */
959       srctype = TREE_TYPE (TREE_TYPE (src));
960       if (TREE_CODE (srctype) == ARRAY_TYPE
961 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
962 	srctype = TREE_TYPE (srctype);
963       desttype = TREE_TYPE (TREE_TYPE (dest));
964       if (TREE_CODE (desttype) == ARRAY_TYPE
965 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
966 	desttype = TREE_TYPE (desttype);
967       if (TREE_ADDRESSABLE (srctype)
968 	  || TREE_ADDRESSABLE (desttype))
969 	return false;
970 
971       /* Make sure we are not copying using a floating-point mode or
972          a type whose size possibly does not match its precision.  */
973       if (FLOAT_MODE_P (TYPE_MODE (desttype))
974 	  || TREE_CODE (desttype) == BOOLEAN_TYPE
975 	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
976 	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
977       if (FLOAT_MODE_P (TYPE_MODE (srctype))
978 	  || TREE_CODE (srctype) == BOOLEAN_TYPE
979 	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
980 	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
981       if (!srctype)
982 	srctype = desttype;
983       if (!desttype)
984 	desttype = srctype;
985       if (!srctype)
986 	return false;
987 
988       src_align = get_pointer_alignment (src);
989       dest_align = get_pointer_alignment (dest);
990 
991       /* Choose between src and destination type for the access based
992          on alignment, whether the access constitutes a register access
993 	 and whether it may actually expose a declaration for SSA rewrite
994 	 or SRA decomposition.  */
995       destvar = NULL_TREE;
996       srcvar = NULL_TREE;
997       if (TREE_CODE (dest) == ADDR_EXPR
998 	  && var_decl_component_p (TREE_OPERAND (dest, 0))
999 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1000 	  && dest_align >= TYPE_ALIGN (desttype)
1001 	  && (is_gimple_reg_type (desttype)
1002 	      || src_align >= TYPE_ALIGN (desttype)))
1003 	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1004       else if (TREE_CODE (src) == ADDR_EXPR
1005 	       && var_decl_component_p (TREE_OPERAND (src, 0))
1006 	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1007 	       && src_align >= TYPE_ALIGN (srctype)
1008 	       && (is_gimple_reg_type (srctype)
1009 		   || dest_align >= TYPE_ALIGN (srctype)))
1010 	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1011       if (srcvar == NULL_TREE && destvar == NULL_TREE)
1012 	return false;
1013 
1014       /* Now that we chose an access type express the other side in
1015          terms of it if the target allows that with respect to alignment
1016 	 constraints.  */
1017       if (srcvar == NULL_TREE)
1018 	{
1019 	  if (src_align >= TYPE_ALIGN (desttype))
1020 	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1021 	  else
1022 	    {
1023 	      if (STRICT_ALIGNMENT)
1024 		return false;
1025 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1026 					    src_align);
1027 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1028 	    }
1029 	}
1030       else if (destvar == NULL_TREE)
1031 	{
1032 	  if (dest_align >= TYPE_ALIGN (srctype))
1033 	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1034 	  else
1035 	    {
1036 	      if (STRICT_ALIGNMENT)
1037 		return false;
1038 	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1039 					     dest_align);
1040 	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1041 	    }
1042 	}
1043 
1044       /* Same as above, detect out-of-bounds accesses without issuing
1045 	 warnings.  Avoid folding out-of-bounds copies but to avoid
1046 	 false positives for unreachable code defer warning until
1047 	 after DCE has worked its magic.
1048 	 -Wrestrict is still diagnosed.  */
1049       if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1050 						 dest, src, len, len,
1051 						 false, false))
1052 	if (warning != OPT_Wrestrict)
1053 	  return false;
1054 
1055       gimple *new_stmt;
1056       if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1057 	{
1058 	  tree tem = fold_const_aggregate_ref (srcvar);
1059 	  if (tem)
1060 	    srcvar = tem;
1061 	  if (! is_gimple_min_invariant (srcvar))
1062 	    {
1063 	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1064 	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1065 						   new_stmt);
1066 	      gimple_assign_set_lhs (new_stmt, srcvar);
1067 	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1068 	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1069 	    }
1070 	  new_stmt = gimple_build_assign (destvar, srcvar);
1071 	  goto set_vop_and_replace;
1072 	}
1073 
1074       /* We get an aggregate copy.  Use an unsigned char[] type to
1075 	 perform the copying to preserve padding and to avoid any issues
1076 	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
1077       desttype = build_array_type_nelts (unsigned_char_type_node,
1078 					 tree_to_uhwi (len));
1079       srctype = desttype;
1080       if (src_align > TYPE_ALIGN (srctype))
1081 	srctype = build_aligned_type (srctype, src_align);
1082       if (dest_align > TYPE_ALIGN (desttype))
1083 	desttype = build_aligned_type (desttype, dest_align);
1084       new_stmt
1085 	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1086 			       fold_build2 (MEM_REF, srctype, src, off0));
1087 set_vop_and_replace:
1088       gimple_move_vops (new_stmt, stmt);
1089       if (!lhs)
1090 	{
1091 	  gsi_replace (gsi, new_stmt, false);
1092 	  return true;
1093 	}
1094       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1095     }
1096 
1097 done:
1098   gimple_seq stmts = NULL;
1099   if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1100     len = NULL_TREE;
1101   else if (code == BUILT_IN_MEMPCPY)
1102     {
1103       len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104       dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 			   TREE_TYPE (dest), dest, len);
1106     }
1107   else
1108     gcc_unreachable ();
1109 
1110   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1111   gimple *repl = gimple_build_assign (lhs, dest);
1112   gsi_replace (gsi, repl, false);
1113   return true;
1114 }
1115 
1116 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1117    to built-in memcmp (a, b, len).  */
1118 
1119 static bool
gimple_fold_builtin_bcmp(gimple_stmt_iterator * gsi)1120 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1121 {
1122   tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1123 
1124   if (!fn)
1125     return false;
1126 
1127   /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */
1128 
1129   gimple *stmt = gsi_stmt (*gsi);
1130   tree a = gimple_call_arg (stmt, 0);
1131   tree b = gimple_call_arg (stmt, 1);
1132   tree len = gimple_call_arg (stmt, 2);
1133 
1134   gimple *repl = gimple_build_call (fn, 3, a, b, len);
1135   replace_call_with_call_and_fold (gsi, repl);
1136 
1137   return true;
1138 }
1139 
1140 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1141    to built-in memmove (dest, src, len).  */
1142 
1143 static bool
gimple_fold_builtin_bcopy(gimple_stmt_iterator * gsi)1144 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1145 {
1146   tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1147 
1148   if (!fn)
1149     return false;
1150 
1151   /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1152      it's quivalent to memmove (not memcpy).  Transform bcopy (src, dest,
1153      len) into memmove (dest, src, len).  */
1154 
1155   gimple *stmt = gsi_stmt (*gsi);
1156   tree src = gimple_call_arg (stmt, 0);
1157   tree dest = gimple_call_arg (stmt, 1);
1158   tree len = gimple_call_arg (stmt, 2);
1159 
1160   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1161   gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1162   replace_call_with_call_and_fold (gsi, repl);
1163 
1164   return true;
1165 }
1166 
1167 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1168    to built-in memset (dest, 0, len).  */
1169 
1170 static bool
gimple_fold_builtin_bzero(gimple_stmt_iterator * gsi)1171 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1172 {
1173   tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1174 
1175   if (!fn)
1176     return false;
1177 
1178   /* Transform bzero (dest, len) into memset (dest, 0, len).  */
1179 
1180   gimple *stmt = gsi_stmt (*gsi);
1181   tree dest = gimple_call_arg (stmt, 0);
1182   tree len = gimple_call_arg (stmt, 1);
1183 
1184   gimple_seq seq = NULL;
1185   gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1186   gimple_seq_add_stmt_without_update (&seq, repl);
1187   gsi_replace_with_seq_vops (gsi, seq);
1188   fold_stmt (gsi);
1189 
1190   return true;
1191 }
1192 
/* Fold a call to built-in memset or bzero at *GSI that sets LEN bytes
   of memory to the value C.  Return whether a simplification was made.  */
1195 
1196 static bool
gimple_fold_builtin_memset(gimple_stmt_iterator * gsi,tree c,tree len)1197 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1198 {
1199   gimple *stmt = gsi_stmt (*gsi);
1200   tree etype;
1201   unsigned HOST_WIDE_INT length, cval;
1202 
1203   /* If the LEN parameter is zero, return DEST.  */
1204   if (integer_zerop (len))
1205     {
1206       replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1207       return true;
1208     }
1209 
1210   if (! tree_fits_uhwi_p (len))
1211     return false;
1212 
1213   if (TREE_CODE (c) != INTEGER_CST)
1214     return false;
1215 
1216   tree dest = gimple_call_arg (stmt, 0);
1217   tree var = dest;
1218   if (TREE_CODE (var) != ADDR_EXPR)
1219     return false;
1220 
1221   var = TREE_OPERAND (var, 0);
1222   if (TREE_THIS_VOLATILE (var))
1223     return false;
1224 
1225   etype = TREE_TYPE (var);
1226   if (TREE_CODE (etype) == ARRAY_TYPE)
1227     etype = TREE_TYPE (etype);
1228 
1229   if (!INTEGRAL_TYPE_P (etype)
1230       && !POINTER_TYPE_P (etype))
1231     return NULL_TREE;
1232 
1233   if (! var_decl_component_p (var))
1234     return NULL_TREE;
1235 
1236   length = tree_to_uhwi (len);
1237   if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1238       || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1239 	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1240       || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1241     return NULL_TREE;
1242 
1243   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1244     return NULL_TREE;
1245 
1246   if (!type_has_mode_precision_p (etype))
1247     etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1248 					    TYPE_UNSIGNED (etype));
1249 
1250   if (integer_zerop (c))
1251     cval = 0;
1252   else
1253     {
1254       if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1255 	return NULL_TREE;
1256 
1257       cval = TREE_INT_CST_LOW (c);
1258       cval &= 0xff;
1259       cval |= cval << 8;
1260       cval |= cval << 16;
1261       cval |= (cval << 31) << 1;
1262     }
1263 
1264   var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1265   gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1266   gimple_move_vops (store, stmt);
1267   gsi_insert_before (gsi, store, GSI_SAME_STMT);
1268   if (gimple_call_lhs (stmt))
1269     {
1270       gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1271       gsi_replace (gsi, asgn, false);
1272     }
1273   else
1274     {
1275       gimple_stmt_iterator gsi2 = *gsi;
1276       gsi_prev (gsi);
1277       gsi_remove (&gsi2, true);
1278     }
1279 
1280   return true;
1281 }
1282 
1283 /* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */
1284 
static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  /* SSA_NAMEs are dispatched to this helper's caller (the recursive
     get_range_strlen); only "tree" forms are handled here.  */
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: strip the array access and follow the pointer.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     LENDATA.DECL with size LENDATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  /* When no exact length is known, derive a bound from the size of
     the referenced object or subobject.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			      integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      /* Subtract one for the terminating nul.  */
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  /* An all-ones MAXLEN means "unbounded"; that is only a usable
     result when a range (rather than an exact length) was asked for.  */
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1592 
1593 /* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
1595    the range of lengths cannot be determined, and store all in *PDATA.
1596    For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1597    the maximum constant value.
1598    If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
1599    SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1600    length or if we are unable to determine the length, return false.
1601    VISITED is a bitmap of visited variables.
1602    RKIND determines the kind of value or range to obtain (see
1603    strlen_range_kind).
1604    Set PDATA->DECL if ARG refers to an unterminated constant array.
1605    On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
1607    Return true if *PDATA was successfully populated and false otherwise.  */
1608 
1609 static bool
get_range_strlen(tree arg,bitmap * visited,strlen_range_kind rkind,c_strlen_data * pdata,unsigned eltsize)1610 get_range_strlen (tree arg, bitmap *visited,
1611 		  strlen_range_kind rkind,
1612 		  c_strlen_data *pdata, unsigned eltsize)
1613 {
1614 
1615   if (TREE_CODE (arg) != SSA_NAME)
1616     return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1617 
1618   /* If ARG is registered for SSA update we cannot look at its defining
1619      statement.  */
1620   if (name_registered_for_update_p (arg))
1621     return false;
1622 
1623   /* If we were already here, break the infinite cycle.  */
1624   if (!*visited)
1625     *visited = BITMAP_ALLOC (NULL);
1626   if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1627     return true;
1628 
1629   tree var = arg;
1630   gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1631 
1632   switch (gimple_code (def_stmt))
1633     {
1634       case GIMPLE_ASSIGN:
1635 	/* The RHS of the statement defining VAR must either have a
1636 	   constant length or come from another SSA_NAME with a constant
1637 	   length.  */
1638         if (gimple_assign_single_p (def_stmt)
1639             || gimple_assign_unary_nop_p (def_stmt))
1640           {
1641 	    tree rhs = gimple_assign_rhs1 (def_stmt);
1642 	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1643           }
1644 	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1645 	  {
1646 	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1647 			    gimple_assign_rhs3 (def_stmt) };
1648 
1649 	    for (unsigned int i = 0; i < 2; i++)
1650 	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1651 		{
1652 		  if (rkind != SRK_LENRANGE)
1653 		    return false;
1654 		  /* Set the upper bound to the maximum to prevent
1655 		     it from being adjusted in the next iteration but
1656 		     leave MINLEN and the more conservative MAXBOUND
1657 		     determined so far alone (or leave them null if
1658 		     they haven't been set yet).  That the MINLEN is
1659 		     in fact zero can be determined from MAXLEN being
1660 		     unbounded but the discovered minimum is used for
1661 		     diagnostics.  */
1662 		  pdata->maxlen = build_all_ones_cst (size_type_node);
1663 		}
1664 	    return true;
1665 	  }
1666         return false;
1667 
1668       case GIMPLE_PHI:
1669 	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1670 	   must have a constant length.  */
1671 	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1672           {
1673             tree arg = gimple_phi_arg (def_stmt, i)->def;
1674 
1675             /* If this PHI has itself as an argument, we cannot
1676                determine the string length of this argument.  However,
1677                if we can find a constant string length for the other
1678                PHI args then we can still be sure that this is a
1679                constant string length.  So be optimistic and just
1680                continue with the next argument.  */
1681             if (arg == gimple_phi_result (def_stmt))
1682               continue;
1683 
1684 	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1685 	      {
1686 		if (rkind != SRK_LENRANGE)
1687 		  return false;
1688 		/* Set the upper bound to the maximum to prevent
1689 		   it from being adjusted in the next iteration but
1690 		   leave MINLEN and the more conservative MAXBOUND
1691 		   determined so far alone (or leave them null if
1692 		   they haven't been set yet).  That the MINLEN is
1693 		   in fact zero can be determined from MAXLEN being
1694 		   unbounded but the discovered minimum is used for
1695 		   diagnostics.  */
1696 		pdata->maxlen = build_all_ones_cst (size_type_node);
1697 	      }
1698           }
1699         return true;
1700 
1701       default:
1702         return false;
1703     }
1704 }
1705 
1706 /* Try to obtain the range of the lengths of the string(s) referenced
1707    by ARG, or the size of the largest array ARG refers to if the range
1708    of lengths cannot be determined, and store all in *PDATA which must
1709    be zero-initialized on input except PDATA->MAXBOUND may be set to
1710    a non-null tree node other than INTEGER_CST to request to have it
1711    set to the length of the longest string in a PHI.  ELTSIZE is
1712    the expected size of the string element in bytes: 1 for char and
1713    some power of 2 for wide characters.
1714    Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1715    for optimization.  Returning false means that a nonzero PDATA->MINLEN
1716    doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */
1719 
bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  /* Remember the initial MAXBOUND so a change made by the recursive
     worker can be detected below.  */
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  /* An all-ones MAXLEN means the range could not be determined.  */
  return !integer_all_onesp (pdata->maxlen);
}
1747 
1748 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1749    For ARG of pointer types, NONSTR indicates if the caller is prepared
1750    to handle unterminated strings.   For integer ARG and when RKIND ==
1751    SRK_INT_VALUE, NONSTR must be null.
1752 
1753    If an unterminated array is discovered and our caller handles
1754    unterminated arrays, then bubble up the offending DECL and
1755    return the maximum size.  Otherwise return NULL.  */
1756 
static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded (all-ones means the length is indeterminate).  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size. */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
1791 
1792 
/* Fold a call to the builtin strcpy with arguments DEST and SRC.
   Return true if the call was simplified and replaced in the IL,
   false if no simplification could be made.  */
1796 
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen + memcpy transformation below trades size for speed,
     so skip it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Transform strcpy (DEST, SRC) into memcpy (DEST, SRC, strlen (SRC) + 1)
     now that the length of SRC is known to be the constant LEN.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1856 
/* Fold a call to the builtin strncpy with arguments DEST, SRC, and LEN.
   Return true if the call was simplified and replaced in the IL,
   false if no simplification could be made.  */
1860 
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring (i.e., need
     not be nul-terminated), which suppresses truncation warnings.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorate with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  LEN >= SSIZE here, so copying
     LEN bytes includes the terminating nul.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1931 
1932 /* Fold function call to builtin strchr or strrchr.
1933    If both arguments are constant, evaluate and fold the result,
1934    otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1935    In general strlen is significantly faster than strchr
1936    due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a LHS the result of the call is unused; nothing to fold to.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* Both arguments constant: evaluate the search at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Fold to STR plus the constant offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply when searching for nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise fold str(r)chr (str, 0) to str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2020 
2021 /* Fold function call to builtin strstr.
2022    If both arguments are constant, evaluate and fold the result,
2023    additionally fold strstr (x, "") into x and strstr (x, "c")
2024    into strchr (x, 'c').  */
2025 static bool
gimple_fold_builtin_strstr(gimple_stmt_iterator * gsi)2026 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2027 {
2028   gimple *stmt = gsi_stmt (*gsi);
2029   if (!gimple_call_lhs (stmt))
2030     return false;
2031 
2032   tree haystack = gimple_call_arg (stmt, 0);
2033   tree needle = gimple_call_arg (stmt, 1);
2034 
2035   /* Avoid folding if either argument is not a nul-terminated array.
2036      Defer warning until later.  */
2037   if (!check_nul_terminated_array (NULL_TREE, haystack)
2038       || !check_nul_terminated_array (NULL_TREE, needle))
2039     return false;
2040 
2041   const char *q = c_getstr (needle);
2042   if (q == NULL)
2043     return false;
2044 
2045   if (const char *p = c_getstr (haystack))
2046     {
2047       const char *r = strstr (p, q);
2048 
2049       if (r == NULL)
2050 	{
2051 	  replace_call_with_value (gsi, integer_zero_node);
2052 	  return true;
2053 	}
2054 
2055       tree len = build_int_cst (size_type_node, r - p);
2056       gimple_seq stmts = NULL;
2057       gimple *new_stmt
2058 	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2059 			       haystack, len);
2060       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2061       gsi_replace_with_seq_vops (gsi, stmts);
2062       return true;
2063     }
2064 
2065   /* For strstr (x, "") return x.  */
2066   if (q[0] == '\0')
2067     {
2068       replace_call_with_value (gsi, haystack);
2069       return true;
2070     }
2071 
2072   /* Transform strstr (x, "c") into strchr (x, 'c').  */
2073   if (q[1] == '\0')
2074     {
2075       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2076       if (strchr_fn)
2077 	{
2078 	  tree c = build_int_cst (integer_type_node, q[0]);
2079 	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2080 	  replace_call_with_call_and_fold (gsi, repl);
2081 	  return true;
2082 	}
2083     }
2084 
2085   return false;
2086 }
2087 
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified: either replaced by DST when
   SRC is the empty string, or split into a strlen of DST followed by
   a memcpy of SRC (including its terminating nul) to DST + strlen (DST)
   when the length of SRC is known.  Return false if no simplification
   was possible.  */
2105 
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy split below trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument, so assign DST to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2179 
2180 /* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
2181    are the arguments to the call.  */
2182 
2183 static bool
gimple_fold_builtin_strcat_chk(gimple_stmt_iterator * gsi)2184 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2185 {
2186   gimple *stmt = gsi_stmt (*gsi);
2187   tree dest = gimple_call_arg (stmt, 0);
2188   tree src = gimple_call_arg (stmt, 1);
2189   tree size = gimple_call_arg (stmt, 2);
2190   tree fn;
2191   const char *p;
2192 
2193 
2194   p = c_getstr (src);
2195   /* If the SRC parameter is "", return DEST.  */
2196   if (p && *p == '\0')
2197     {
2198       replace_call_with_value (gsi, dest);
2199       return true;
2200     }
2201 
2202   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2203     return false;
2204 
2205   /* If __builtin_strcat_chk is used, assume strcat is available.  */
2206   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2207   if (!fn)
2208     return false;
2209 
2210   gimple *repl = gimple_build_call (fn, 2, dest, src);
2211   replace_call_with_call_and_fold (gsi, repl);
2212   return true;
2213 }
2214 
2215 /* Simplify a call to the strncat builtin.  */
2216 
static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The transformation below requires both a constant bound and
     a constant source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress any further warning for this statement once one
	     has been issued.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  /* Since LEN >= strlen (SRC) here, strncat is equivalent to strcat.  */
  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2303 
2304 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2305    LEN, and SIZE.  */
2306 
static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE other than all-ones means the object size is known.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2359 
2360 /* Build and append gimple statements to STMTS that would load a first
2361    character of a memory location identified by STR.  LOC is location
2362    of the statement.  */
2363 
2364 static tree
gimple_load_first_char(location_t loc,tree str,gimple_seq * stmts)2365 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2366 {
2367   tree var;
2368 
2369   tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2370   tree cst_uchar_ptr_node
2371     = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2372   tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2373 
2374   tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2375   gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2376   var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2377 
2378   gimple_assign_set_lhs (stmt, var);
2379   gimple_seq_add_stmt_without_update (stmts, stmt);
2380 
2381   return var;
2382 }
2383 
2384 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.  */
2385 
static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND is the constant value of the third argument if there is one,
     otherwise all-ones to mean "no/unknown bound".  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.   LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = c_getstr (str1, &len1);
  const char *p2 = c_getstr (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Both arrays must be nul-terminated for strcmp to be
	     evaluated at compile time.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Give up if the comparison could read past the nearest
	       terminating nul of either array.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* A case-sensitive match implies a case-insensitive one;
	       a nonzero result proves nothing, so only fold on zero.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when the comparison examines at least one character: either
     the bound is known nonzero or the function takes no bound.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
        return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2600 
2601 /* Fold a call to the memchr pointed by GSI iterator.  */
2602 
static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Folding requires both a constant length and a constant character
     representable on the target.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Search only as far as the known string contents extend.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  string_constant (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only fold to null when the entire searched region is within
	     the known contents; otherwise the match might lie in the
	     unknown tail of the array.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Fold to ARG1 plus the constant offset of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No lhs: the call only needs to disappear; a nop keeps the
	       virtual operands consistent.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2670 
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true if the call was simplified, false
   otherwise.  */
2677 
2678 static bool
gimple_fold_builtin_fputs(gimple_stmt_iterator * gsi,tree arg0,tree arg1,bool unlocked)2679 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2680 			   tree arg0, tree arg1,
2681 			   bool unlocked)
2682 {
2683   gimple *stmt = gsi_stmt (*gsi);
2684 
2685   /* If we're using an unlocked function, assume the other unlocked
2686      functions exist explicitly.  */
2687   tree const fn_fputc = (unlocked
2688 			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2689 			 : builtin_decl_implicit (BUILT_IN_FPUTC));
2690   tree const fn_fwrite = (unlocked
2691 			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2692 			  : builtin_decl_implicit (BUILT_IN_FWRITE));
2693 
2694   /* If the return value is used, don't do the transformation.  */
2695   if (gimple_call_lhs (stmt))
2696     return false;
2697 
2698   /* Get the length of the string passed to fputs.  If the length
2699      can't be determined, punt.  */
2700   tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2701   if (!len
2702       || TREE_CODE (len) != INTEGER_CST)
2703     return false;
2704 
2705   switch (compare_tree_int (len, 1))
2706     {
2707     case -1: /* length is 0, delete the call entirely .  */
2708       replace_call_with_value (gsi, integer_zero_node);
2709       return true;
2710 
2711     case 0: /* length is 1, call fputc.  */
2712       {
2713 	const char *p = c_getstr (arg0);
2714 	if (p != NULL)
2715 	  {
2716 	    if (!fn_fputc)
2717 	      return false;
2718 
2719 	    gimple *repl = gimple_build_call (fn_fputc, 2,
2720 					     build_int_cst
2721 					     (integer_type_node, p[0]), arg1);
2722 	    replace_call_with_call_and_fold (gsi, repl);
2723 	    return true;
2724 	  }
2725       }
2726       /* FALLTHROUGH */
2727     case 1: /* length is greater than 1, call fwrite.  */
2728       {
2729 	/* If optimizing for size keep fputs.  */
2730 	if (optimize_function_for_size_p (cfun))
2731 	  return false;
2732 	/* New argument list transforming fputs(string, stream) to
2733 	   fwrite(string, 1, len, stream).  */
2734 	if (!fn_fwrite)
2735 	  return false;
2736 
2737 	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2738 					 size_one_node, len, arg1);
2739 	replace_call_with_call_and_fold (gsi, repl);
2740 	return true;
2741       }
2742     default:
2743       gcc_unreachable ();
2744     }
2745   return false;
2746 }
2747 
2748 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2749    DEST, SRC, LEN, and SIZE are the arguments to the call.
2750    IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
2751    code of the builtin.  If MAXLEN is not NULL, it is maximum length
2752    passed as third argument.  */
2753 
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* IGNORE is true when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* The destination object size must be a compile-time constant for
     the safety check below.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all-ones means "object size unknown", in which case the
     checking call is always equivalent to the unchecked one.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Dropping the check is only safe when SIZE >= MAXLEN.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2848 
2849 /* Fold a call to the __st[rp]cpy_chk builtin.
2850    DEST, SRC, and SIZE are the arguments to the call.
2851    IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
2852    code of the builtin.  If MAXLEN is not NULL, it is maximum length of
2853    strings passed as second argument.  */
2854 
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* IGNORE is true when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* SIZE of all-ones means "object size unknown": the checking call
     is then always equivalent to the unchecked one.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes so the terminating nul comes along.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Only fold when the copy (MAXLEN chars plus the nul) provably
	 fits, i.e. MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2953 
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code of
   the builtin.  Return true if the call was simplified, false
   otherwise.  */
2958 
2959 static bool
gimple_fold_builtin_stxncpy_chk(gimple_stmt_iterator * gsi,tree dest,tree src,tree len,tree size,enum built_in_function fcode)2960 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2961 				 tree dest, tree src,
2962 				 tree len, tree size,
2963 				 enum built_in_function fcode)
2964 {
2965   gimple *stmt = gsi_stmt (*gsi);
2966   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2967   tree fn;
2968 
2969   if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2970     {
2971        /* If return value of __stpncpy_chk is ignored,
2972           optimize into __strncpy_chk.  */
2973        fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2974        if (fn)
2975 	 {
2976 	   gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2977 	   replace_call_with_call_and_fold (gsi, repl);
2978 	   return true;
2979 	 }
2980     }
2981 
2982   if (! tree_fits_uhwi_p (size))
2983     return false;
2984 
2985   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2986   if (! integer_all_onesp (size))
2987     {
2988       if (! tree_fits_uhwi_p (len))
2989 	{
2990 	  /* If LEN is not constant, try MAXLEN too.
2991 	     For MAXLEN only allow optimizing into non-_ocs function
2992 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2993 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2994 	    return false;
2995 	}
2996       else
2997 	maxlen = len;
2998 
2999       if (tree_int_cst_lt (size, maxlen))
3000 	return false;
3001     }
3002 
3003   /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
3004   fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3005 			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3006   if (!fn)
3007     return false;
3008 
3009   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3010   replace_call_with_call_and_fold (gsi, repl);
3011   return true;
3012 }
3013 
/* Fold a function call to builtin stpcpy with arguments DEST and SRC.
   Return true if the call was simplified, false otherwise.  */
3016 
static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      /* No constant length: still detect unterminated arrays so we can
	 warn below.  */
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      /* Mark the statement so the warning is not repeated.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes so the terminating nul comes along.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy takes over the original call's virtual operands.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3088 
/* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   Return true if the call was simplified into a plain {,v}snprintf
   call, false if a normal call should be emitted instead.  */
3094 
static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means the object size is unknown; the checking
     call is then always equivalent to the unchecked one.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Dropping the check is only safe when SIZE >= MAXLEN.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* Make sure the target's '%' and "%s" representations are known.  */
  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift any remaining varargs down over the dropped FLAG and SIZE
     slots, then shrink the statement by two operands.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3169 
/* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   Return true if the call was simplified into a plain {,v}sprintf
   call, false if a normal call should be emitted instead.  */
3174 
static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN, if determinable below, is the number of characters the call
     will write (not counting the terminating nul).  */
  len = NULL_TREE;

  /* Make sure the target's '%' and "%s" representations are known.  */
  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means the object size is unknown; otherwise the
     output (LEN chars plus the nul) must provably fit, i.e. LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift any remaining varargs down over the dropped FLAG and SIZE
     slots, then shrink the statement by two operands.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3265 
3266 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3267    ORIG may be null if this is a 2-argument call.  We don't attempt to
3268    simplify calls with more than 3 arguments.
3269 
3270    Return true if simplification was possible, otherwise false.  */
3271 
bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  /* Make sure the target's '%' and "%s" representations are known.  */
  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is just strlen (fmt).  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* When the result is used we also need the length of ORIG to
	 materialize the return value; punt if it is unknown.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3399 
3400 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3401    FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
3402    attempt to simplify calls with more than 4 arguments.
3403 
3404    Return true if simplification was possible, otherwise false.  */
3405 
bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* We only handle 'snprintf (dest, destsize, fmt)' and
     'snprintf (dest, destsize, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  /* Make sure the target's '%' and "%s" representations are known.  */
  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, which here is just LEN.  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG must be constant to prove the copy fits.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3536 
3537 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3538    FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
3539    more than 3 arguments, and ARG may be null in the 2-argument case.
3540 
   Return true if the call was simplified (and replaced in the statement
   stream), false if no simplification was possible.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */
3544 
static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A %-free format consumes no arguments, so a trailing ARG is
	 only acceptable for the va_list variants (where ARG is the
	 va_list itself); otherwise punt.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3637 
3638 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3639    FMT and ARG are the arguments to the call; we don't fold cases with
3640    more than 2 arguments, and ARG may be null if this is a 1-argument case.
3641 
   Return true if the call was simplified (and replaced in the statement
   stream), false if no simplification was possible.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */
3645 
static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", str) and printf (<%-free literal>) together:
     in both cases the output is a known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.
	     A trailing ARG would then go unused, which is only valid
	     for the va_list variants (where ARG is the va_list).  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").
	     LEN >= 2 here since str[0] and str[1] are both non-NUL.
	     The (size_t)(int) round-trip check guards against LEN not
	     fitting in a positive int.  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3789 
3790 
3791 
/* Fold a call to __builtin_strlen when the length of the argument can
   be determined or usefully bounded.  */
3793 
static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No constant range known; fall back to the widest plausible
	 range [0, max_object_size () - 2] (the -2 presumably leaves
	 room for a terminating NUL within a maximal object — confirm
	 against max_object_size's definition).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  Note MINLEN == MAXLEN can
	 only hold when both came from the constant-range branch above
	 (the fallback sets MINLEN to 0 and MAXLEN to a large positive
	 value), so LENDATA.MINLEN is a valid INTEGER_CST here.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
3842 
3843 /* Fold a call to __builtin_acc_on_device.  */
3844 
3845 static bool
gimple_fold_builtin_acc_on_device(gimple_stmt_iterator * gsi,tree arg0)3846 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3847 {
3848   /* Defer folding until we know which compiler we're in.  */
3849   if (symtab->state != EXPANSION)
3850     return false;
3851 
3852   unsigned val_host = GOMP_DEVICE_HOST;
3853   unsigned val_dev = GOMP_DEVICE_NONE;
3854 
3855 #ifdef ACCEL_COMPILER
3856   val_host = GOMP_DEVICE_NOT_HOST;
3857   val_dev = ACCEL_COMPILER_acc_device;
3858 #endif
3859 
3860   location_t loc = gimple_location (gsi_stmt (*gsi));
3861 
3862   tree host_eq = make_ssa_name (boolean_type_node);
3863   gimple *host_ass = gimple_build_assign
3864     (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3865   gimple_set_location (host_ass, loc);
3866   gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3867 
3868   tree dev_eq = make_ssa_name (boolean_type_node);
3869   gimple *dev_ass = gimple_build_assign
3870     (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3871   gimple_set_location (dev_ass, loc);
3872   gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3873 
3874   tree result = make_ssa_name (boolean_type_node);
3875   gimple *result_ass = gimple_build_assign
3876     (result, BIT_IOR_EXPR, host_eq, dev_eq);
3877   gimple_set_location (result_ass, loc);
3878   gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3879 
3880   replace_call_with_value (gsi, result);
3881 
3882   return true;
3883 }
3884 
3885 /* Fold realloc (0, n) -> malloc (n).  */
3886 
3887 static bool
gimple_fold_builtin_realloc(gimple_stmt_iterator * gsi)3888 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3889 {
3890   gimple *stmt = gsi_stmt (*gsi);
3891   tree arg = gimple_call_arg (stmt, 0);
3892   tree size = gimple_call_arg (stmt, 1);
3893 
3894   if (operand_equal_p (arg, null_pointer_node, 0))
3895     {
3896       tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3897       if (fn_malloc)
3898 	{
3899 	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
3900 	  replace_call_with_call_and_fold (gsi, repl);
3901 	  return true;
3902 	}
3903     }
3904   return false;
3905 }
3906 
3907 /* Fold the non-target builtin at *GSI and return whether any simplification
3908    was made.  */
3909 
static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  /* Dispatch to the dedicated folder for each builtin; anything not
     handled falls through to the generic tree-level folder below.  */
  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the legacy names for strchr/strrchr; the second
       argument of gimple_fold_builtin_strchr selects forward (false)
       or reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf-family folders only handle the 2-or-3 (fprintf),
       3-or-4 (fprintf_chk), 1-or-2 (printf) and 2-or-3 (printf_chk)
       argument forms; other arities fall through unfolded.  The _chk
       variants carry an extra flag argument before the format.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      /* If the result is unused, strip conversions; otherwise make it
	 match the call's return type before substituting it in.  */
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
4083 
4084 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4085    function calls to constants, where possible.  */
4086 
4087 static tree
fold_internal_goacc_dim(const gimple * call)4088 fold_internal_goacc_dim (const gimple *call)
4089 {
4090   int axis = oacc_get_ifn_dim_arg (call);
4091   int size = oacc_get_fn_dim_size (current_function_decl, axis);
4092   tree result = NULL_TREE;
4093   tree type = TREE_TYPE (gimple_call_lhs (call));
4094 
4095   switch (gimple_call_internal_fn (call))
4096     {
4097     case IFN_GOACC_DIM_POS:
4098       /* If the size is 1, we know the answer.  */
4099       if (size == 1)
4100 	result = build_int_cst (type, 0);
4101       break;
4102     case IFN_GOACC_DIM_SIZE:
4103       /* If the size is not dynamic, we know the answer.  */
4104       if (size)
4105 	result = build_int_cst (type, size);
4106       break;
4107     default:
4108       break;
4109     }
4110 
4111   return result;
4112 }
4113 
4114 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4115    for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4116    &var where var is only addressable because of such calls.  */
4117 
bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* The call must be a 6-argument normal builtin with virtual
     operands, and the transformation is only worthwhile (and safe)
     when inlining atomics under optimization without TSan/ASan.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The second argument must be the address of a plain SSA-able
     variable, so its address-taken-ness can later be removed.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak flag must be a literal 0 or 1 so it can be encoded into
     the internal-function flag operand.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* The third parameter type of the builtin gives the integral mode
     the compare-and-swap must be expandable in.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* The expected variable must occupy exactly the access size.  */
  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
4179 
4180 /* Fold
4181      r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4182    into
4183      _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4184      i = IMAGPART_EXPR <t>;
4185      r = (_Bool) i;
4186      e = REALPART_EXPR <t>;  */
4187 
void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integral type of the builtin's third parameter; the
     internal fn returns a complex of it: real part = old value,
     imag part = success flag.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* The caller (optimize_atomic_compare_exchange_p) guaranteed arg 1
     is &VAR for an SSA-able VAR.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the loaded value in ITYPE if the types differ.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness and access size into one flag: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* If the original call could throw, follow-up statements must go
	 on the fallthru edge rather than after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  stored back into EXPECTED below.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the old value back to EXPECTED's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave GSI pointing at the initial load of EXPECTED so the caller
     revisits the whole replacement sequence.  */
  *gsi = gsiret;
}
4267 
4268 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4269    doesn't fit into TYPE.  The test for overflow should be regardless of
4270    -fwrapv, and even for unsigned types.  */
4271 
4272 bool
arith_overflowed_p(enum tree_code code,const_tree type,const_tree arg0,const_tree arg1)4273 arith_overflowed_p (enum tree_code code, const_tree type,
4274 		    const_tree arg0, const_tree arg1)
4275 {
4276   widest2_int warg0 = widest2_int_cst (arg0);
4277   widest2_int warg1 = widest2_int_cst (arg1);
4278   widest2_int wres;
4279   switch (code)
4280     {
4281     case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4282     case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4283     case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4284     default: gcc_unreachable ();
4285     }
4286   signop sign = TYPE_SIGN (type);
4287   if (sign == UNSIGNED && wi::neg_p (wres))
4288     return true;
4289   return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4290 }
4291 
4292 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4293    for the memory it references, otherwise return null.  VECTYPE is the
4294    type of the memory vector.  */
4295 
4296 static tree
gimple_fold_mask_load_store_mem_ref(gcall * call,tree vectype)4297 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4298 {
4299   tree ptr = gimple_call_arg (call, 0);
4300   tree alias_align = gimple_call_arg (call, 1);
4301   tree mask = gimple_call_arg (call, 2);
4302   if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4303     return NULL_TREE;
4304 
4305   unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
4306   if (TYPE_ALIGN (vectype) != align)
4307     vectype = build_aligned_type (vectype, align);
4308   tree offset = build_zero_cst (TREE_TYPE (alias_align));
4309   return fold_build2 (MEM_REF, vectype, ptr, offset);
4310 }
4311 
4312 /* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */
4313 
4314 static bool
gimple_fold_mask_load(gimple_stmt_iterator * gsi,gcall * call)4315 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4316 {
4317   tree lhs = gimple_call_lhs (call);
4318   if (!lhs)
4319     return false;
4320 
4321   if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4322     {
4323       gassign *new_stmt = gimple_build_assign (lhs, rhs);
4324       gimple_set_location (new_stmt, gimple_location (call));
4325       gimple_move_vops (new_stmt, call);
4326       gsi_replace (gsi, new_stmt, false);
4327       return true;
4328     }
4329   return false;
4330 }
4331 
4332 /* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */
4333 
4334 static bool
gimple_fold_mask_store(gimple_stmt_iterator * gsi,gcall * call)4335 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4336 {
4337   tree rhs = gimple_call_arg (call, 3);
4338   if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4339     {
4340       gassign *new_stmt = gimple_build_assign (lhs, rhs);
4341       gimple_set_location (new_stmt, gimple_location (call));
4342       gimple_move_vops (new_stmt, call);
4343       gsi_replace (gsi, new_stmt, false);
4344       return true;
4345     }
4346   return false;
4347 }
4348 
4349 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4350    The statement may be replaced by another statement, e.g., if the call
4351    simplifies to a constant value. Return true if any changes were made.
4352    It is assumed that the operands have been previously folded.  */
4353 
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  When dumping, warn about targets that
	     contradict the type inheritance graph.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Devirtualize when the type inheritance graph proves there is
	     at most one possible target (zero targets means the call is
	     unreachable).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
		 		   targets.length () == 1
		  		   ? targets[0]->name ()
		  		   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call can never be executed.
		     Replace it with a trap via __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise fold *& in the static chain operand too.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement; stop here when folding
     in place.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
	/* Machine-specific builtins are folded by the target hook.  */
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE is the arithmetic operation an IFN_UBSAN_CHECK_* or
	 IFN_*_OVERFLOW call performs; CPLX_RESULT is true for the
	 latter, which return a complex value of (result, overflow).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* Drop the check when the object size is unknown (all ones)
	       or the constant offset is within the constant size.  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never overflow the pointer.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when the constant index is known to
	       be within the constant bound.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      /* Try to simplify the checked/overflow arithmetic selected above.  */
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For IFN_*_OVERFLOW the computation type is the element
		 type of the complex lhs, if any.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* The operation overflows: record it in the complex
		     result, or give up for the UBSAN checks.  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The result doesn't fit in TYPE; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      /* Commit the folded RESULT, wrapping it in a COMPLEX_EXPR together
	 with the overflow flag for the IFN_*_OVERFLOW functions.  */
      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4681 
4682 
4683 /* Return true whether NAME has a use on STMT.  */
4684 
4685 static bool
has_use_on_stmt(tree name,gimple * stmt)4686 has_use_on_stmt (tree name, gimple *stmt)
4687 {
4688   imm_use_iterator iter;
4689   use_operand_p use_p;
4690   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4691     if (USE_STMT (use_p) == stmt)
4692       return true;
4693   return false;
4694 }
4695 
4696 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4697    gimple_simplify.
4698 
4699    Replaces *GSI with the simplification result in RCODE and OPS
4700    and the associated statements in *SEQ.  Does the replacement
4701    according to INPLACE and returns true if the operation succeeded.  */
4702 
static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* The first operand may itself be a comparison tree; check its
     operands for abnormals the same way.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For a GIMPLE_COND rewrite its condition from the simplification
	 result: either a comparison, an SSA name (compared != 0), a
	 constant (folds the condition to true/false) or, when allowed,
	 a new computation pushed to *SEQ.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For an assignment replace the RHS; when folding in place this
	 is only allowed if the new RHS does not need more operand slots
	 than the old statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* The simplification is a call to the same (internal or builtin)
	 function; only the arguments changed.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Anything else (e.g. a call simplified to something different):
	 materialize the result into *SEQ and replace the whole
	 statement, keeping virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4836 
4837 /* Canonicalize MEM_REFs invariant address operand after propagation.  */
4838 
static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit position (IDX) and the bit one past
		     the accessed element (EXT); only rewrite when the
		     access is fully within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references down to the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  /* Fold the address to base + constant offset and fold the
	     offset into the MEM_REF's offset operand.  */
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* Debug statements may contain addresses we cannot
		 decompose; anywhere else this cannot happen.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4961 
4962 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
4963    distinguishes both cases.  */
4964 
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether warnings are suppressed on STMT so that
     fold_undefer_overflow_warnings can honor that below.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in call arguments and lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      /* When folding in place pass a NULL sequence so gimple_simplify
	 does not create new statements.  */
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; reload it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	/* Generic RHS folding; only install a new RHS in place if it
	   does not need more operand slots than the old one had.  */
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Whether an input may be folded to a memory reference
	       depends on its constraint.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  /* Fold *& in debug bind values, also looking through an
	     outer ADDR_EXPR.  */
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	/* Propagate the (valueized) return value.  */
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  /* The statement may have been replaced above; reload it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5285 
5286 /* Valueziation callback that ends up not following SSA edges.  */
5287 
tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells the match-and-simplify machinery not to
     look through the definition of any SSA name.  */
  return NULL_TREE;
}
5293 
5294 /* Valueization callback that ends up following single-use SSA edges only.  */
5295 
5296 tree
follow_single_use_edges(tree val)5297 follow_single_use_edges (tree val)
5298 {
5299   if (TREE_CODE (val) == SSA_NAME
5300       && !has_single_use (val))
5301     return NULL_TREE;
5302   return val;
5303 }
5304 
5305 /* Valueization callback that follows all SSA edges.  */
5306 
tree
follow_all_ssa_edges (tree val)
{
  /* Returning VAL unchanged lets the match-and-simplify machinery look
     through every SSA definition it encounters.  */
  return val;
}
5312 
5313 /* Fold the statement pointed to by GSI.  In some cases, this function may
5314    replace the whole statement with a new one.  Returns true iff folding
5315    makes any changes.
5316    The statement pointed to by GSI should be in valid gimple form but may
5317    be in unfolded state as resulting from for example constant propagation
5318    which can produce *&x = 0.  */
5319 
5320 bool
fold_stmt(gimple_stmt_iterator * gsi)5321 fold_stmt (gimple_stmt_iterator *gsi)
5322 {
5323   return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5324 }
5325 
5326 bool
fold_stmt(gimple_stmt_iterator * gsi,tree (* valueize)(tree))5327 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5328 {
5329   return fold_stmt_1 (gsi, false, valueize);
5330 }
5331 
5332 /* Perform the minimal folding on statement *GSI.  Only operations like
5333    *&x created by constant propagation are handled.  The statement cannot
5334    be replaced with a new one.  Return true if the statement was
5335    changed, false otherwise.
5336    The statement *GSI should be in valid gimple form but may
5337    be in unfolded state as resulting from for example constant propagation
5338    which can produce *&x = 0.  */
5339 
5340 bool
fold_stmt_inplace(gimple_stmt_iterator * gsi)5341 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5342 {
5343   gimple *stmt = gsi_stmt (*gsi);
5344   bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5345   gcc_assert (gsi_stmt (*gsi) == stmt);
5346   return changed;
5347 }
5348 
5349 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5350    if EXPR is null or we don't know how.
5351    If non-null, the result always has boolean type.  */
5352 
5353 static tree
canonicalize_bool(tree expr,bool invert)5354 canonicalize_bool (tree expr, bool invert)
5355 {
5356   if (!expr)
5357     return NULL_TREE;
5358   else if (invert)
5359     {
5360       if (integer_nonzerop (expr))
5361 	return boolean_false_node;
5362       else if (integer_zerop (expr))
5363 	return boolean_true_node;
5364       else if (TREE_CODE (expr) == SSA_NAME)
5365 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5366 			    build_int_cst (TREE_TYPE (expr), 0));
5367       else if (COMPARISON_CLASS_P (expr))
5368 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5369 			    boolean_type_node,
5370 			    TREE_OPERAND (expr, 0),
5371 			    TREE_OPERAND (expr, 1));
5372       else
5373 	return NULL_TREE;
5374     }
5375   else
5376     {
5377       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5378 	return expr;
5379       if (integer_nonzerop (expr))
5380 	return boolean_true_node;
5381       else if (integer_zerop (expr))
5382 	return boolean_false_node;
5383       else if (TREE_CODE (expr) == SSA_NAME)
5384 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
5385 			    build_int_cst (TREE_TYPE (expr), 0));
5386       else if (COMPARISON_CLASS_P (expr))
5387 	return fold_build2 (TREE_CODE (expr),
5388 			    boolean_type_node,
5389 			    TREE_OPERAND (expr, 0),
5390 			    TREE_OPERAND (expr, 1));
5391       else
5392 	return NULL_TREE;
5393     }
5394 }
5395 
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining assignment: if it computes
	 exactly (OP1 CODE OP2), the two are equivalent.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) preserve the inner comparison's
	     sense, so recurse with the same code.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != 1) invert it; this is only valid
	     for non-trapping (honor_nans == false) inversion.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5453 
5454 /* Check to see if two boolean expressions OP1 and OP2 are logically
5455    equivalent.  */
5456 
5457 static bool
same_bool_result_p(const_tree op1,const_tree op2)5458 same_bool_result_p (const_tree op1, const_tree op2)
5459 {
5460   /* Simple cases first.  */
5461   if (operand_equal_p (op1, op2, 0))
5462     return true;
5463 
5464   /* Check the cases where at least one of the operands is a comparison.
5465      These are a bit smarter than operand_equal_p in that they apply some
5466      identifies on SSA_NAMEs.  */
5467   if (COMPARISON_CLASS_P (op2)
5468       && same_bool_comparison_p (op1, TREE_CODE (op2),
5469 				 TREE_OPERAND (op2, 0),
5470 				 TREE_OPERAND (op2, 1)))
5471     return true;
5472   if (COMPARISON_CLASS_P (op1)
5473       && same_bool_comparison_p (op2, TREE_CODE (op1),
5474 				 TREE_OPERAND (op1, 0),
5475 				 TREE_OPERAND (op1, 1)))
5476     return true;
5477 
5478   /* Default case.  */
5479   return false;
5480 }
5481 
5482 /* Forward declarations for some mutually recursive functions.  */
5483 
5484 static tree
5485 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5486 		   enum tree_code code2, tree op2a, tree op2b);
5487 static tree
5488 and_var_with_comparison (tree type, tree var, bool invert,
5489 			 enum tree_code code2, tree op2a, tree op2b);
5490 static tree
5491 and_var_with_comparison_1 (tree type, gimple *stmt,
5492 			   enum tree_code code2, tree op2a, tree op2b);
5493 static tree
5494 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5495 		  enum tree_code code2, tree op2a, tree op2b);
5496 static tree
5497 or_var_with_comparison (tree, tree var, bool invert,
5498 			enum tree_code code2, tree op2a, tree op2b);
5499 static tree
5500 or_var_with_comparison_1 (tree, gimple *stmt,
5501 			  enum tree_code code2, tree op2a, tree op2b);
5502 
5503 /* Helper function for and_comparisons_1:  try to simplify the AND of the
5504    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5505    If INVERT is true, invert the value of the VAR before doing the AND.
5506    Return NULL_EXPR if we can't simplify this to a single expression.  */
5507 
5508 static tree
and_var_with_comparison(tree type,tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)5509 and_var_with_comparison (tree type, tree var, bool invert,
5510 			 enum tree_code code2, tree op2a, tree op2b)
5511 {
5512   tree t;
5513   gimple *stmt = SSA_NAME_DEF_STMT (var);
5514 
5515   /* We can only deal with variables whose definitions are assignments.  */
5516   if (!is_gimple_assign (stmt))
5517     return NULL_TREE;
5518 
5519   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5520      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5521      Then we only have to consider the simpler non-inverted cases.  */
5522   if (invert)
5523     t = or_var_with_comparison_1 (type, stmt,
5524 				  invert_tree_comparison (code2, false),
5525 				  op2a, op2b);
5526   else
5527     t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5528   return canonicalize_bool (t, invert);
5529 }
5530 
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  /* var AND (var != 0) => var.  */
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) and (op2a != 1) test it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  /* var AND (var == 0) => false.  */
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
5695 
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test NAME for falsehood, so the
	 definition must be considered inverted.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant false argument makes the whole AND
			 false for this PHI edge.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      /* A constant true argument leaves just the second
			 comparison as the result for this edge.  */
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* FALLTHRU - non-boolean PHIs are not handled.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5820 
/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
   : try to simplify the AND/OR of the ssa variable VAR with the comparison
   specified by (OP2A CODE2 OP2B) from match.pd.  Return NULL_EXPR if we can't
   simplify this to a single expression.  As we are going to lower the cost
   of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  The names only exist so
     the match.pd machinery can follow their defining statements; they
     are never entered into the SSA name table.  */
  tree lhs1 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Ask match.pd to simplify (lhs1 CODE lhs2), following the fake
     definitions built above.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* The stack-allocated names must not escape; rebuild the
	     comparisons they stand for.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  /* Likewise, a result still referencing the fake names is not
	     usable outside this function.  */
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
5897 
5898 /* Try to simplify the AND of two comparisons, specified by
5899    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5900    If this can be simplified to a single expression (without requiring
5901    introducing more SSA variables to hold intermediate values),
5902    return the resulting tree.  Otherwise return NULL_TREE.
5903    If the result expression is non-null, it has boolean type.  */
5904 
5905 tree
maybe_fold_and_comparisons(tree type,enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)5906 maybe_fold_and_comparisons (tree type,
5907 			    enum tree_code code1, tree op1a, tree op1b,
5908 			    enum tree_code code2, tree op2a, tree op2b)
5909 {
5910   if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5911     return t;
5912 
5913   if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5914     return t;
5915 
5916   if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5917 						     op1a, op1b, code2, op2a,
5918 						     op2b))
5919     return t;
5920 
5921   return NULL_TREE;
5922 }
5923 
5924 /* Helper function for or_comparisons_1:  try to simplify the OR of the
5925    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5926    If INVERT is true, invert the value of VAR before doing the OR.
5927    Return NULL_EXPR if we can't simplify this to a single expression.  */
5928 
5929 static tree
or_var_with_comparison(tree type,tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)5930 or_var_with_comparison (tree type, tree var, bool invert,
5931 			enum tree_code code2, tree op2a, tree op2b)
5932 {
5933   tree t;
5934   gimple *stmt = SSA_NAME_DEF_STMT (var);
5935 
5936   /* We can only deal with variables whose definitions are assignments.  */
5937   if (!is_gimple_assign (stmt))
5938     return NULL_TREE;
5939 
5940   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5941      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5942      Then we only have to consider the simpler non-inverted cases.  */
5943   if (invert)
5944     t = and_var_with_comparison_1 (type, stmt,
5945 				   invert_tree_comparison (code2, false),
5946 				   op2a, op2b);
5947   else
5948     t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5949   return canonicalize_bool (t, invert);
5950 }
5951 
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  /* var OR (var != 0) => var.  */
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) and (op2a != 1) test it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  /* var OR (var == 0) => true.  */
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
6117 
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for being
	 false, i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      /* RESULT accumulates the common simplified value; it stays
		 NULL_TREE until the first argument contributes one.  */
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A constant argument that is true (after applying
			 INVERT) forces the OR to true for this edge;
			 otherwise the OR degenerates to the second
			 comparison.  Either way the value must agree
			 with what previous arguments produced.  */
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6242 
6243 /* Try to simplify the OR of two comparisons, specified by
6244    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6245    If this can be simplified to a single expression (without requiring
6246    introducing more SSA variables to hold intermediate values),
6247    return the resulting tree.  Otherwise return NULL_TREE.
6248    If the result expression is non-null, it has boolean type.  */
6249 
6250 tree
maybe_fold_or_comparisons(tree type,enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)6251 maybe_fold_or_comparisons (tree type,
6252 			   enum tree_code code1, tree op1a, tree op1b,
6253 			   enum tree_code code2, tree op2a, tree op2b)
6254 {
6255   if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6256     return t;
6257 
6258   if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6259     return t;
6260 
6261   if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6262 						     op1a, op1b, code2, op2a,
6263 						     op2b))
6264     return t;
6265 
6266   return NULL_TREE;
6267 }
6268 
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   GVALUEIZE is handed to gimple_simplify for following SSA use-def
   edges while VALUEIZE is used for the plain SSA name lookups below;
   the two callbacks may differ in how aggressively they follow edges.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* gimple_simplify did not produce a result; fall back to manual
     handling per statement and RHS class.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR covering all subparts can become a
		 VECTOR_CST if every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

              if (kind == tcc_reference)
		{
		  /* References into SSA names fold via the corresponding
		     unary/ternary folders on the valueized operand.  */
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Rewrite MEM_REF[ssa, off] to MEM_REF[&decl, off]
			 when the pointer valueizes to an invariant address
			 so the aggregate folder below can see the decl.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

          case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we cant reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put a possible constant into op1 so only one
		       pattern check below is needed.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map the UBSAN checking functions to the tree code they
	       check; other internal functions are not folded here.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    /* __builtin_expect folds to its first argument when
		       that is constant.  */
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Non-constant operands: a few identities still fold
		   without risking overflow.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Both operands constant: fold, but only accept results
	       that do not overflow (the checks would have trapped).  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    /* Valueize all arguments and try the generic builtin folder.  */
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					 gimple_call_return_type (call_stmt),
					 fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6574 
6575 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6576    Returns NULL_TREE if folding to a constant is not possible, otherwise
6577    returns a constant according to is_gimple_min_invariant.  */
6578 
6579 tree
gimple_fold_stmt_to_constant(gimple * stmt,tree (* valueize)(tree))6580 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6581 {
6582   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6583   if (res && is_gimple_min_invariant (res))
6584     return res;
6585   return NULL_TREE;
6586 }
6587 
6588 
6589 /* The following set of functions are supposed to fold references using
6590    their constant initializers.  */
6591 
6592 /* See if we can find constructor defining value of BASE.
6593    When we know the consructor with constant offset (such as
6594    base is array[40] and we do know constructor of array), then
6595    BIT_OFFSET is adjusted accordingly.
6596 
6597    As a special case, return error_mark_node when constructor
6598    is not explicitly available, but it is known to be zero
6599    such as 'static const int a;'.  */
6600 static tree
get_base_constructor(tree base,poly_int64_pod * bit_offset,tree (* valueize)(tree))6601 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6602 		      tree (*valueize)(tree))
6603 {
6604   poly_int64 bit_offset2, size, max_size;
6605   bool reverse;
6606 
6607   if (TREE_CODE (base) == MEM_REF)
6608     {
6609       poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6610       if (!boff.to_shwi (bit_offset))
6611 	return NULL_TREE;
6612 
6613       if (valueize
6614 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6615 	base = valueize (TREE_OPERAND (base, 0));
6616       if (!base || TREE_CODE (base) != ADDR_EXPR)
6617         return NULL_TREE;
6618       base = TREE_OPERAND (base, 0);
6619     }
6620   else if (valueize
6621 	   && TREE_CODE (base) == SSA_NAME)
6622     base = valueize (base);
6623 
6624   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
6625      DECL_INITIAL.  If BASE is a nested reference into another
6626      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6627      the inner reference.  */
6628   switch (TREE_CODE (base))
6629     {
6630     case VAR_DECL:
6631     case CONST_DECL:
6632       {
6633 	tree init = ctor_for_folding (base);
6634 
6635 	/* Our semantic is exact opposite of ctor_for_folding;
6636 	   NULL means unknown, while error_mark_node is 0.  */
6637 	if (init == error_mark_node)
6638 	  return NULL_TREE;
6639 	if (!init)
6640 	  return error_mark_node;
6641 	return init;
6642       }
6643 
6644     case VIEW_CONVERT_EXPR:
6645       return get_base_constructor (TREE_OPERAND (base, 0),
6646 				   bit_offset, valueize);
6647 
6648     case ARRAY_REF:
6649     case COMPONENT_REF:
6650       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6651 				      &reverse);
6652       if (!known_size_p (max_size) || maybe_ne (size, max_size))
6653 	return NULL_TREE;
6654       *bit_offset +=  bit_offset2;
6655       return get_base_constructor (base, bit_offset, valueize);
6656 
6657     case CONSTRUCTOR:
6658       return base;
6659 
6660     default:
6661       if (CONSTANT_CLASS_P (base))
6662 	return base;
6663 
6664       return NULL_TREE;
6665     }
6666 }
6667 
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.     When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead. When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* The access spans multiple array elements; encode the relevant
	 elements into a byte buffer and reinterpret that as TYPE.  */
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      /* Past the last explicit initializer the whole access reads
	 as zero.  */
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS  (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  INDEX..MAX_INDEX tracks
	 the array-index range covered by the current ctor element
	 (a RANGE_EXPR covers several indices with one value).  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  /* Clamp the encoded chunk to the buffer tail.  */
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  /* Only the first element can start at a non-zero offset.  */
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      /* Advance to the next ctor element; gaps and the region
		 past the last element read as zero.  */
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  /* The access is covered by a single element; fold into it.  */
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6837 
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.   When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk all initialized fields looking for one overlapping the
     requested bit range [OFFSET, OFFSET + SIZE).  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  Zero stands for
	 "unknown" when FIELD_SIZE is null.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (the exact
	 field bounds are then applied by the no-size handling
	 further below).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's initializer with the access
	     rebased to the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  /* Memory not explicitly mentioned in the constructor reads as zero
     when the expected type is known.  */
  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
6940 
6941 /* CTOR is value initializing memory.  Fold a reference of TYPE and
6942    bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
6943    is zero, attempt to fold a reference to the entire subobject
6944    which OFFSET refers to.  This is used when folding accesses to
6945    string members of aggregates.  When non-null, set *SUBOFF to
6946    the bit offset of the accessed subobject.  */
6947 
tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Use a local dummy so the recursion below can always update
	 *SUBOFF; a caller-provided SUBOFF also tells us whether we
	 are the outermost call (see the fallback below).  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
7037 
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
7041 
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile accesses must be preserved, so never fold them.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literal constants fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset is (idx - low_bound) * unit_size * BITS_PER_UNIT,
		 computed in sizetype precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex-typed operand first, then pick the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
7151 
/* Fold a reference T into constant aggregates without SSA name
   valueization; convenience wrapper around
   fold_const_aggregate_ref_1.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
7157 
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
7163 
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* SIZE is the bit size of one vtable slot.  Convert the byte-based
     OFFSET and slot index TOKEN into a combined bit offset.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  /* The bit offset must land exactly on an element boundary.  */
  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
7267 
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
7275 
7276 tree
gimple_get_virt_method_for_binfo(HOST_WIDE_INT token,tree known_binfo,bool * can_refer)7277 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7278 				  bool *can_refer)
7279 {
7280   unsigned HOST_WIDE_INT offset;
7281   tree v;
7282 
7283   v = BINFO_VTABLE (known_binfo);
7284   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
7285   if (!v)
7286     return NULL_TREE;
7287 
7288   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7289     {
7290       if (can_refer)
7291 	*can_refer = false;
7292       return NULL_TREE;
7293     }
7294   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7295 }
7296 
7297 /* Given a pointer value T, return a simplified version of an
7298    indirection through T, or NULL_TREE if no simplification is
7299    possible.  Note that the resulting type may be different from
7300    the type pointed to in the sense that it is still compatible
7301    from the langhooks point of view. */
7302 
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* May-alias-all references cannot be simplified safely.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold when the access stays within the vector bounds.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must be exactly one element, i.e. the imag part.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7417 
7418 /* Return true if CODE is an operation that when operating on signed
7419    integer types involves undefined behavior on overflow and the
7420    operation can be expressed with unsigned arithmetic.  */
7421 
7422 bool
arith_code_with_undefined_signed_overflow(tree_code code)7423 arith_code_with_undefined_signed_overflow (tree_code code)
7424 {
7425   switch (code)
7426     {
7427     case ABS_EXPR:
7428     case PLUS_EXPR:
7429     case MINUS_EXPR:
7430     case MULT_EXPR:
7431     case NEGATE_EXPR:
7432     case POINTER_PLUS_EXPR:
7433       return true;
7434     default:
7435       return false;
7436     }
7437 }
7438 
7439 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7440    operation that can be transformed to unsigned arithmetic by converting
7441    its operand, carrying out the operation in the corresponding unsigned
7442    type and converting the result back to the original type.
7443 
7444    Returns a sequence of statements that replace STMT and also contain
7445    a modified form of STMT itself.  */
7446 
gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Remember the original LHS; the final conversion assigns to it.  */
  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR has a dedicated unsigned counterpart; all other codes get
     their operands converted to the unsigned type instead.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give the statement a fresh unsigned LHS of the new type.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed or
     pointer) type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7479 
7480 
7481 /* The valueization hook we use for the gimple_build API simplification.
7482    This makes us match fold_buildN behavior by only combining with
7483    statements in the sequence(s) we are currently building.  */
7484 
7485 static tree
gimple_build_valueize(tree op)7486 gimple_build_valueize (tree op)
7487 {
7488   if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7489     return op;
7490   return NULL_TREE;
7491 }
7492 
7493 /* Build the expression CODE OP0 of type TYPE with location LOC,
7494    simplifying it first if possible.  Returns the built
7495    expression value and appends statements possibly defining it
7496    to SEQ.  */
7497 
7498 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0)7499 gimple_build (gimple_seq *seq, location_t loc,
7500 	      enum tree_code code, tree type, tree op0)
7501 {
7502   tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7503   if (!res)
7504     {
7505       res = create_tmp_reg_or_ssa_name (type);
7506       gimple *stmt;
7507       if (code == REALPART_EXPR
7508 	  || code == IMAGPART_EXPR
7509 	  || code == VIEW_CONVERT_EXPR)
7510 	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7511       else
7512 	stmt = gimple_build_assign (res, code, op0);
7513       gimple_set_location (stmt, loc);
7514       gimple_seq_add_stmt_without_update (seq, stmt);
7515     }
7516   return res;
7517 }
7518 
7519 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7520    simplifying it first if possible.  Returns the built
7521    expression value and appends statements possibly defining it
7522    to SEQ.  */
7523 
7524 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1)7525 gimple_build (gimple_seq *seq, location_t loc,
7526 	      enum tree_code code, tree type, tree op0, tree op1)
7527 {
7528   tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7529   if (!res)
7530     {
7531       res = create_tmp_reg_or_ssa_name (type);
7532       gimple *stmt = gimple_build_assign (res, code, op0, op1);
7533       gimple_set_location (stmt, loc);
7534       gimple_seq_add_stmt_without_update (seq, stmt);
7535     }
7536   return res;
7537 }
7538 
7539 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7540    simplifying it first if possible.  Returns the built
7541    expression value and appends statements possibly defining it
7542    to SEQ.  */
7543 
7544 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)7545 gimple_build (gimple_seq *seq, location_t loc,
7546 	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
7547 {
7548   tree res = gimple_simplify (code, type, op0, op1, op2,
7549 			      seq, gimple_build_valueize);
7550   if (!res)
7551     {
7552       res = create_tmp_reg_or_ssa_name (type);
7553       gimple *stmt;
7554       if (code == BIT_FIELD_REF)
7555 	stmt = gimple_build_assign (res, code,
7556 				    build3 (code, type, op0, op1, op2));
7557       else
7558 	stmt = gimple_build_assign (res, code, op0, op1, op2);
7559       gimple_set_location (stmt, loc);
7560       gimple_seq_add_stmt_without_update (seq, stmt);
7561     }
7562   return res;
7563 }
7564 
7565 /* Build the call FN (ARG0) with a result of type TYPE
7566    (or no result if TYPE is void) with location LOC,
7567    simplifying it first if possible.  Returns the built
7568    expression value (or NULL_TREE if TYPE is void) and appends
7569    statements possibly defining it to SEQ.  */
7570 
7571 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0)7572 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7573 	      tree type, tree arg0)
7574 {
7575   tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7576   if (!res)
7577     {
7578       gcall *stmt;
7579       if (internal_fn_p (fn))
7580 	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7581       else
7582 	{
7583 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7584 	  stmt = gimple_build_call (decl, 1, arg0);
7585 	}
7586       if (!VOID_TYPE_P (type))
7587 	{
7588 	  res = create_tmp_reg_or_ssa_name (type);
7589 	  gimple_call_set_lhs (stmt, res);
7590 	}
7591       gimple_set_location (stmt, loc);
7592       gimple_seq_add_stmt_without_update (seq, stmt);
7593     }
7594   return res;
7595 }
7596 
7597 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7598    (or no result if TYPE is void) with location LOC,
7599    simplifying it first if possible.  Returns the built
7600    expression value (or NULL_TREE if TYPE is void) and appends
7601    statements possibly defining it to SEQ.  */
7602 
7603 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0,tree arg1)7604 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7605 	      tree type, tree arg0, tree arg1)
7606 {
7607   tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7608   if (!res)
7609     {
7610       gcall *stmt;
7611       if (internal_fn_p (fn))
7612 	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7613       else
7614 	{
7615 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7616 	  stmt = gimple_build_call (decl, 2, arg0, arg1);
7617 	}
7618       if (!VOID_TYPE_P (type))
7619 	{
7620 	  res = create_tmp_reg_or_ssa_name (type);
7621 	  gimple_call_set_lhs (stmt, res);
7622 	}
7623       gimple_set_location (stmt, loc);
7624       gimple_seq_add_stmt_without_update (seq, stmt);
7625     }
7626   return res;
7627 }
7628 
7629 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7630    (or no result if TYPE is void) with location LOC,
7631    simplifying it first if possible.  Returns the built
7632    expression value (or NULL_TREE if TYPE is void) and appends
7633    statements possibly defining it to SEQ.  */
7634 
7635 tree
gimple_build(gimple_seq * seq,location_t loc,combined_fn fn,tree type,tree arg0,tree arg1,tree arg2)7636 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7637 	      tree type, tree arg0, tree arg1, tree arg2)
7638 {
7639   tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7640 			      seq, gimple_build_valueize);
7641   if (!res)
7642     {
7643       gcall *stmt;
7644       if (internal_fn_p (fn))
7645 	stmt = gimple_build_call_internal (as_internal_fn (fn),
7646 					   3, arg0, arg1, arg2);
7647       else
7648 	{
7649 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7650 	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7651 	}
7652       if (!VOID_TYPE_P (type))
7653 	{
7654 	  res = create_tmp_reg_or_ssa_name (type);
7655 	  gimple_call_set_lhs (stmt, res);
7656 	}
7657       gimple_set_location (stmt, loc);
7658       gimple_seq_add_stmt_without_update (seq, stmt);
7659     }
7660   return res;
7661 }
7662 
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */
7668 
7669 tree
gimple_convert(gimple_seq * seq,location_t loc,tree type,tree op)7670 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7671 {
7672   if (useless_type_conversion_p (type, TREE_TYPE (op)))
7673     return op;
7674   return gimple_build (seq, loc, NOP_EXPR, type, op);
7675 }
7676 
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */
7682 
7683 tree
gimple_convert_to_ptrofftype(gimple_seq * seq,location_t loc,tree op)7684 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7685 {
7686   if (ptrofftype_p (TREE_TYPE (op)))
7687     return op;
7688   return gimple_convert (seq, loc, sizetype, op);
7689 }
7690 
7691 /* Build a vector of type TYPE in which each element has the value OP.
7692    Return a gimple value for the result, appending any new statements
7693    to SEQ.  */
7694 
7695 tree
gimple_build_vector_from_val(gimple_seq * seq,location_t loc,tree type,tree op)7696 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7697 			      tree op)
7698 {
7699   if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7700       && !CONSTANT_CLASS_P (op))
7701     return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7702 
7703   tree res, vec = build_vector_from_val (type, op);
7704   if (is_gimple_val (vec))
7705     return vec;
7706   if (gimple_in_ssa_p (cfun))
7707     res = make_ssa_name (type);
7708   else
7709     res = create_tmp_reg (type);
7710   gimple *stmt = gimple_build_assign (res, vec);
7711   gimple_set_location (stmt, loc);
7712   gimple_seq_add_stmt_without_update (seq, stmt);
7713   return res;
7714 }
7715 
7716 /* Build a vector from BUILDER, handling the case in which some elements
7717    are non-constant.  Return a gimple value for the result, appending any
7718    new instructions to SEQ.
7719 
7720    BUILDER must not have a stepped encoding on entry.  This is because
7721    the function is not geared up to handle the arithmetic that would
7722    be needed in the variable case, and any code building a vector that
7723    is known to be constant should use BUILDER->build () directly.  */
7724 
tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  /* Stepped encodings are not supported, see the function comment.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan the encoded elements; if all are constant the builder can
     produce a VECTOR_CST directly (the fallthrough below).  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant: build a CONSTRUCTOR of
	   all NELTS elements and assign it to a temporary.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note: reuses the outer loop variable I; the outer loop exits
	   via the return below, so this is safe.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements were constant.  */
  return builder->build ();
}
7753 
7754 /* Return true if the result of assignment STMT is known to be non-negative.
7755    If the return value is based on the assumption that signed overflow is
7756    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7757    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7758 
static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  /* Dispatch on the shape of the RHS and delegate to the matching
     fold-const query.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* Conservatively assume ternary operations may be negative.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7787 
7788 /* Return true if return value of call STMT is known to be non-negative.
7789    If the return value is based on the assumption that signed overflow is
7790    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7791    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7792 
7793 static bool
gimple_call_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7794 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7795 				 int depth)
7796 {
7797   tree arg0 = gimple_call_num_args (stmt) > 0 ?
7798     gimple_call_arg (stmt, 0) : NULL_TREE;
7799   tree arg1 = gimple_call_num_args (stmt) > 1 ?
7800     gimple_call_arg (stmt, 1) : NULL_TREE;
7801 
7802   return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7803 					gimple_call_combined_fn (stmt),
7804 					arg0,
7805 					arg1,
7806 					strict_overflow_p, depth);
7807 }
7808 
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7813 
7814 static bool
gimple_phi_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7815 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7816 				int depth)
7817 {
7818   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7819     {
7820       tree arg = gimple_phi_arg_def (stmt, i);
7821       if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7822 	return false;
7823     }
7824   return true;
7825 }
7826 
7827 /* Return true if STMT is known to compute a non-negative value.
7828    If the return value is based on the assumption that signed overflow is
7829    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7830    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7831 
7832 bool
gimple_stmt_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)7833 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7834 				 int depth)
7835 {
7836   switch (gimple_code (stmt))
7837     {
7838     case GIMPLE_ASSIGN:
7839       return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7840 						depth);
7841     case GIMPLE_CALL:
7842       return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7843 					      depth);
7844     case GIMPLE_PHI:
7845       return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7846 					     depth);
7847     default:
7848       return false;
7849     }
7850 }
7851 
7852 /* Return true if the floating-point value computed by assignment STMT
7853    is known to have an integer value.  We also allow +Inf, -Inf and NaN
7854    to be considered integer values. Return false for signaling NaN.
7855 
7856    DEPTH is the current nesting depth of the query.  */
7857 
7858 static bool
gimple_assign_integer_valued_real_p(gimple * stmt,int depth)7859 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7860 {
7861   enum tree_code code = gimple_assign_rhs_code (stmt);
7862   switch (get_gimple_rhs_class (code))
7863     {
7864     case GIMPLE_UNARY_RHS:
7865       return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7866 					  gimple_assign_rhs1 (stmt), depth);
7867     case GIMPLE_BINARY_RHS:
7868       return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7869 					   gimple_assign_rhs1 (stmt),
7870 					   gimple_assign_rhs2 (stmt), depth);
7871     case GIMPLE_TERNARY_RHS:
7872       return false;
7873     case GIMPLE_SINGLE_RHS:
7874       return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7875     case GIMPLE_INVALID_RHS:
7876       break;
7877     }
7878   gcc_unreachable ();
7879 }
7880 
7881 /* Return true if the floating-point value computed by call STMT is known
7882    to have an integer value.  We also allow +Inf, -Inf and NaN to be
7883    considered integer values. Return false for signaling NaN.
7884 
7885    DEPTH is the current nesting depth of the query.  */
7886 
7887 static bool
gimple_call_integer_valued_real_p(gimple * stmt,int depth)7888 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7889 {
7890   tree arg0 = (gimple_call_num_args (stmt) > 0
7891 	       ? gimple_call_arg (stmt, 0)
7892 	       : NULL_TREE);
7893   tree arg1 = (gimple_call_num_args (stmt) > 1
7894 	       ? gimple_call_arg (stmt, 1)
7895 	       : NULL_TREE);
7896   return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7897 				     arg0, arg1, depth);
7898 }
7899 
7900 /* Return true if the floating-point result of phi STMT is known to have
7901    an integer value.  We also allow +Inf, -Inf and NaN to be considered
7902    integer values. Return false for signaling NaN.
7903 
7904    DEPTH is the current nesting depth of the query.  */
7905 
7906 static bool
gimple_phi_integer_valued_real_p(gimple * stmt,int depth)7907 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7908 {
7909   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7910     {
7911       tree arg = gimple_phi_arg_def (stmt, i);
7912       if (!integer_valued_real_single_p (arg, depth + 1))
7913 	return false;
7914     }
7915   return true;
7916 }
7917 
7918 /* Return true if the floating-point value computed by STMT is known
7919    to have an integer value.  We also allow +Inf, -Inf and NaN to be
7920    considered integer values. Return false for signaling NaN.
7921 
7922    DEPTH is the current nesting depth of the query.  */
7923 
7924 bool
gimple_stmt_integer_valued_real_p(gimple * stmt,int depth)7925 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7926 {
7927   switch (gimple_code (stmt))
7928     {
7929     case GIMPLE_ASSIGN:
7930       return gimple_assign_integer_valued_real_p (stmt, depth);
7931     case GIMPLE_CALL:
7932       return gimple_call_integer_valued_real_p (stmt, depth);
7933     case GIMPLE_PHI:
7934       return gimple_phi_integer_valued_real_p (stmt, depth);
7935     default:
7936       return false;
7937     }
7938 }
7939