/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
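
/* For illustration: given  char buf[8];  with unknown contents,
   SRK_STRLEN cannot compute an exact length, while SRK_LENRANGE
   yields the conservative range [0, 7], using the size of the
   enclosing array less one for the terminating nul as the upper
   bound.  */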

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this point we have already decided not to output
        the function body, so we cannot reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }
  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a
     definition or if the definition is going to be output in another
     partition.  Bypass this when gimplifying; all needed functions
     will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
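
/* For example, an initializer of the form  &a p+ 4  (a
   POINTER_PLUS_EXPR with a constant offset) is rewritten below to
   roughly  &MEM[&a + 4],  a form that is_gimple_min_invariant can
   accept.  */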

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
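
/* For instance, REALPART_EXPR applied to a COMPLEX_CST folds to the
   constant's real part below, and a load from a constant aggregate
   may fold to the element's value via fold_const_aggregate_ref.  */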

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */
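
/* For example, if a single store with VUSE .MEM_1 and VDEF .MEM_3 is
   replaced by two stores S1 and S2, S1 receives VUSE .MEM_1 and a
   fresh VDEF (say .MEM_2), while S2 receives VUSE .MEM_2 and the
   original VDEF .MEM_3, keeping the virtual operand chain intact.  */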

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */
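
/* E.g., if SIZE is an SSA name whose recorded value range, once
   intersected with the valid range [0, SSIZE_MAX], contains only
   zero, a memcpy with that size can be folded as if it were zero.  */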

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */
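
/* For example, with suitably aligned pointers and MOVE_MAX >= 8,
   memcpy (d, s, 8) is folded below into a single load/store pair,
   roughly  *(uint64_t *) d = *(uint64_t *) s;  for mempcpy the
   result  d + 8  is additionally returned.  */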

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads and
	 then all stores, inline it that way.  Currently efficiently means
	 that we can load all the memory into a single integer register,
	 which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias, optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between the source and destination type for the access,
	 based on alignment, on whether the access constitutes a register
	 access, and on whether it may actually expose a declaration for
	 SSA rewrite or SRA decomposition.  Also try to expose a string
	 constant; we might be able to concatenate several of them later
	 into a single string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we have chosen an access type, express the other side in
	 terms of it, if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */
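
/* E.g., a use like  bcmp (a, b, n) != 0  simply becomes
   memcmp (a, b, n) != 0;  any zero/nonzero result of memcmp is a
   valid bcmp result.  */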

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold a function call to built-in memset or bzero at *GSI that sets
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */
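
/* For instance, for a 4-byte object  i  this can, in effect, turn
   memset (&i, 0, 4) into the store  i = 0;  and, for constant c,
   memset (&i, c, 4) into a store of the byte value of c replicated
   across the word.  */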

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

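      /* Replicate the byte value of C across the host wide int; e.g.,
	 c = 0xab gives cval = 0xabab...ab.  The final double shift
	 presumably avoids an out-of-range shift by the full width on
	 hosts with a 32-bit HOST_WIDE_INT.  */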
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */
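
/* E.g., for a VAR_DECL  char a[4]  with unknown contents and
   RKIND == SRK_LENRANGE, the code below records PDATA->MINLEN 0 and
   sets VAL (and thus PDATA->MAXBOUND) to 3, the array size less one
   for the terminating nul.  */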

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access, it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }
1605 
1606   if (pdata->maxlen)
1607     {
1608       /* Adjust the more conservative bound if possible/necessary
1609 	 and fail otherwise.  */
1610       if (rkind != SRK_STRLEN)
1611 	{
1612 	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1613 	      || TREE_CODE (val) != INTEGER_CST)
1614 	    return false;
1615 
1616 	  if (tree_int_cst_lt (pdata->maxlen, val))
1617 	    pdata->maxlen = val;
1618 	  return true;
1619 	}
1620       else if (simple_cst_equal (val, pdata->maxlen) != 1)
1621 	{
1622 	  /* Fail if the length of this ARG is different from that
1623 	     previously determined from another ARG.  */
1624 	  return false;
1625 	}
1626     }
1627 
1628   pdata->maxlen = val;
1629   return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1630 }
1631 
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

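/* As an illustrative sketch (the SSA names here are hypothetical), for
   RKIND == SRK_LENRANGE and GIMPLE like

     s_1 = flag_2 ? "ab" : "wxyz";
     n_3 = __builtin_strlen (s_1);

   following the use-def chain of S_1 visits both literals and yields
   PDATA->MINLEN == 2 and PDATA->MAXLEN == 4.  */
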
static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			    gimple_assign_rhs3 (def_stmt) };

	    for (unsigned int i = 0; i < 2; i++)
	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
		{
		  if (rkind != SRK_LENRANGE)
		    return false;
		  /* Set the upper bound to the maximum to prevent
		     it from being adjusted in the next iteration but
		     leave MINLEN and the more conservative MAXBOUND
		     determined so far alone (or leave them null if
		     they haven't been set yet).  That the MINLEN is
		     in fact zero can be determined from MAXLEN being
		     unbounded but the discovered minimum is used for
		     diagnostics.  */
		  pdata->maxlen = build_all_ones_cst (size_type_node);
		}
	    return true;
	  }
	return false;

      case GIMPLE_PHI:
	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	   must have a constant length.  */
	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  }
	return true;

      default:
	return false;
    }
}

/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA, which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request that it be
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

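/* A minimal usage sketch (BUF and PTR_INTO_BUF are hypothetical):

     char buf[8];
     c_strlen_data lendata = { };
     get_range_strlen (ptr_into_buf, &lendata, 1);

   For a string known only to be stored somewhere in BUF this yields
   LENDATA.MINLEN == 0 and LENDATA.MAXLEN == 7, i.e., the array size
   less one for the terminating nul.  */
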
bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the
     conservative MAXBOUND to SIZE_MAX.  Otherwise leave it null
     (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}

/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

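/* For example (a sketch; A is hypothetical), given

     const char a[4] = "abc";

   get_maxval_strlen (a, SRK_STRLEN) returns the INTEGER_CST 3, while
   for an unterminated array like

     const char a[3] = { 'a', 'b', 'c' };

   it returns NULL_TREE unless NONSTR is non-null, in which case
   *NONSTR is set to A's declaration and the size is returned.  */
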
static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}

/* Fold a call to the strcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */

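/* For instance (D is a hypothetical destination), when not optimizing
   for size the call

     strcpy (d, "hello");

   is rewritten as

     memcpy (d, "hello", 6);

   copying strlen ("hello") + 1 == 6 bytes including the nul.  */
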
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if SRC refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */

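/* For instance (D is hypothetical), when LEN is constant and no
   greater than strlen (SRC) + 1, the call

     strncpy (d, "abc", 4);

   is rewritten as

     memcpy (d, "abc", 4);

   after diagnosing any truncation; a larger bound would have to zero
   the tail and is left alone here.  */
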
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK, transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Fold a call to the strchr or strrchr builtin.  If both arguments are
   constant, evaluate and fold the result; otherwise simplify
   str(r)chr (str, 0) into str + strlen (str).  In general strlen is
   significantly faster than strchr due to being a simpler operation.  */
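
/* For instance (S is hypothetical), with a non-constant S the call

     p = strchr (s, 0);

   becomes the equivalent of

     tmp = strlen (s);
     p = s + tmp;

   while with constant arguments strchr ("abc", 'b') folds directly
   to the pointer "abc" + 1.  */
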
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold a call to the strstr builtin.  If both arguments are constant,
   evaluate and fold the result; additionally, fold strstr (x, "")
   into x and strstr (x, "c") into strchr (x, 'c').  */
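
/* For instance, with a constant haystack

     strstr ("haystack", "st")

   folds to the pointer "haystack" + 3, while with only the needle
   known, strstr (x, "s") becomes strchr (x, 's').  */
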
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise replace
   the call with a simplified form and return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).  */
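
/* For instance (D is hypothetical), when the length of SRC is known
   and the block is optimized for speed, the call

     strcat (d, "xy");

   is split into the equivalent of

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);

   copying strlen ("xy") + 1 bytes including the terminating nul.  */
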
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE are
   the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

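/* For instance (D is hypothetical), when the bound is a constant no
   smaller than the constant source length, the call

     strncat (d, "abc", 8);

   is simplified to

     strcat (d, "abc");

   since at most strlen ("abc") characters are appended anyway.  */
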
static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify the length of the source as the bound).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Build and append gimple statements to STMTS that would load the first
   character of the memory location identified by STR.  LOC is the
   location of the statement.  */

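/* A rough sketch of the GIMPLE this helper appends (SSA names are
   hypothetical):

     c_1 = MEM[(const unsigned char *)str_2];

   The returned temporary can then be compared, negated, or widened
   by the caller.  */
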
static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}

/* Fold a call to the str{n}{case}cmp builtin pointed to by GSI.  */

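/* For instance, with both strings constant

     strcmp ("abc", "abd")

   folds to a negative constant, while for a bound of one

     strncmp (s, t, 1)

   is reduced (S and T are hypothetical) to the byte difference
   *(const unsigned char *) s - *(const unsigned char *) t.  */
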
static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings LENx is then adjusted to the string's
     length so that LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* The only case we can handle is when the strings are equal
	   (result 0), and that is already covered by the
	   operand_equal_p test above.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold the call to the memchr builtin pointed to by GSI.  */

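/* For instance, with a constant first argument

     memchr ("abcd", 'c', 4)

   folds to the pointer "abcd" + 2, while a character that cannot
   occur in the first LEN bytes folds the result to null.  */
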
static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

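/* For instance (F is hypothetical), when the result is unused

     fputs ("", f)    is deleted outright,
     fputs ("x", f)   becomes fputc ('x', f), and
     fputs ("abc", f) becomes fwrite ("abc", 1, 3, f),

   the last transformation only when optimizing for speed.  */
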
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

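/* For instance (D and S are hypothetical), when the destination
   object is known to be large enough, the checked call

     __builtin___memcpy_chk (d, s, n, 32)

   with a constant n <= 32 is rewritten as the plain

     memcpy (d, s, n);

   dropping a runtime check that can never fail.  */
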
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code
   of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the stpcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */

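/* For instance (D is hypothetical), when the source length is known
   the call

     p = stpcpy (d, "abc");

   becomes the equivalent of

     memcpy (d, "abc", 4);
     p = d + 3;

   since stpcpy returns a pointer to the terminating nul it stores;
   when the result is unused the call decays to plain strcpy.  */
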
3056 static bool
gimple_fold_builtin_stpcpy(gimple_stmt_iterator * gsi)3057 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3058 {
3059   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3060   location_t loc = gimple_location (stmt);
3061   tree dest = gimple_call_arg (stmt, 0);
3062   tree src = gimple_call_arg (stmt, 1);
3063   tree fn, lenp1;
3064 
3065   /* If the result is unused, replace stpcpy with strcpy.  */
3066   if (gimple_call_lhs (stmt) == NULL_TREE)
3067     {
3068       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3069       if (!fn)
3070 	return false;
3071       gimple_call_set_fndecl (stmt, fn);
3072       fold_stmt (gsi);
3073       return true;
3074     }
3075 
3076   /* Set to non-null if ARG refers to an unterminated array.  */
3077   c_strlen_data data = { };
3078   /* The size of the unterminated array if SRC referes to one.  */
3079   tree size;
3080   /* True if the size is exact/constant, false if it's the lower bound
3081      of a range.  */
3082   bool exact;
3083   tree len = c_strlen (src, 1, &data, 1);
3084   if (!len
3085       || TREE_CODE (len) != INTEGER_CST)
3086     {
3087       data.decl = unterminated_array (src, &size, &exact);
3088       if (!data.decl)
3089 	return false;
3090     }
3091 
3092   if (data.decl)
3093     {
3094       /* Avoid folding calls with unterminated arrays.  */
3095       if (!gimple_no_warning_p (stmt))
3096 	warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
3097 			    exact);
3098       gimple_set_no_warning (stmt, true);
3099       return false;
3100     }
3101 
3102   if (optimize_function_for_size_p (cfun)
3103       /* If length is zero it's small enough.  */
3104       && !integer_zerop (len))
3105     return false;
3106 
3107   /* If the source has a known length replace stpcpy with memcpy.  */
3108   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3109   if (!fn)
3110     return false;
3111 
3112   gimple_seq stmts = NULL;
3113   tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3114   lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3115 			tem, build_int_cst (size_type_node, 1));
3116   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3117   gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3118   gimple_move_vops (repl, stmt);
3119   gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3120   /* Replace the result with dest + len.  */
3121   stmts = NULL;
3122   tem = gimple_convert (&stmts, loc, sizetype, len);
3123   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3124   gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3125 				      POINTER_PLUS_EXPR, dest, tem);
3126   gsi_replace (gsi, ret, false);
3127   /* Finally fold the memcpy call.  */
3128   gimple_stmt_iterator gsi2 = *gsi;
3129   gsi_prev (&gsi2);
3130   fold_stmt (&gsi2);
3131   return true;
3132 }
3133 
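/* Illustrative sketch (assumed example, not from this file): for a
   source of known constant length LEN, the stpcpy folding above emits

     memcpy (dest, src, LEN + 1);
     lhs = dest + LEN;

   rebuilding the returned pointer to the terminating NUL from DEST
   instead of calling the library routine.  */
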
3134 /* Fold a call to __{,v}snprintf_chk at *GSI.  Return false if a normal
3135    call should be emitted rather than expanding the function inline,
3136    true if the call was folded into a {,v}snprintf call.  FCODE is
3137    either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  The length
3138    bound is the second argument of the call.  */
3139 
3140 static bool
3141 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3142 				  enum built_in_function fcode)
3143 {
3144   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3145   tree dest, size, len, fn, fmt, flag;
3146   const char *fmt_str;
3147 
3148   /* Verify the required arguments in the original call.  */
3149   if (gimple_call_num_args (stmt) < 5)
3150     return false;
3151 
3152   dest = gimple_call_arg (stmt, 0);
3153   len = gimple_call_arg (stmt, 1);
3154   flag = gimple_call_arg (stmt, 2);
3155   size = gimple_call_arg (stmt, 3);
3156   fmt = gimple_call_arg (stmt, 4);
3157 
3158   if (! tree_fits_uhwi_p (size))
3159     return false;
3160 
3161   if (! integer_all_onesp (size))
3162     {
3163       tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3164       if (! tree_fits_uhwi_p (len))
3165 	{
3166 	  /* If LEN is not constant, try MAXLEN too.
3167 	     For MAXLEN only allow optimizing into the non-checking function
3168 	     if SIZE is >= MAXLEN; never convert to __chk_fail ().  */
3169 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3170 	    return false;
3171 	}
3172       else
3173 	maxlen = len;
3174 
3175       if (tree_int_cst_lt (size, maxlen))
3176 	return false;
3177     }
3178 
3179   if (!init_target_chars ())
3180     return false;
3181 
3182   /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3183      or if format doesn't contain % chars or is "%s".  */
3184   if (! integer_zerop (flag))
3185     {
3186       fmt_str = c_getstr (fmt);
3187       if (fmt_str == NULL)
3188 	return false;
3189       if (strchr (fmt_str, target_percent) != NULL
3190 	  && strcmp (fmt_str, target_percent_s))
3191 	return false;
3192     }
3193 
3194   /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3195      available.  */
3196   fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3197 			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3198   if (!fn)
3199     return false;
3200 
3201   /* Replace the called function and the first 5 arguments by 3,
3202      retaining the trailing varargs.  */
3203   gimple_call_set_fndecl (stmt, fn);
3204   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3205   gimple_call_set_arg (stmt, 0, dest);
3206   gimple_call_set_arg (stmt, 1, len);
3207   gimple_call_set_arg (stmt, 2, fmt);
3208   for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3209     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3210   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3211   fold_stmt (gsi);
3212   return true;
3213 }
3214 
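/* Illustrative sketch (assumed example): with a benign flag or a
   "%s"/percent-free format, the checking call

     __builtin___snprintf_chk (d, n, 0, sizeof d, "%s", s)

   is rewritten in place to snprintf (d, n, "%s", s), dropping the
   FLAG and SIZE arguments once SIZE is known to cover the length.  */
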
3215 /* Fold a call to __{,v}sprintf_chk at *GSI.  Return false if a normal
3216    call should be emitted rather than expanding the function inline,
3217    true if the call was folded into a {,v}sprintf call.  FCODE is
3218    either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
3219 
3220 static bool
3221 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3222 				 enum built_in_function fcode)
3223 {
3224   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3225   tree dest, size, len, fn, fmt, flag;
3226   const char *fmt_str;
3227   unsigned nargs = gimple_call_num_args (stmt);
3228 
3229   /* Verify the required arguments in the original call.  */
3230   if (nargs < 4)
3231     return false;
3232   dest = gimple_call_arg (stmt, 0);
3233   flag = gimple_call_arg (stmt, 1);
3234   size = gimple_call_arg (stmt, 2);
3235   fmt = gimple_call_arg (stmt, 3);
3236 
3237   if (! tree_fits_uhwi_p (size))
3238     return false;
3239 
3240   len = NULL_TREE;
3241 
3242   if (!init_target_chars ())
3243     return false;
3244 
3245   /* Check whether the format is a literal string constant.  */
3246   fmt_str = c_getstr (fmt);
3247   if (fmt_str != NULL)
3248     {
3249       /* If the format doesn't contain % args or %%, we know the size.  */
3250       if (strchr (fmt_str, target_percent) == 0)
3251 	{
3252 	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3253 	    len = build_int_cstu (size_type_node, strlen (fmt_str));
3254 	}
3255       /* If the format is "%s" and first ... argument is a string literal,
3256 	 we know the size too.  */
3257       else if (fcode == BUILT_IN_SPRINTF_CHK
3258 	       && strcmp (fmt_str, target_percent_s) == 0)
3259 	{
3260 	  tree arg;
3261 
3262 	  if (nargs == 5)
3263 	    {
3264 	      arg = gimple_call_arg (stmt, 4);
3265 	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
3266 		{
3267 		  len = c_strlen (arg, 1);
3268 		  if (! len || ! tree_fits_uhwi_p (len))
3269 		    len = NULL_TREE;
3270 		}
3271 	    }
3272 	}
3273     }
3274 
3275   if (! integer_all_onesp (size))
3276     {
3277       if (! len || ! tree_int_cst_lt (len, size))
3278 	return false;
3279     }
3280 
3281   /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3282      or if format doesn't contain % chars or is "%s".  */
3283   if (! integer_zerop (flag))
3284     {
3285       if (fmt_str == NULL)
3286 	return false;
3287       if (strchr (fmt_str, target_percent) != NULL
3288 	  && strcmp (fmt_str, target_percent_s))
3289 	return false;
3290     }
3291 
3292   /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
3293   fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3294 			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3295   if (!fn)
3296     return false;
3297 
3298   /* Replace the called function and the first 4 arguments by 2,
3299      retaining the trailing varargs.  */
3300   gimple_call_set_fndecl (stmt, fn);
3301   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3302   gimple_call_set_arg (stmt, 0, dest);
3303   gimple_call_set_arg (stmt, 1, fmt);
3304   for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3305     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3306   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3307   fold_stmt (gsi);
3308   return true;
3309 }
3310 
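/* Illustrative sketch (assumed example): under the same conditions,

     __builtin___sprintf_chk (d, 0, sizeof d, "%s", s)

   becomes sprintf (d, "%s", s), with the FLAG and SIZE arguments
   removed and any trailing varargs shifted down.  */
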
3311 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3312    ORIG may be null if this is a 2-argument call.  We don't attempt to
3313    simplify calls with more than 3 arguments.
3314 
3315    Return true if simplification was possible, otherwise false.  */
3316 
3317 bool
3318 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3319 {
3320   gimple *stmt = gsi_stmt (*gsi);
3321 
3322   /* Verify the required arguments in the original call.  We deal with two
3323      types of sprintf() calls: 'sprintf (str, fmt)' and
3324      'sprintf (dest, "%s", orig)'.  */
3325   if (gimple_call_num_args (stmt) > 3)
3326     return false;
3327 
3328   tree orig = NULL_TREE;
3329   if (gimple_call_num_args (stmt) == 3)
3330     orig = gimple_call_arg (stmt, 2);
3331 
3332   /* Check whether the format is a literal string constant.  */
3333   tree fmt = gimple_call_arg (stmt, 1);
3334   const char *fmt_str = c_getstr (fmt);
3335   if (fmt_str == NULL)
3336     return false;
3337 
3338   tree dest = gimple_call_arg (stmt, 0);
3339 
3340   if (!init_target_chars ())
3341     return false;
3342 
3343   tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3344   if (!fn)
3345     return false;
3346 
3347   /* If the format doesn't contain % args or %%, use strcpy.  */
3348   if (strchr (fmt_str, target_percent) == NULL)
3349     {
3350       /* Don't optimize sprintf (buf, "abc", ptr++).  */
3351       if (orig)
3352 	return false;
3353 
3354       /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3355 	 'format' is known to contain no % formats.  */
3356       gimple_seq stmts = NULL;
3357       gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3358 
3359       /* Propagate the NO_WARNING bit to avoid issuing the same
3360 	 warning more than once.  */
3361       if (gimple_no_warning_p (stmt))
3362 	gimple_set_no_warning (repl, true);
3363 
3364       gimple_seq_add_stmt_without_update (&stmts, repl);
3365       if (tree lhs = gimple_call_lhs (stmt))
3366 	{
3367 	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3368 							  strlen (fmt_str)));
3369 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3370 	  gsi_replace_with_seq_vops (gsi, stmts);
3371 	  /* gsi now points at the assignment to the lhs, get a
3372 	     stmt iterator to the strcpy call.
3373 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3374 	     CFG isn't built yet.  */
3375 	  gimple_stmt_iterator gsi2 = *gsi;
3376 	  gsi_prev (&gsi2);
3377 	  fold_stmt (&gsi2);
3378 	}
3379       else
3380 	{
3381 	  gsi_replace_with_seq_vops (gsi, stmts);
3382 	  fold_stmt (gsi);
3383 	}
3384       return true;
3385     }
3386 
3387   /* If the format is "%s", use strcpy, and set the result, if used, to the length of ORIG.  */
3388   else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3389     {
3390       /* Don't crash on sprintf (str1, "%s").  */
3391       if (!orig)
3392 	return false;
3393 
3394       /* Don't fold calls with source arguments of invalid (nonpointer)
3395 	 types.  */
3396       if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3397 	return false;
3398 
3399       tree orig_len = NULL_TREE;
3400       if (gimple_call_lhs (stmt))
3401 	{
3402 	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3403 	  if (!orig_len)
3404 	    return false;
3405 	}
3406 
3407       /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
3408       gimple_seq stmts = NULL;
3409       gimple *repl = gimple_build_call (fn, 2, dest, orig);
3410 
3411       /* Propagate the NO_WARNING bit to avoid issuing the same
3412 	 warning more than once.  */
3413       if (gimple_no_warning_p (stmt))
3414 	gimple_set_no_warning (repl, true);
3415 
3416       gimple_seq_add_stmt_without_update (&stmts, repl);
3417       if (tree lhs = gimple_call_lhs (stmt))
3418 	{
3419 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
3420 					  TREE_TYPE (orig_len)))
3421 	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3422 	  repl = gimple_build_assign (lhs, orig_len);
3423 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3424 	  gsi_replace_with_seq_vops (gsi, stmts);
3425 	  /* gsi now points at the assignment to the lhs, get a
3426 	     stmt iterator to the strcpy call.
3427 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3428 	     CFG isn't built yet.  */
3429 	  gimple_stmt_iterator gsi2 = *gsi;
3430 	  gsi_prev (&gsi2);
3431 	  fold_stmt (&gsi2);
3432 	}
3433       else
3434 	{
3435 	  gsi_replace_with_seq_vops (gsi, stmts);
3436 	  fold_stmt (gsi);
3437 	}
3438       return true;
3439     }
3440   return false;
3441 }
3442 
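/* Illustrative sketch (assumed examples): the two shapes handled above
   fold as

     sprintf (d, "abc")      =>  strcpy (d, "abc");   lhs = 3;
     sprintf (d, "%s", s)    =>  strcpy (d, s);       lhs = strlen (s);

   where the lhs assignment is emitted only if the result is used and
   the length is known.  */
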
3443 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3444    FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
3445    attempt to simplify calls with more than 4 arguments.
3446 
3447    Return true if simplification was possible, otherwise false.  */
3448 
3449 bool
3450 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3451 {
3452   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3453   tree dest = gimple_call_arg (stmt, 0);
3454   tree destsize = gimple_call_arg (stmt, 1);
3455   tree fmt = gimple_call_arg (stmt, 2);
3456   tree orig = NULL_TREE;
3457   const char *fmt_str = NULL;
3458 
3459   if (gimple_call_num_args (stmt) > 4)
3460     return false;
3461 
3462   if (gimple_call_num_args (stmt) == 4)
3463     orig = gimple_call_arg (stmt, 3);
3464 
3465   if (!tree_fits_uhwi_p (destsize))
3466     return false;
3467   unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3468 
3469   /* Check whether the format is a literal string constant.  */
3470   fmt_str = c_getstr (fmt);
3471   if (fmt_str == NULL)
3472     return false;
3473 
3474   if (!init_target_chars ())
3475     return false;
3476 
3477   /* If the format doesn't contain % args or %%, use strcpy.  */
3478   if (strchr (fmt_str, target_percent) == NULL)
3479     {
3480       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3481       if (!fn)
3482 	return false;
3483 
3484       /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
3485       if (orig)
3486 	return false;
3487 
3488       /* We could expand this as
3489 	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3490 	 or to
3491 	 memcpy (str, fmt_with_nul_at_cstm1, cst);
3492 	 but in the former case that might increase code size
3493 	 and in the latter case grow .rodata section too much.
3494 	 So punt for now.  */
3495       size_t len = strlen (fmt_str);
3496       if (len >= destlen)
3497 	return false;
3498 
3499       gimple_seq stmts = NULL;
3500       gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3501       gimple_seq_add_stmt_without_update (&stmts, repl);
3502       if (tree lhs = gimple_call_lhs (stmt))
3503 	{
3504 	  repl = gimple_build_assign (lhs,
3505 				      build_int_cst (TREE_TYPE (lhs), len));
3506 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3507 	  gsi_replace_with_seq_vops (gsi, stmts);
3508 	  /* gsi now points at the assignment to the lhs, get a
3509 	     stmt iterator to the strcpy call.
3510 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3511 	     CFG isn't built yet.  */
3512 	  gimple_stmt_iterator gsi2 = *gsi;
3513 	  gsi_prev (&gsi2);
3514 	  fold_stmt (&gsi2);
3515 	}
3516       else
3517 	{
3518 	  gsi_replace_with_seq_vops (gsi, stmts);
3519 	  fold_stmt (gsi);
3520 	}
3521       return true;
3522     }
3523 
3524   /* If the format is "%s", use strcpy if the length of the source is known to be smaller than DESTSIZE.  */
3525   else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3526     {
3527       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3528       if (!fn)
3529 	return false;
3530 
3531       /* Don't crash on snprintf (str1, cst, "%s").  */
3532       if (!orig)
3533 	return false;
3534 
3535       tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3536       if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3537 	return false;
3538 
3539       /* We could expand this as
3540 	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3541 	 or to
3542 	 memcpy (str1, str2_with_nul_at_cstm1, cst);
3543 	 but in the former case that might increase code size
3544 	 and in the latter case grow .rodata section too much.
3545 	 So punt for now.  */
3546       if (compare_tree_int (orig_len, destlen) >= 0)
3547 	return false;
3548 
3549       /* Convert snprintf (str1, cst, "%s", str2) into
3550 	 strcpy (str1, str2) if strlen (str2) < cst.  */
3551       gimple_seq stmts = NULL;
3552       gimple *repl = gimple_build_call (fn, 2, dest, orig);
3553       gimple_seq_add_stmt_without_update (&stmts, repl);
3554       if (tree lhs = gimple_call_lhs (stmt))
3555 	{
3556 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
3557 					  TREE_TYPE (orig_len)))
3558 	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3559 	  repl = gimple_build_assign (lhs, orig_len);
3560 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3561 	  gsi_replace_with_seq_vops (gsi, stmts);
3562 	  /* gsi now points at the assignment to the lhs, get a
3563 	     stmt iterator to the strcpy call.
3564 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3565 	     CFG isn't built yet.  */
3566 	  gimple_stmt_iterator gsi2 = *gsi;
3567 	  gsi_prev (&gsi2);
3568 	  fold_stmt (&gsi2);
3569 	}
3570       else
3571 	{
3572 	  gsi_replace_with_seq_vops (gsi, stmts);
3573 	  fold_stmt (gsi);
3574 	}
3575       return true;
3576     }
3577   return false;
3578 }
3579 
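/* Illustrative sketch (assumed example): when the copied length is a
   known constant smaller than the destination size,

     snprintf (d, 8, "abc")      =>  strcpy (d, "abc");  lhs = 3;
     snprintf (d, 8, "%s", s)    =>  strcpy (d, s);      [strlen (s) < 8]

   otherwise the call is left alone to preserve the truncation
   semantics of snprintf.  */
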
3580 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3581    FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
3582    more than 3 arguments, and ARG may be null in the 2-argument case.
3583 
3584    Return false if no simplification was possible, true if the call
3585    was replaced.  FCODE is the BUILT_IN_* code of the function to be
3586    simplified.  */
3587 
3588 static bool
3589 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3590 			     tree fp, tree fmt, tree arg,
3591 			     enum built_in_function fcode)
3592 {
3593   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3594   tree fn_fputc, fn_fputs;
3595   const char *fmt_str = NULL;
3596 
3597   /* If the return value is used, don't do the transformation.  */
3598   if (gimple_call_lhs (stmt) != NULL_TREE)
3599     return false;
3600 
3601   /* Check whether the format is a literal string constant.  */
3602   fmt_str = c_getstr (fmt);
3603   if (fmt_str == NULL)
3604     return false;
3605 
3606   if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3607     {
3608       /* If we're using an unlocked function, assume the other
3609 	 unlocked functions exist explicitly.  */
3610       fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3611       fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3612     }
3613   else
3614     {
3615       fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3616       fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3617     }
3618 
3619   if (!init_target_chars ())
3620     return false;
3621 
3622   /* If the format doesn't contain % args or %%, fold to fputs.  */
3623   if (strchr (fmt_str, target_percent) == NULL)
3624     {
3625       if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3626 	  && arg)
3627 	return false;
3628 
3629       /* If the format specifier was "", fprintf does nothing.  */
3630       if (fmt_str[0] == '\0')
3631 	{
3632 	  replace_call_with_value (gsi, NULL_TREE);
3633 	  return true;
3634 	}
3635 
3636       /* When "string" doesn't contain %, replace all cases of
3637 	 fprintf (fp, string) with fputs (string, fp).  The fputs
3638 	 builtin will take care of special cases like length == 1.  */
3639       if (fn_fputs)
3640 	{
3641 	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3642 	  replace_call_with_call_and_fold (gsi, repl);
3643 	  return true;
3644 	}
3645     }
3646 
3647   /* The other optimizations can be done only on the non-va_list variants.  */
3648   else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3649     return false;
3650 
3651   /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
3652   else if (strcmp (fmt_str, target_percent_s) == 0)
3653     {
3654       if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3655 	return false;
3656       if (fn_fputs)
3657 	{
3658 	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3659 	  replace_call_with_call_and_fold (gsi, repl);
3660 	  return true;
3661 	}
3662     }
3663 
3664   /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
3665   else if (strcmp (fmt_str, target_percent_c) == 0)
3666     {
3667       if (!arg
3668 	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3669 	return false;
3670       if (fn_fputc)
3671 	{
3672 	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3673 	  replace_call_with_call_and_fold (gsi, repl);
3674 	  return true;
3675 	}
3676     }
3677 
3678   return false;
3679 }
3680 
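/* Illustrative sketch (assumed examples) of the fprintf foldings:

     fprintf (fp, "hello")     =>  fputs ("hello", fp);
     fprintf (fp, "%s", s)     =>  fputs (s, fp);
     fprintf (fp, "%c", c)     =>  fputc (c, fp);

   all of them only when the fprintf return value is unused.  */
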
3681 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3682    FMT and ARG are the arguments to the call; we don't fold cases with
3683    more than 2 arguments, and ARG may be null if this is a 1-argument case.
3684 
3685    Return false if no simplification was possible, true if the call
3686    was replaced.  FCODE is the BUILT_IN_* code of the function to be
3687    simplified.  */
3688 
3689 static bool
3690 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3691 			    tree arg, enum built_in_function fcode)
3692 {
3693   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3694   tree fn_putchar, fn_puts, newarg;
3695   const char *fmt_str = NULL;
3696 
3697   /* If the return value is used, don't do the transformation.  */
3698   if (gimple_call_lhs (stmt) != NULL_TREE)
3699     return false;
3700 
3701   /* Check whether the format is a literal string constant.  */
3702   fmt_str = c_getstr (fmt);
3703   if (fmt_str == NULL)
3704     return false;
3705 
3706   if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3707     {
3708       /* If we're using an unlocked function, assume the other
3709 	 unlocked functions exist explicitly.  */
3710       fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3711       fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3712     }
3713   else
3714     {
3715       fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3716       fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3717     }
3718 
3719   if (!init_target_chars ())
3720     return false;
3721 
3722   if (strcmp (fmt_str, target_percent_s) == 0
3723       || strchr (fmt_str, target_percent) == NULL)
3724     {
3725       const char *str;
3726 
3727       if (strcmp (fmt_str, target_percent_s) == 0)
3728 	{
3729 	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3730 	    return false;
3731 
3732 	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3733 	    return false;
3734 
3735 	  str = c_getstr (arg);
3736 	  if (str == NULL)
3737 	    return false;
3738 	}
3739       else
3740 	{
3741 	  /* The format specifier doesn't contain any '%' characters.  */
3742 	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3743 	      && arg)
3744 	    return false;
3745 	  str = fmt_str;
3746 	}
3747 
3748       /* If the string was "", printf does nothing.  */
3749       if (str[0] == '\0')
3750 	{
3751 	  replace_call_with_value (gsi, NULL_TREE);
3752 	  return true;
3753 	}
3754 
3755       /* If the string has length of 1, call putchar.  */
3756       if (str[1] == '\0')
3757 	{
3758 	  /* Given printf ("c"), where c is any single character,
3759 	     convert "c"[0] to an int and pass that to the replacement
3760 	     function.  */
3761 	  newarg = build_int_cst (integer_type_node, str[0]);
3762 	  if (fn_putchar)
3763 	    {
3764 	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3765 	      replace_call_with_call_and_fold (gsi, repl);
3766 	      return true;
3767 	    }
3768 	}
3769       else
3770 	{
3771 	  /* If the string was "string\n", call puts("string").  */
3772 	  size_t len = strlen (str);
3773 	  if ((unsigned char)str[len - 1] == target_newline
3774 	      && (size_t) (int) len == len
3775 	      && (int) len > 0)
3776 	    {
3777 	      char *newstr;
3778 
3779 	      /* Create a NUL-terminated string that's one char shorter
3780 		 than the original, stripping off the trailing '\n'.  */
3781 	      newstr = xstrdup (str);
3782 	      newstr[len - 1] = '\0';
3783 	      newarg = build_string_literal (len, newstr);
3784 	      free (newstr);
3785 	      if (fn_puts)
3786 		{
3787 		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3788 		  replace_call_with_call_and_fold (gsi, repl);
3789 		  return true;
3790 		}
3791 	    }
3792 	  else
3793 	    /* We'd like to arrange to call fputs(string,stdout) here,
3794 	       but we need stdout and don't have a way to get it yet.  */
3795 	    return false;
3796 	}
3797     }
3798 
3799   /* The other optimizations can be done only on the non-va_list variants.  */
3800   else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3801     return false;
3802 
3803   /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
3804   else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3805     {
3806       if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3807 	return false;
3808       if (fn_puts)
3809 	{
3810 	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
3811 	  replace_call_with_call_and_fold (gsi, repl);
3812 	  return true;
3813 	}
3814     }
3815 
3816   /* If the format specifier was "%c", call __builtin_putchar(arg).  */
3817   else if (strcmp (fmt_str, target_percent_c) == 0)
3818     {
3819       if (!arg || ! useless_type_conversion_p (integer_type_node,
3820 					       TREE_TYPE (arg)))
3821 	return false;
3822       if (fn_putchar)
3823 	{
3824 	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3825 	  replace_call_with_call_and_fold (gsi, repl);
3826 	  return true;
3827 	}
3828     }
3829 
3830   return false;
3831 }
3832 
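/* Illustrative sketch (assumed examples) of the printf foldings:

     printf ("x")          =>  putchar ('x');
     printf ("hello\n")    =>  puts ("hello");
     printf ("%s\n", s)    =>  puts (s);
     printf ("%c", c)      =>  putchar (c);

   again only when the printf return value is unused.  */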
3833 
3834 
3835 /* Fold a call to __builtin_strlen when its argument's length is known or can be bounded.  */
3836 
3837 static bool
3838 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3839 {
3840   gimple *stmt = gsi_stmt (*gsi);
3841   tree arg = gimple_call_arg (stmt, 0);
3842 
3843   wide_int minlen;
3844   wide_int maxlen;
3845 
3846   c_strlen_data lendata = { };
3847   if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3848       && !lendata.decl
3849       && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3850       && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3851     {
3852       /* The range of lengths refers to either a single constant
3853 	 string or to the longest and shortest constant string
3854 	 referenced by the argument of the strlen() call, or to
3855 	 the strings that can possibly be stored in the arrays
3856 	 the argument refers to.  */
3857       minlen = wi::to_wide (lendata.minlen);
3858       maxlen = wi::to_wide (lendata.maxlen);
3859     }
3860   else
3861     {
3862       unsigned prec = TYPE_PRECISION (sizetype);
3863 
3864       minlen = wi::shwi (0, prec);
3865       maxlen = wi::to_wide (max_object_size (), prec) - 2;
3866     }
3867 
3868   if (minlen == maxlen)
3869     {
3870       /* Fold the strlen call to a constant.  */
3871       tree type = TREE_TYPE (lendata.minlen);
3872       tree len = force_gimple_operand_gsi (gsi,
3873 					   wide_int_to_tree (type, minlen),
3874 					   true, NULL, true, GSI_SAME_STMT);
3875       replace_call_with_value (gsi, len);
3876       return true;
3877     }
3878 
3879   /* Set the strlen() range to [MINLEN, MAXLEN].  */
3880   if (tree lhs = gimple_call_lhs (stmt))
3881     set_strlen_range (lhs, minlen, maxlen);
3882 
3883   return false;
3884 }
3885 
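/* Illustrative sketch (assumed example): for

     size_t n = __builtin_strlen ("abc");

   the range collapses to [3, 3] and the call folds to the constant 3;
   otherwise only the [MINLEN, MAXLEN] range is recorded on the lhs.  */
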
3886 /* Fold a call to __builtin_acc_on_device.  */
3887 
3888 static bool
3889 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3890 {
3891   /* Defer folding until we know which compiler we're in.  */
3892   if (symtab->state != EXPANSION)
3893     return false;
3894 
3895   unsigned val_host = GOMP_DEVICE_HOST;
3896   unsigned val_dev = GOMP_DEVICE_NONE;
3897 
3898 #ifdef ACCEL_COMPILER
3899   val_host = GOMP_DEVICE_NOT_HOST;
3900   val_dev = ACCEL_COMPILER_acc_device;
3901 #endif
3902 
3903   location_t loc = gimple_location (gsi_stmt (*gsi));
3904 
3905   tree host_eq = make_ssa_name (boolean_type_node);
3906   gimple *host_ass = gimple_build_assign
3907     (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3908   gimple_set_location (host_ass, loc);
3909   gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3910 
3911   tree dev_eq = make_ssa_name (boolean_type_node);
3912   gimple *dev_ass = gimple_build_assign
3913     (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3914   gimple_set_location (dev_ass, loc);
3915   gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3916 
3917   tree result = make_ssa_name (boolean_type_node);
3918   gimple *result_ass = gimple_build_assign
3919     (result, BIT_IOR_EXPR, host_eq, dev_eq);
3920   gimple_set_location (result_ass, loc);
3921   gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3922 
3923   replace_call_with_value (gsi, result);
3924 
3925   return true;
3926 }
3927 
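/* Illustrative sketch (assumed shape): acc_on_device (dev) lowers to

     host_eq = dev == VAL_HOST;
     dev_eq  = dev == VAL_DEV;
     result  = host_eq | dev_eq;

   where VAL_HOST/VAL_DEV depend on whether this is the host or the
   accelerator compiler, as set up above.  */
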
3928 /* Fold realloc (0, n) -> malloc (n).  */
3929 
3930 static bool
3931 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3932 {
3933   gimple *stmt = gsi_stmt (*gsi);
3934   tree arg = gimple_call_arg (stmt, 0);
3935   tree size = gimple_call_arg (stmt, 1);
3936 
3937   if (operand_equal_p (arg, null_pointer_node, 0))
3938     {
3939       tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3940       if (fn_malloc)
3941 	{
3942 	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
3943 	  replace_call_with_call_and_fold (gsi, repl);
3944 	  return true;
3945 	}
3946     }
3947   return false;
3948 }
3949 
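/* Illustrative sketch (assumed example):

     p = realloc (NULL, n);   =>   p = malloc (n);

   which is valid because realloc with a null pointer is specified to
   behave exactly like malloc.  */
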
3950 /* Number of bytes into which any type other than aggregate or vector
3951    types should fit.  */
3952 static constexpr size_t clear_padding_unit
3953   = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
3954 /* Buffer size on which __builtin_clear_padding folding code works.  */
3955 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
3956 
3957 /* Data passed through __builtin_clear_padding folding.  */
3958 struct clear_padding_struct {
3959   location_t loc;
3960   /* False during __builtin_clear_padding folding, true during
3961      clear_type_padding_in_mask.  In that case clear the padding bits
3962      in the union_ptr array instead of the non-padding bits.  */
3963   bool clear_in_mask;
3964   tree base;
3965   tree alias_type;
3966   gimple_stmt_iterator *gsi;
3967   /* Alignment of buf->base + 0.  */
3968   unsigned align;
3969   /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
3970   HOST_WIDE_INT off;
3971   /* Number of padding bytes before buf->off that don't have padding clear
3972      code emitted yet.  */
3973   HOST_WIDE_INT padding_bytes;
3974   /* The size of the whole object.  Never emit code to touch
3975      buf->base + buf->sz or following bytes.  */
3976   HOST_WIDE_INT sz;
3977   /* Number of bytes recorded in buf->buf.  */
3978   size_t size;
3979   /* When inside a union, instead of emitting code we AND bits into
3980      the union_ptr array.  */
3981   unsigned char *union_ptr;
3982   /* Set bits mean padding bits that need to be cleared by the builtin.  */
3983   unsigned char buf[clear_padding_buf_size + clear_padding_unit];
3984 };
3985 
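/* Illustrative sketch (assumed example) of the buffer convention: for

     struct S { char c; int i; };   [typical 32-bit int, 4-byte align]

   the bytes recorded in buf->buf would be 00 ff ff ff 00 00 00 00,
   i.e. set bits mark the three padding bytes between C and I that
   __builtin_clear_padding must zero.  */
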
3986 /* Emit code to clear the padding requested in BUF->buf; set bits
3987    there stand for padding that should be cleared.  FULL is true
3988    if everything from the buffer should be flushed, otherwise
3989    up to 2 * clear_padding_unit bytes may be left for further
3990    processing.  */
3991 
3992 static void
3993 clear_padding_flush (clear_padding_struct *buf, bool full)
3994 {
3995   gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
3996   if (!full && buf->size < 2 * clear_padding_unit)
3997     return;
3998   gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
3999   size_t end = buf->size;
4000   if (!full)
4001     end = ((end - clear_padding_unit - 1) / clear_padding_unit
4002 	   * clear_padding_unit);
4003   size_t padding_bytes = buf->padding_bytes;
4004   if (buf->union_ptr)
4005     {
4006       if (buf->clear_in_mask)
4007 	{
4008 	  /* During clear_type_padding_in_mask, clear the padding
4009 	     bits set in buf->buf in the buf->union_ptr mask.  */
4010 	  for (size_t i = 0; i < end; i++)
4011 	    {
4012 	      if (buf->buf[i] == (unsigned char) ~0)
4013 		padding_bytes++;
4014 	      else
4015 		{
4016 		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
4017 			  0, padding_bytes);
4018 		  padding_bytes = 0;
4019 		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4020 		}
4021 	    }
4022 	  if (full)
4023 	    {
4024 	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
4025 		      0, padding_bytes);
4026 	      buf->off = 0;
4027 	      buf->size = 0;
4028 	      buf->padding_bytes = 0;
4029 	    }
4030 	  else
4031 	    {
4032 	      memmove (buf->buf, buf->buf + end, buf->size - end);
4033 	      buf->off += end;
4034 	      buf->size -= end;
4035 	      buf->padding_bytes = padding_bytes;
4036 	    }
4037 	  return;
4038 	}
4039       /* Inside of a union, instead of emitting any code, clear
4040 	 all bits in the union_ptr buffer that are clear
4041 	 in buf.  Whole padding bytes don't clear anything.  */
4042       for (size_t i = 0; i < end; i++)
4043 	{
4044 	  if (buf->buf[i] == (unsigned char) ~0)
4045 	    padding_bytes++;
4046 	  else
4047 	    {
4048 	      padding_bytes = 0;
4049 	      buf->union_ptr[buf->off + i] &= buf->buf[i];
4050 	    }
4051 	}
4052       if (full)
4053 	{
4054 	  buf->off = 0;
4055 	  buf->size = 0;
4056 	  buf->padding_bytes = 0;
4057 	}
4058       else
4059 	{
4060 	  memmove (buf->buf, buf->buf + end, buf->size - end);
4061 	  buf->off += end;
4062 	  buf->size -= end;
4063 	  buf->padding_bytes = padding_bytes;
4064 	}
4065       return;
4066     }
4067   size_t wordsize = UNITS_PER_WORD;
4068   for (size_t i = 0; i < end; i += wordsize)
4069     {
4070       size_t nonzero_first = wordsize;
4071       size_t nonzero_last = 0;
4072       size_t zero_first = wordsize;
4073       size_t zero_last = 0;
4074       bool all_ones = true, bytes_only = true;
4075       if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4076 	  > (unsigned HOST_WIDE_INT) buf->sz)
4077 	{
4078 	  gcc_assert (wordsize > 1);
4079 	  wordsize /= 2;
4080 	  i -= wordsize;
4081 	  continue;
4082 	}
4083       for (size_t j = i; j < i + wordsize && j < end; j++)
4084 	{
4085 	  if (buf->buf[j])
4086 	    {
4087 	      if (nonzero_first == wordsize)
4088 		{
4089 		  nonzero_first = j - i;
4090 		  nonzero_last = j - i;
4091 		}
4092 	      if (nonzero_last != j - i)
4093 		all_ones = false;
4094 	      nonzero_last = j + 1 - i;
4095 	    }
4096 	  else
4097 	    {
4098 	      if (zero_first == wordsize)
4099 		zero_first = j - i;
4100 	      zero_last = j + 1 - i;
4101 	    }
4102 	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4103 	    {
4104 	      all_ones = false;
4105 	      bytes_only = false;
4106 	    }
4107 	}
4108       size_t padding_end = i;
4109       if (padding_bytes)
4110 	{
4111 	  if (nonzero_first == 0
4112 	      && nonzero_last == wordsize
4113 	      && all_ones)
4114 	    {
4115 	      /* All bits are padding and we had some padding
4116 		 before too.  Just extend it.  */
4117 	      padding_bytes += wordsize;
4118 	      continue;
4119 	    }
4120 	  if (all_ones && nonzero_first == 0)
4121 	    {
4122 	      padding_bytes += nonzero_last;
4123 	      padding_end += nonzero_last;
4124 	      nonzero_first = wordsize;
4125 	      nonzero_last = 0;
4126 	    }
4127 	  else if (bytes_only && nonzero_first == 0)
4128 	    {
4129 	      gcc_assert (zero_first && zero_first != wordsize);
4130 	      padding_bytes += zero_first;
4131 	      padding_end += zero_first;
4132 	    }
4133 	  tree atype, src;
4134 	  if (padding_bytes == 1)
4135 	    {
4136 	      atype = char_type_node;
4137 	      src = build_zero_cst (char_type_node);
4138 	    }
4139 	  else
4140 	    {
4141 	      atype = build_array_type_nelts (char_type_node, padding_bytes);
4142 	      src = build_constructor (atype, NULL);
4143 	    }
4144 	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4145 				 build_int_cst (buf->alias_type,
4146 						buf->off + padding_end
4147 						- padding_bytes));
4148 	  gimple *g = gimple_build_assign (dst, src);
4149 	  gimple_set_location (g, buf->loc);
4150 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4151 	  padding_bytes = 0;
4152 	  buf->padding_bytes = 0;
4153 	}
4154       if (nonzero_first == wordsize)
4155 	/* All bits in a word are 0, there are no padding bits.  */
4156 	continue;
4157       if (all_ones && nonzero_last == wordsize)
4158 	{
4159 	  /* All bits between nonzero_first and end of word are padding
4160 	     bits, start counting padding_bytes.  */
4161 	  padding_bytes = nonzero_last - nonzero_first;
4162 	  continue;
4163 	}
4164       if (bytes_only)
4165 	{
4166 	  /* If bitfields aren't involved in this word, prefer storing
4167 	     individual bytes or groups of them over performing a RMW
4168 	     operation on the whole word.  */
4169 	  gcc_assert (i + zero_last <= end);
4170 	  for (size_t j = padding_end; j < i + zero_last; j++)
4171 	    {
4172 	      if (buf->buf[j])
4173 		{
4174 		  size_t k;
4175 		  for (k = j; k < i + zero_last; k++)
4176 		    if (buf->buf[k] == 0)
4177 		      break;
4178 		  HOST_WIDE_INT off = buf->off + j;
4179 		  tree atype, src;
4180 		  if (k - j == 1)
4181 		    {
4182 		      atype = char_type_node;
4183 		      src = build_zero_cst (char_type_node);
4184 		    }
4185 		  else
4186 		    {
4187 		      atype = build_array_type_nelts (char_type_node, k - j);
4188 		      src = build_constructor (atype, NULL);
4189 		    }
4190 		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
4191 					 buf->base,
4192 					 build_int_cst (buf->alias_type, off));
4193 		  gimple *g = gimple_build_assign (dst, src);
4194 		  gimple_set_location (g, buf->loc);
4195 		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4196 		  j = k;
4197 		}
4198 	    }
4199 	  if (nonzero_last == wordsize)
4200 	    padding_bytes = nonzero_last - zero_last;
4201 	  continue;
4202 	}
4203       for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4204 	{
4205 	  if (nonzero_last - nonzero_first <= eltsz
4206 	      && ((nonzero_first & ~(eltsz - 1))
4207 		  == ((nonzero_last - 1) & ~(eltsz - 1))))
4208 	    {
4209 	      tree type;
4210 	      if (eltsz == 1)
4211 		type = char_type_node;
4212 	      else
4213 		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4214 						       0);
4215 	      size_t start = nonzero_first & ~(eltsz - 1);
4216 	      HOST_WIDE_INT off = buf->off + i + start;
4217 	      tree atype = type;
4218 	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4219 		atype = build_aligned_type (type, buf->align);
4220 	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4221 				     build_int_cst (buf->alias_type, off));
4222 	      tree src;
4223 	      gimple *g;
4224 	      if (all_ones
4225 		  && nonzero_first == start
4226 		  && nonzero_last == start + eltsz)
4227 		src = build_zero_cst (type);
4228 	      else
4229 		{
4230 		  src = make_ssa_name (type);
4231 		  g = gimple_build_assign (src, unshare_expr (dst));
4232 		  gimple_set_location (g, buf->loc);
4233 		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4234 		  tree mask = native_interpret_expr (type,
4235 						     buf->buf + i + start,
4236 						     eltsz);
4237 		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4238 		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4239 		  tree src_masked = make_ssa_name (type);
4240 		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4241 					   src, mask);
4242 		  gimple_set_location (g, buf->loc);
4243 		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4244 		  src = src_masked;
4245 		}
4246 	      g = gimple_build_assign (dst, src);
4247 	      gimple_set_location (g, buf->loc);
4248 	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4249 	      break;
4250 	    }
4251 	}
4252     }
4253   if (full)
4254     {
4255       if (padding_bytes)
4256 	{
4257 	  tree atype, src;
4258 	  if (padding_bytes == 1)
4259 	    {
4260 	      atype = char_type_node;
4261 	      src = build_zero_cst (char_type_node);
4262 	    }
4263 	  else
4264 	    {
4265 	      atype = build_array_type_nelts (char_type_node, padding_bytes);
4266 	      src = build_constructor (atype, NULL);
4267 	    }
4268 	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4269 				 build_int_cst (buf->alias_type,
4270 						buf->off + end
4271 						- padding_bytes));
4272 	  gimple *g = gimple_build_assign (dst, src);
4273 	  gimple_set_location (g, buf->loc);
4274 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4275 	}
4276       size_t end_rem = end % UNITS_PER_WORD;
4277       buf->off += end - end_rem;
4278       buf->size = end_rem;
4279       memset (buf->buf, 0, buf->size);
4280       buf->padding_bytes = 0;
4281     }
4282   else
4283     {
4284       memmove (buf->buf, buf->buf + end, buf->size - end);
4285       buf->off += end;
4286       buf->size -= end;
4287       buf->padding_bytes = padding_bytes;
4288     }
4289 }
4290 
4291 /* Append PADDING_BYTES padding bytes.  */
4292 
4293 static void
4294 clear_padding_add_padding (clear_padding_struct *buf,
4295 			   HOST_WIDE_INT padding_bytes)
4296 {
4297   if (padding_bytes == 0)
4298     return;
4299   if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4300       > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4301     clear_padding_flush (buf, false);
4302   if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4303       > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4304     {
4305       memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4306       padding_bytes -= clear_padding_buf_size - buf->size;
4307       buf->size = clear_padding_buf_size;
4308       clear_padding_flush (buf, false);
4309       gcc_assert (buf->padding_bytes);
4310       /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4311 	 is guaranteed to be all ones.  */
4312       padding_bytes += buf->size;
4313       buf->size = padding_bytes % UNITS_PER_WORD;
4314       memset (buf->buf, ~0, buf->size);
4315       buf->off += padding_bytes - buf->size;
4316       buf->padding_bytes += padding_bytes - buf->size;
4317     }
4318   else
4319     {
4320       memset (buf->buf + buf->size, ~0, padding_bytes);
4321       buf->size += padding_bytes;
4322     }
4323 }
4324 
4325 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4326 
4327 /* Clear padding bits of union type TYPE.  */
4328 
4329 static void
4330 clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4331 {
4332   clear_padding_struct *union_buf;
4333   HOST_WIDE_INT start_off = 0, next_off = 0;
4334   size_t start_size = 0;
4335   if (buf->union_ptr)
4336     {
4337       start_off = buf->off + buf->size;
4338       next_off = start_off + sz;
4339       start_size = start_off % UNITS_PER_WORD;
4340       start_off -= start_size;
4341       clear_padding_flush (buf, true);
4342       union_buf = buf;
4343     }
4344   else
4345     {
4346       if (sz + buf->size > clear_padding_buf_size)
4347 	clear_padding_flush (buf, false);
4348       union_buf = XALLOCA (clear_padding_struct);
4349       union_buf->loc = buf->loc;
4350       union_buf->clear_in_mask = buf->clear_in_mask;
4351       union_buf->base = NULL_TREE;
4352       union_buf->alias_type = NULL_TREE;
4353       union_buf->gsi = NULL;
4354       union_buf->align = 0;
4355       union_buf->off = 0;
4356       union_buf->padding_bytes = 0;
4357       union_buf->sz = sz;
4358       union_buf->size = 0;
4359       if (sz + buf->size <= clear_padding_buf_size)
4360 	union_buf->union_ptr = buf->buf + buf->size;
4361       else
4362 	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4363       memset (union_buf->union_ptr, ~0, sz);
4364     }
4365 
4366   for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4367     if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4368       {
4369 	if (DECL_SIZE_UNIT (field) == NULL_TREE)
4370 	  {
4371 	    if (TREE_TYPE (field) == error_mark_node)
4372 	      continue;
4373 	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4374 			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
4375 	    if (!buf->clear_in_mask)
4376 	      error_at (buf->loc, "flexible array member %qD does not have "
4377 				  "well defined padding bits for %qs",
4378 			field, "__builtin_clear_padding");
4379 	    continue;
4380 	  }
4381 	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4382 	gcc_assert (union_buf->size == 0);
4383 	union_buf->off = start_off;
4384 	union_buf->size = start_size;
4385 	memset (union_buf->buf, ~0, start_size);
4386 	clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4387 	clear_padding_add_padding (union_buf, sz - fldsz);
4388 	clear_padding_flush (union_buf, true);
4389       }
4390 
4391   if (buf == union_buf)
4392     {
4393       buf->off = next_off;
4394       buf->size = next_off % UNITS_PER_WORD;
4395       buf->off -= buf->size;
4396       memset (buf->buf, ~0, buf->size);
4397     }
4398   else if (sz + buf->size <= clear_padding_buf_size)
4399     buf->size += sz;
4400   else
4401     {
4402       unsigned char *union_ptr = union_buf->union_ptr;
4403       while (sz)
4404 	{
4405 	  clear_padding_flush (buf, false);
4406 	  HOST_WIDE_INT this_sz
4407 	    = MIN ((unsigned HOST_WIDE_INT) sz,
4408 		   clear_padding_buf_size - buf->size);
4409 	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
4410 	  buf->size += this_sz;
4411 	  union_ptr += this_sz;
4412 	  sz -= this_sz;
4413 	}
4414       XDELETE (union_buf->union_ptr);
4415     }
4416 }
4417 
4418 /* The only known floating point formats with padding bits are the
4419    IEEE extended ones.  */
4420 
4421 static bool
4422 clear_padding_real_needs_padding_p (tree type)
4423 {
4424   const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4425   return (fmt->b == 2
4426 	  && fmt->signbit_ro == fmt->signbit_rw
4427 	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4428 }
4429 
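/* Illustrative note (assumed detail): this matches the Intel 80-bit
   extended format (sign bit 79, stored in 12 or 16 bytes) and the
   Motorola 96-bit one (sign bit 95, with padding inside the value);
   other binary formats are assumed to have no padding bits.  */
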
4430 /* Return true if TYPE might contain any padding bits.  */
4431 
4432 static bool
4433 clear_padding_type_may_have_padding_p (tree type)
4434 {
4435   switch (TREE_CODE (type))
4436     {
4437     case RECORD_TYPE:
4438     case UNION_TYPE:
4439       return true;
4440     case ARRAY_TYPE:
4441     case COMPLEX_TYPE:
4442     case VECTOR_TYPE:
4443       return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4444     case REAL_TYPE:
4445       return clear_padding_real_needs_padding_p (type);
4446     default:
4447       return false;
4448     }
4449 }
4450 
4451 /* Emit a runtime loop:
4452    for (; buf.base != end; buf.base += sz)
4453      __builtin_clear_padding (buf.base);  */
4454 
4455 static void
4456 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4457 {
4458   tree l1 = create_artificial_label (buf->loc);
4459   tree l2 = create_artificial_label (buf->loc);
4460   tree l3 = create_artificial_label (buf->loc);
4461   gimple *g = gimple_build_goto (l2);
4462   gimple_set_location (g, buf->loc);
4463   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4464   g = gimple_build_label (l1);
4465   gimple_set_location (g, buf->loc);
4466   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4467   clear_padding_type (buf, type, buf->sz);
4468   clear_padding_flush (buf, true);
4469   g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4470 			   size_int (buf->sz));
4471   gimple_set_location (g, buf->loc);
4472   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4473   g = gimple_build_label (l2);
4474   gimple_set_location (g, buf->loc);
4475   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4476   g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4477   gimple_set_location (g, buf->loc);
4478   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4479   g = gimple_build_label (l3);
4480   gimple_set_location (g, buf->loc);
4481   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4482 }
4483 
4484 /* Clear padding bits for TYPE.  Called recursively from
4485    gimple_fold_builtin_clear_padding.  */
4486 
4487 static void
4488 clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4489 {
4490   switch (TREE_CODE (type))
4491     {
4492     case RECORD_TYPE:
4493       HOST_WIDE_INT cur_pos;
4494       cur_pos = 0;
4495       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4496 	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4497 	  {
4498 	    tree ftype = TREE_TYPE (field);
4499 	    if (DECL_BIT_FIELD (field))
4500 	      {
4501 		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4502 		if (fldsz == 0)
4503 		  continue;
4504 		HOST_WIDE_INT pos = int_byte_position (field);
4505 		HOST_WIDE_INT bpos
4506 		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4507 		bpos %= BITS_PER_UNIT;
4508 		HOST_WIDE_INT end
4509 		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4510 		if (pos + end > cur_pos)
4511 		  {
4512 		    clear_padding_add_padding (buf, pos + end - cur_pos);
4513 		    cur_pos = pos + end;
4514 		  }
4515 		gcc_assert (cur_pos > pos
4516 			    && ((unsigned HOST_WIDE_INT) buf->size
4517 				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
4518 		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4519 		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4520 		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4521 				      " in %qs", "__builtin_clear_padding");
4522 		else if (BYTES_BIG_ENDIAN)
4523 		  {
4524 		    /* Big endian.  */
4525 		    if (bpos + fldsz <= BITS_PER_UNIT)
4526 		      *p &= ~(((1 << fldsz) - 1)
4527 			      << (BITS_PER_UNIT - bpos - fldsz));
4528 		    else
4529 		      {
4530 			if (bpos)
4531 			  {
4532 			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4533 			    p++;
4534 			    fldsz -= BITS_PER_UNIT - bpos;
4535 			  }
4536 			memset (p, 0, fldsz / BITS_PER_UNIT);
4537 			p += fldsz / BITS_PER_UNIT;
4538 			fldsz %= BITS_PER_UNIT;
4539 			if (fldsz)
4540 			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4541 		      }
4542 		  }
4543 		else
4544 		  {
4545 		    /* Little endian.  */
4546 		    if (bpos + fldsz <= BITS_PER_UNIT)
4547 		      *p &= ~(((1 << fldsz) - 1) << bpos);
4548 		    else
4549 		      {
4550 			if (bpos)
4551 			  {
4552 			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4553 			    p++;
4554 			    fldsz -= BITS_PER_UNIT - bpos;
4555 			  }
4556 			memset (p, 0, fldsz / BITS_PER_UNIT);
4557 			p += fldsz / BITS_PER_UNIT;
4558 			fldsz %= BITS_PER_UNIT;
4559 			if (fldsz)
4560 			  *p &= ~((1 << fldsz) - 1);
4561 		      }
4562 		  }
4563 	      }
4564 	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4565 	      {
4566 		if (ftype == error_mark_node)
4567 		  continue;
4568 		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4569 			    && !COMPLETE_TYPE_P (ftype));
4570 		if (!buf->clear_in_mask)
4571 		  error_at (buf->loc, "flexible array member %qD does not "
4572 				      "have well defined padding bits for %qs",
4573 			    field, "__builtin_clear_padding");
4574 	      }
4575 	    else if (is_empty_type (TREE_TYPE (field)))
4576 	      continue;
4577 	    else
4578 	      {
4579 		HOST_WIDE_INT pos = int_byte_position (field);
4580 		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4581 		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4582 		clear_padding_add_padding (buf, pos - cur_pos);
4583 		cur_pos = pos;
4584 		clear_padding_type (buf, TREE_TYPE (field), fldsz);
4585 		cur_pos += fldsz;
4586 	      }
4587 	  }
4588       gcc_assert (sz >= cur_pos);
4589       clear_padding_add_padding (buf, sz - cur_pos);
4590       break;
4591     case ARRAY_TYPE:
4592       HOST_WIDE_INT nelts, fldsz;
4593       fldsz = int_size_in_bytes (TREE_TYPE (type));
4594       if (fldsz == 0)
4595 	break;
4596       nelts = sz / fldsz;
4597       if (nelts > 1
4598 	  && sz > 8 * UNITS_PER_WORD
4599 	  && buf->union_ptr == NULL
4600 	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4601 	{
4602 	  /* For a sufficiently large array of more than one element,
4603 	     emit a runtime loop to keep code size manageable.  */
4604 	  tree base = buf->base;
4605 	  unsigned int prev_align = buf->align;
4606 	  HOST_WIDE_INT off = buf->off + buf->size;
4607 	  HOST_WIDE_INT prev_sz = buf->sz;
4608 	  clear_padding_flush (buf, true);
4609 	  tree elttype = TREE_TYPE (type);
4610 	  buf->base = create_tmp_var (build_pointer_type (elttype));
4611 	  tree end = make_ssa_name (TREE_TYPE (buf->base));
4612 	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4613 					   base, size_int (off));
4614 	  gimple_set_location (g, buf->loc);
4615 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4616 	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4617 				   size_int (sz));
4618 	  gimple_set_location (g, buf->loc);
4619 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4620 	  buf->sz = fldsz;
4621 	  buf->align = TYPE_ALIGN (elttype);
4622 	  buf->off = 0;
4623 	  buf->size = 0;
4624 	  clear_padding_emit_loop (buf, elttype, end);
4625 	  buf->base = base;
4626 	  buf->sz = prev_sz;
4627 	  buf->align = prev_align;
4628 	  buf->size = off % UNITS_PER_WORD;
4629 	  buf->off = off - buf->size;
4630 	  memset (buf->buf, 0, buf->size);
4631 	  break;
4632 	}
4633       for (HOST_WIDE_INT i = 0; i < nelts; i++)
4634 	clear_padding_type (buf, TREE_TYPE (type), fldsz);
4635       break;
4636     case UNION_TYPE:
4637       clear_padding_union (buf, type, sz);
4638       break;
4639     case REAL_TYPE:
4640       gcc_assert ((size_t) sz <= clear_padding_unit);
4641       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4642 	clear_padding_flush (buf, false);
4643       if (clear_padding_real_needs_padding_p (type))
4644 	{
4645 	  /* Use native_interpret_expr + native_encode_expr to figure out
4646 	     which bits are padding.  */
4647 	  memset (buf->buf + buf->size, ~0, sz);
4648 	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
4649 	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4650 	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4651 	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4652 	  for (size_t i = 0; i < (size_t) sz; i++)
4653 	    buf->buf[buf->size + i] ^= ~0;
4654 	}
4655       else
4656 	memset (buf->buf + buf->size, 0, sz);
4657       buf->size += sz;
4658       break;
4659     case COMPLEX_TYPE:
4660       fldsz = int_size_in_bytes (TREE_TYPE (type));
4661       clear_padding_type (buf, TREE_TYPE (type), fldsz);
4662       clear_padding_type (buf, TREE_TYPE (type), fldsz);
4663       break;
4664     case VECTOR_TYPE:
4665       nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4666       fldsz = int_size_in_bytes (TREE_TYPE (type));
4667       for (HOST_WIDE_INT i = 0; i < nelts; i++)
4668 	clear_padding_type (buf, TREE_TYPE (type), fldsz);
4669       break;
4670     case NULLPTR_TYPE:
4671       gcc_assert ((size_t) sz <= clear_padding_unit);
4672       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4673 	clear_padding_flush (buf, false);
4674       memset (buf->buf + buf->size, ~0, sz);
4675       buf->size += sz;
4676       break;
4677     default:
4678       gcc_assert ((size_t) sz <= clear_padding_unit);
4679       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4680 	clear_padding_flush (buf, false);
4681       memset (buf->buf + buf->size, 0, sz);
4682       buf->size += sz;
4683       break;
4684     }
4685 }
4686 
4687 /* Clear padding bits of TYPE in MASK.  */
4688 
4689 void
4690 clear_type_padding_in_mask (tree type, unsigned char *mask)
4691 {
4692   clear_padding_struct buf;
4693   buf.loc = UNKNOWN_LOCATION;
4694   buf.clear_in_mask = true;
4695   buf.base = NULL_TREE;
4696   buf.alias_type = NULL_TREE;
4697   buf.gsi = NULL;
4698   buf.align = 0;
4699   buf.off = 0;
4700   buf.padding_bytes = 0;
4701   buf.sz = int_size_in_bytes (type);
4702   buf.size = 0;
4703   buf.union_ptr = mask;
4704   clear_padding_type (&buf, type, buf.sz);
4705   clear_padding_flush (&buf, true);
4706 }
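
/* Illustrative sketch (assumes a typical LP64 layout; not taken from the
   sources): for

     struct S { char c; int i; };

   sizeof (struct S) is 8, with three bytes of padding after C.  A caller
   that pre-fills an 8-byte MASK with all-ones bytes and then calls

     clear_type_padding_in_mask (struct_S_type, mask);

   (struct_S_type standing for the struct S type node) ends up with
   mask[1] == mask[2] == mask[3] == 0 while the bytes for C and I keep
   their value bits set, so ANDing a byte image of an object with MASK
   discards exactly the padding.  */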
4707 
4708 /* Fold __builtin_clear_padding builtin.  */
4709 
4710 static bool
4711 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4712 {
4713   gimple *stmt = gsi_stmt (*gsi);
4714   gcc_assert (gimple_call_num_args (stmt) == 2);
4715   tree ptr = gimple_call_arg (stmt, 0);
4716   tree typearg = gimple_call_arg (stmt, 1);
4717   tree type = TREE_TYPE (TREE_TYPE (typearg));
4718   location_t loc = gimple_location (stmt);
4719   clear_padding_struct buf;
4720   gimple_stmt_iterator gsiprev = *gsi;
4721   /* This should be folded during the lowering pass.  */
4722   gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4723   gcc_assert (COMPLETE_TYPE_P (type));
4724   gsi_prev (&gsiprev);
4725 
4726   buf.loc = loc;
4727   buf.clear_in_mask = false;
4728   buf.base = ptr;
4729   buf.alias_type = NULL_TREE;
4730   buf.gsi = gsi;
4731   buf.align = get_pointer_alignment (ptr);
4732   unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4733   buf.align = MAX (buf.align, talign);
4734   buf.off = 0;
4735   buf.padding_bytes = 0;
4736   buf.size = 0;
4737   buf.sz = int_size_in_bytes (type);
4738   buf.union_ptr = NULL;
4739   if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4740     sorry_at (loc, "%s not supported for variable length aggregates",
4741 	      "__builtin_clear_padding");
4742   /* The implementation currently assumes 8-bit host and target
4743      chars, which is the case for all currently supported targets and
4744      hosts, and is required e.g. for the native_{encode,interpret}* APIs.  */
4745   else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4746     sorry_at (loc, "%s not supported on this target",
4747 	      "__builtin_clear_padding");
4748   else if (!clear_padding_type_may_have_padding_p (type))
4749     ;
4750   else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4751     {
4752       tree sz = TYPE_SIZE_UNIT (type);
4753       tree elttype = type;
4754       /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
4755       while (TREE_CODE (elttype) == ARRAY_TYPE
4756 	     && int_size_in_bytes (elttype) < 0)
4757 	elttype = TREE_TYPE (elttype);
4758       HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4759       gcc_assert (eltsz >= 0);
4760       if (eltsz)
4761 	{
4762 	  buf.base = create_tmp_var (build_pointer_type (elttype));
4763 	  tree end = make_ssa_name (TREE_TYPE (buf.base));
4764 	  gimple *g = gimple_build_assign (buf.base, ptr);
4765 	  gimple_set_location (g, loc);
4766 	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
4767 	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4768 	  gimple_set_location (g, loc);
4769 	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
4770 	  buf.sz = eltsz;
4771 	  buf.align = TYPE_ALIGN (elttype);
4772 	  buf.alias_type = build_pointer_type (elttype);
4773 	  clear_padding_emit_loop (&buf, elttype, end);
4774 	}
4775     }
4776   else
4777     {
4778       if (!is_gimple_mem_ref_addr (buf.base))
4779 	{
4780 	  buf.base = make_ssa_name (TREE_TYPE (ptr));
4781 	  gimple *g = gimple_build_assign (buf.base, ptr);
4782 	  gimple_set_location (g, loc);
4783 	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
4784 	}
4785       buf.alias_type = build_pointer_type (type);
4786       clear_padding_type (&buf, type, buf.sz);
4787       clear_padding_flush (&buf, true);
4788     }
4789 
4790   gimple_stmt_iterator gsiprev2 = *gsi;
4791   gsi_prev (&gsiprev2);
4792   if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4793     gsi_replace (gsi, gimple_build_nop (), true);
4794   else
4795     {
4796       gsi_remove (gsi, true);
4797       *gsi = gsiprev2;
4798     }
4799   return true;
4800 }
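
/* As an illustrative sketch (assumed layout, not from the sources): for

     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);

   the call is folded away and replaced by stores that zero only the
   three padding bytes after S.C, leaving S.C and S.I untouched.  For
   C/C++ VLAs of such elements the code above instead emits a runtime
   loop that clears the padding of each element in turn.  */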
4801 
4802 /* Fold the non-target builtin at *GSI and return whether any simplification
4803    was made.  */
4804 
4805 static bool
4806 gimple_fold_builtin (gimple_stmt_iterator *gsi)
4807 {
4808   gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
4809   tree callee = gimple_call_fndecl (stmt);
4810 
4811   /* Give up for always_inline inline builtins until they are
4812      inlined.  */
4813   if (avoid_folding_inline_builtin (callee))
4814     return false;
4815 
4816   unsigned n = gimple_call_num_args (stmt);
4817   enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
4818   switch (fcode)
4819     {
4820     case BUILT_IN_BCMP:
4821       return gimple_fold_builtin_bcmp (gsi);
4822     case BUILT_IN_BCOPY:
4823       return gimple_fold_builtin_bcopy (gsi);
4824     case BUILT_IN_BZERO:
4825       return gimple_fold_builtin_bzero (gsi);
4826 
4827     case BUILT_IN_MEMSET:
4828       return gimple_fold_builtin_memset (gsi,
4829 					 gimple_call_arg (stmt, 1),
4830 					 gimple_call_arg (stmt, 2));
4831     case BUILT_IN_MEMCPY:
4832     case BUILT_IN_MEMPCPY:
4833     case BUILT_IN_MEMMOVE:
4834       return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
4835 					    gimple_call_arg (stmt, 1), fcode);
4836     case BUILT_IN_SPRINTF_CHK:
4837     case BUILT_IN_VSPRINTF_CHK:
4838       return gimple_fold_builtin_sprintf_chk (gsi, fcode);
4839     case BUILT_IN_STRCAT_CHK:
4840       return gimple_fold_builtin_strcat_chk (gsi);
4841     case BUILT_IN_STRNCAT_CHK:
4842       return gimple_fold_builtin_strncat_chk (gsi);
4843     case BUILT_IN_STRLEN:
4844       return gimple_fold_builtin_strlen (gsi);
4845     case BUILT_IN_STRCPY:
4846       return gimple_fold_builtin_strcpy (gsi,
4847 					 gimple_call_arg (stmt, 0),
4848 					 gimple_call_arg (stmt, 1));
4849     case BUILT_IN_STRNCPY:
4850       return gimple_fold_builtin_strncpy (gsi,
4851 					  gimple_call_arg (stmt, 0),
4852 					  gimple_call_arg (stmt, 1),
4853 					  gimple_call_arg (stmt, 2));
4854     case BUILT_IN_STRCAT:
4855       return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
4856 					 gimple_call_arg (stmt, 1));
4857     case BUILT_IN_STRNCAT:
4858       return gimple_fold_builtin_strncat (gsi);
4859     case BUILT_IN_INDEX:
4860     case BUILT_IN_STRCHR:
4861       return gimple_fold_builtin_strchr (gsi, false);
4862     case BUILT_IN_RINDEX:
4863     case BUILT_IN_STRRCHR:
4864       return gimple_fold_builtin_strchr (gsi, true);
4865     case BUILT_IN_STRSTR:
4866       return gimple_fold_builtin_strstr (gsi);
4867     case BUILT_IN_STRCMP:
4868     case BUILT_IN_STRCMP_EQ:
4869     case BUILT_IN_STRCASECMP:
4870     case BUILT_IN_STRNCMP:
4871     case BUILT_IN_STRNCMP_EQ:
4872     case BUILT_IN_STRNCASECMP:
4873       return gimple_fold_builtin_string_compare (gsi);
4874     case BUILT_IN_MEMCHR:
4875       return gimple_fold_builtin_memchr (gsi);
4876     case BUILT_IN_FPUTS:
4877       return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4878 					gimple_call_arg (stmt, 1), false);
4879     case BUILT_IN_FPUTS_UNLOCKED:
4880       return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4881 					gimple_call_arg (stmt, 1), true);
4882     case BUILT_IN_MEMCPY_CHK:
4883     case BUILT_IN_MEMPCPY_CHK:
4884     case BUILT_IN_MEMMOVE_CHK:
4885     case BUILT_IN_MEMSET_CHK:
4886       return gimple_fold_builtin_memory_chk (gsi,
4887 					     gimple_call_arg (stmt, 0),
4888 					     gimple_call_arg (stmt, 1),
4889 					     gimple_call_arg (stmt, 2),
4890 					     gimple_call_arg (stmt, 3),
4891 					     fcode);
4892     case BUILT_IN_STPCPY:
4893       return gimple_fold_builtin_stpcpy (gsi);
4894     case BUILT_IN_STRCPY_CHK:
4895     case BUILT_IN_STPCPY_CHK:
4896       return gimple_fold_builtin_stxcpy_chk (gsi,
4897 					     gimple_call_arg (stmt, 0),
4898 					     gimple_call_arg (stmt, 1),
4899 					     gimple_call_arg (stmt, 2),
4900 					     fcode);
4901     case BUILT_IN_STRNCPY_CHK:
4902     case BUILT_IN_STPNCPY_CHK:
4903       return gimple_fold_builtin_stxncpy_chk (gsi,
4904 					      gimple_call_arg (stmt, 0),
4905 					      gimple_call_arg (stmt, 1),
4906 					      gimple_call_arg (stmt, 2),
4907 					      gimple_call_arg (stmt, 3),
4908 					      fcode);
4909     case BUILT_IN_SNPRINTF_CHK:
4910     case BUILT_IN_VSNPRINTF_CHK:
4911       return gimple_fold_builtin_snprintf_chk (gsi, fcode);
4912 
4913     case BUILT_IN_FPRINTF:
4914     case BUILT_IN_FPRINTF_UNLOCKED:
4915     case BUILT_IN_VFPRINTF:
4916       if (n == 2 || n == 3)
4917 	return gimple_fold_builtin_fprintf (gsi,
4918 					    gimple_call_arg (stmt, 0),
4919 					    gimple_call_arg (stmt, 1),
4920 					    n == 3
4921 					    ? gimple_call_arg (stmt, 2)
4922 					    : NULL_TREE,
4923 					    fcode);
4924       break;
4925     case BUILT_IN_FPRINTF_CHK:
4926     case BUILT_IN_VFPRINTF_CHK:
4927       if (n == 3 || n == 4)
4928 	return gimple_fold_builtin_fprintf (gsi,
4929 					    gimple_call_arg (stmt, 0),
4930 					    gimple_call_arg (stmt, 2),
4931 					    n == 4
4932 					    ? gimple_call_arg (stmt, 3)
4933 					    : NULL_TREE,
4934 					    fcode);
4935       break;
4936     case BUILT_IN_PRINTF:
4937     case BUILT_IN_PRINTF_UNLOCKED:
4938     case BUILT_IN_VPRINTF:
4939       if (n == 1 || n == 2)
4940 	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4941 					   n == 2
4942 					   ? gimple_call_arg (stmt, 1)
4943 					   : NULL_TREE, fcode);
4944       break;
4945     case BUILT_IN_PRINTF_CHK:
4946     case BUILT_IN_VPRINTF_CHK:
4947       if (n == 2 || n == 3)
4948 	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4949 					   n == 3
4950 					   ? gimple_call_arg (stmt, 2)
4951 					   : NULL_TREE, fcode);
4952       break;
4953     case BUILT_IN_ACC_ON_DEVICE:
4954       return gimple_fold_builtin_acc_on_device (gsi,
4955 						gimple_call_arg (stmt, 0));
4956     case BUILT_IN_REALLOC:
4957       return gimple_fold_builtin_realloc (gsi);
4958 
4959     case BUILT_IN_CLEAR_PADDING:
4960       return gimple_fold_builtin_clear_padding (gsi);
4961 
4962     default:;
4963     }
4964 
4965   /* Try the generic builtin folder.  */
4966   bool ignore = (gimple_call_lhs (stmt) == NULL);
4967   tree result = fold_call_stmt (stmt, ignore);
4968   if (result)
4969     {
4970       if (ignore)
4971 	STRIP_NOPS (result);
4972       else
4973 	result = fold_convert (gimple_call_return_type (stmt), result);
4974       if (!update_call_from_tree (gsi, result))
4975 	gimplify_and_update_call_from_tree (gsi, result);
4976       return true;
4977     }
4978 
4979   return false;
4980 }
4981 
4982 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4983    function calls to constants, where possible.  */
4984 
4985 static tree
4986 fold_internal_goacc_dim (const gimple *call)
4987 {
4988   int axis = oacc_get_ifn_dim_arg (call);
4989   int size = oacc_get_fn_dim_size (current_function_decl, axis);
4990   tree result = NULL_TREE;
4991   tree type = TREE_TYPE (gimple_call_lhs (call));
4992 
4993   switch (gimple_call_internal_fn (call))
4994     {
4995     case IFN_GOACC_DIM_POS:
4996       /* If the size is 1, we know the answer.  */
4997       if (size == 1)
4998 	result = build_int_cst (type, 0);
4999       break;
5000     case IFN_GOACC_DIM_SIZE:
5001       /* If the size is not dynamic, we know the answer.  */
5002       if (size)
5003 	result = build_int_cst (type, size);
5004       break;
5005     default:
5006       break;
5007     }
5008 
5009   return result;
5010 }
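
/* E.g. (sketch): in an OpenACC offloaded function whose vector axis
   (axis 2) has been fixed to length 32, the internal call

     _1 = .GOACC_DIM_SIZE (2);

   folds to _1 = 32, and on an axis of size 1 .GOACC_DIM_POS can only
   ever be 0, so it folds to a zero constant; dynamic axes (size 0) are
   left alone.  */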
5011 
5012 /* Return true if STMT is an __atomic_compare_exchange_N call which is
5013    suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second
5014    argument is &var where var is only addressable because of such calls.  */
5015 
5016 bool
5017 optimize_atomic_compare_exchange_p (gimple *stmt)
5018 {
5019   if (gimple_call_num_args (stmt) != 6
5020       || !flag_inline_atomics
5021       || !optimize
5022       || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5023       || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5024       || !gimple_vdef (stmt)
5025       || !gimple_vuse (stmt))
5026     return false;
5027 
5028   tree fndecl = gimple_call_fndecl (stmt);
5029   switch (DECL_FUNCTION_CODE (fndecl))
5030     {
5031     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5032     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5033     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5034     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5035     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5036       break;
5037     default:
5038       return false;
5039     }
5040 
5041   tree expected = gimple_call_arg (stmt, 1);
5042   if (TREE_CODE (expected) != ADDR_EXPR
5043       || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5044     return false;
5045 
5046   tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5047   if (!is_gimple_reg_type (etype)
5048       || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5049       || TREE_THIS_VOLATILE (etype)
5050       || VECTOR_TYPE_P (etype)
5051       || TREE_CODE (etype) == COMPLEX_TYPE
5052       /* Don't optimize floating point expected vars; VIEW_CONVERT_EXPRs
5053 	 might not preserve all the bits.  See PR71716.  */
5054       || SCALAR_FLOAT_TYPE_P (etype)
5055       || maybe_ne (TYPE_PRECISION (etype),
5056 		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
5057     return false;
5058 
5059   tree weak = gimple_call_arg (stmt, 3);
5060   if (!integer_zerop (weak) && !integer_onep (weak))
5061     return false;
5062 
5063   tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5064   tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5065   machine_mode mode = TYPE_MODE (itype);
5066 
5067   if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5068       == CODE_FOR_nothing
5069       && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5070     return false;
5071 
5072   if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5073     return false;
5074 
5075   return true;
5076 }
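
/* An illustrative case where the predicate holds (sketch, assuming a
   target with a compare-and-swap pattern for SImode):

     int e = 0;
     _Bool r = __atomic_compare_exchange_n (p, &e, 1, 0, __ATOMIC_SEQ_CST,
					    __ATOMIC_SEQ_CST);

   Here E is a local int whose address escapes only into the call, the
   weak argument is a constant, and SImode has a CAS optab entry, so the
   call qualifies for the rewrite below and E no longer has to live in
   memory.  */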
5077 
5078 /* Fold
5079      r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5080    into
5081      _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5082      i = IMAGPART_EXPR <t>;
5083      r = (_Bool) i;
5084      e = REALPART_EXPR <t>;  */
5085 
5086 void
5087 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5088 {
5089   gimple *stmt = gsi_stmt (*gsi);
5090   tree fndecl = gimple_call_fndecl (stmt);
5091   tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5092   tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5093   tree ctype = build_complex_type (itype);
5094   tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5095   bool throws = false;
5096   edge e = NULL;
5097   gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5098 				   expected);
5099   gsi_insert_before (gsi, g, GSI_SAME_STMT);
5100   gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5101   if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5102     {
5103       g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5104 			       build1 (VIEW_CONVERT_EXPR, itype,
5105 				       gimple_assign_lhs (g)));
5106       gsi_insert_before (gsi, g, GSI_SAME_STMT);
5107     }
5108   int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5109 	     + int_size_in_bytes (itype);
5110   g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5111 				  gimple_call_arg (stmt, 0),
5112 				  gimple_assign_lhs (g),
5113 				  gimple_call_arg (stmt, 2),
5114 				  build_int_cst (integer_type_node, flag),
5115 				  gimple_call_arg (stmt, 4),
5116 				  gimple_call_arg (stmt, 5));
5117   tree lhs = make_ssa_name (ctype);
5118   gimple_call_set_lhs (g, lhs);
5119   gimple_move_vops (g, stmt);
5120   tree oldlhs = gimple_call_lhs (stmt);
5121   if (stmt_can_throw_internal (cfun, stmt))
5122     {
5123       throws = true;
5124       e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5125     }
5126   gimple_call_set_nothrow (as_a <gcall *> (g),
5127 			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5128   gimple_call_set_lhs (stmt, NULL_TREE);
5129   gsi_replace (gsi, g, true);
5130   if (oldlhs)
5131     {
5132       g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5133 			       build1 (IMAGPART_EXPR, itype, lhs));
5134       if (throws)
5135 	{
5136 	  gsi_insert_on_edge_immediate (e, g);
5137 	  *gsi = gsi_for_stmt (g);
5138 	}
5139       else
5140 	gsi_insert_after (gsi, g, GSI_NEW_STMT);
5141       g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5142       gsi_insert_after (gsi, g, GSI_NEW_STMT);
5143     }
5144   g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5145 			   build1 (REALPART_EXPR, itype, lhs));
5146   if (throws && oldlhs == NULL_TREE)
5147     {
5148       gsi_insert_on_edge_immediate (e, g);
5149       *gsi = gsi_for_stmt (g);
5150     }
5151   else
5152     gsi_insert_after (gsi, g, GSI_NEW_STMT);
5153   if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5154     {
5155       g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5156 			       VIEW_CONVERT_EXPR,
5157 			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5158 				       gimple_assign_lhs (g)));
5159       gsi_insert_after (gsi, g, GSI_NEW_STMT);
5160     }
5161   g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5162   gsi_insert_after (gsi, g, GSI_NEW_STMT);
5163   *gsi = gsiret;
5164 }
5165 
5166 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5167    doesn't fit into TYPE.  The overflow test is performed regardless of
5168    -fwrapv, and even for unsigned types.  */
5169 
5170 bool
5171 arith_overflowed_p (enum tree_code code, const_tree type,
5172 		    const_tree arg0, const_tree arg1)
5173 {
5174   widest2_int warg0 = widest2_int_cst (arg0);
5175   widest2_int warg1 = widest2_int_cst (arg1);
5176   widest2_int wres;
5177   switch (code)
5178     {
5179     case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5180     case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5181     case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5182     default: gcc_unreachable ();
5183     }
5184   signop sign = TYPE_SIGN (type);
5185   if (sign == UNSIGNED && wi::neg_p (wres))
5186     return true;
5187   return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5188 }
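
/* Worked example (sketch): for TYPE unsigned char and PLUS_EXPR,
   200 + 100 computes to 300 in the wide intermediate, which needs 9
   value bits, so the function returns true; likewise for signed char
   and (-100) + (-100) = -200.  The double-width widest2_int guarantees
   that even MULT_EXPR of two maximal-precision values cannot wrap in
   the intermediate.  */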
5189 
5190 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5191    for the memory it references, otherwise return null.  VECTYPE is the
5192    type of the memory vector.  */
5193 
5194 static tree
5195 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5196 {
5197   tree ptr = gimple_call_arg (call, 0);
5198   tree alias_align = gimple_call_arg (call, 1);
5199   tree mask = gimple_call_arg (call, 2);
5200   if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5201     return NULL_TREE;
5202 
5203   unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5204   if (TYPE_ALIGN (vectype) != align)
5205     vectype = build_aligned_type (vectype, align);
5206   tree offset = build_zero_cst (TREE_TYPE (alias_align));
5207   return fold_build2 (MEM_REF, vectype, ptr, offset);
5208 }
5209 
5210 /* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */
5211 
5212 static bool
5213 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5214 {
5215   tree lhs = gimple_call_lhs (call);
5216   if (!lhs)
5217     return false;
5218 
5219   if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5220     {
5221       gassign *new_stmt = gimple_build_assign (lhs, rhs);
5222       gimple_set_location (new_stmt, gimple_location (call));
5223       gimple_move_vops (new_stmt, call);
5224       gsi_replace (gsi, new_stmt, false);
5225       return true;
5226     }
5227   return false;
5228 }
5229 
5230 /* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */
5231 
5232 static bool
5233 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5234 {
5235   tree rhs = gimple_call_arg (call, 3);
5236   if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5237     {
5238       gassign *new_stmt = gimple_build_assign (lhs, rhs);
5239       gimple_set_location (new_stmt, gimple_location (call));
5240       gimple_move_vops (new_stmt, call);
5241       gsi_replace (gsi, new_stmt, false);
5242       return true;
5243     }
5244   return false;
5245 }
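
/* For example (sketch): a loop-masked load whose mask turned out to be
   a constant all-ones vector,

     vect__1 = .MASK_LOAD (ptr_2, align, { -1, -1, -1, -1 });

   is unconditional, so gimple_fold_mask_load rewrites it as a plain
   vector load

     vect__1 = MEM <vector(4) int> [(int *)ptr_2];

   (re-aligning VECTYPE per the align argument); .MASK_STORE is handled
   symmetrically in the store direction.  */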
5246 
5247 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5248    The statement may be replaced by another statement, e.g., if the call
5249    simplifies to a constant value. Return true if any changes were made.
5250    It is assumed that the operands have been previously folded.  */
5251 
5252 static bool
5253 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5254 {
5255   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5256   tree callee;
5257   bool changed = false;
5258   unsigned i;
5259 
5260   /* Fold *& in call arguments.  */
5261   for (i = 0; i < gimple_call_num_args (stmt); ++i)
5262     if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
5263       {
5264 	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
5265 	if (tmp)
5266 	  {
5267 	    gimple_call_set_arg (stmt, i, tmp);
5268 	    changed = true;
5269 	  }
5270       }
5271 
5272   /* Check for virtual calls that became direct calls.  */
5273   callee = gimple_call_fn (stmt);
5274   if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5275     {
5276       if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5277 	{
5278           if (dump_file && virtual_method_call_p (callee)
5279 	      && !possible_polymorphic_call_target_p
5280 		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5281 						     (OBJ_TYPE_REF_EXPR (callee)))))
5282 	    {
5283 	      fprintf (dump_file,
5284 		       "Type inheritance inconsistent devirtualization of ");
5285 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5286 	      fprintf (dump_file, " to ");
5287 	      print_generic_expr (dump_file, callee, TDF_SLIM);
5288 	      fprintf (dump_file, "\n");
5289 	    }
5290 
5291 	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5292 	  changed = true;
5293 	}
5294       else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5295 	{
5296 	  bool final;
5297 	  vec <cgraph_node *>targets
5298 	    = possible_polymorphic_call_targets (callee, stmt, &final);
5299 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
5300 	    {
5301 	      tree lhs = gimple_call_lhs (stmt);
5302 	      if (dump_enabled_p ())
5303 		{
5304 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5305 				   "folding virtual function call to %s\n",
5306 		 		   targets.length () == 1
5307 		  		   ? targets[0]->name ()
5308 		  		   : "__builtin_unreachable");
5309 		}
5310 	      if (targets.length () == 1)
5311 		{
5312 		  tree fndecl = targets[0]->decl;
5313 		  gimple_call_set_fndecl (stmt, fndecl);
5314 		  changed = true;
5315 		  /* If changing the call to __cxa_pure_virtual
5316 		     or similar noreturn function, adjust gimple_call_fntype
5317 		     too.  */
5318 		  if (gimple_call_noreturn_p (stmt)
5319 		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5320 		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5321 		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5322 			  == void_type_node))
5323 		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5324 		  /* If the call becomes noreturn, remove the lhs.  */
5325 		  if (lhs
5326 		      && gimple_call_noreturn_p (stmt)
5327 		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5328 			  || should_remove_lhs_p (lhs)))
5329 		    {
5330 		      if (TREE_CODE (lhs) == SSA_NAME)
5331 			{
5332 			  tree var = create_tmp_var (TREE_TYPE (lhs));
5333 			  tree def = get_or_create_ssa_default_def (cfun, var);
5334 			  gimple *new_stmt = gimple_build_assign (lhs, def);
5335 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5336 			}
5337 		      gimple_call_set_lhs (stmt, NULL_TREE);
5338 		    }
5339 		  maybe_remove_unused_call_args (cfun, stmt);
5340 		}
5341 	      else
5342 		{
5343 		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5344 		  gimple *new_stmt = gimple_build_call (fndecl, 0);
5345 		  gimple_set_location (new_stmt, gimple_location (stmt));
5346 		  /* If the call had an SSA name as lhs, morph that into
5347 		     an uninitialized value.  */
5348 		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
5349 		    {
5350 		      tree var = create_tmp_var (TREE_TYPE (lhs));
5351 		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5352 		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5353 		      set_ssa_default_def (cfun, var, lhs);
5354 		    }
5355 		  gimple_move_vops (new_stmt, stmt);
5356 		  gsi_replace (gsi, new_stmt, false);
5357 		  return true;
5358 		}
5359 	    }
5360 	}
5361     }
5362 
5363   /* Check for indirect calls that became direct calls, and then
5364      no longer require a static chain.  */
5365   if (gimple_call_chain (stmt))
5366     {
5367       tree fn = gimple_call_fndecl (stmt);
5368       if (fn && !DECL_STATIC_CHAIN (fn))
5369 	{
5370 	  gimple_call_set_chain (stmt, NULL);
5371 	  changed = true;
5372 	}
5373       else
5374 	{
5375 	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
5376 	  if (tmp)
5377 	    {
5378 	      gimple_call_set_chain (stmt, tmp);
5379 	      changed = true;
5380 	    }
5381 	}
5382     }
5383 
5384   if (inplace)
5385     return changed;
5386 
5387   /* Check for builtins that CCP can handle using information not
5388      available in the generic fold routines.  */
5389   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5390     {
5391       if (gimple_fold_builtin (gsi))
5392         changed = true;
5393     }
5394   else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5395     {
5396 	changed |= targetm.gimple_fold_builtin (gsi);
5397     }
5398   else if (gimple_call_internal_p (stmt))
5399     {
5400       enum tree_code subcode = ERROR_MARK;
5401       tree result = NULL_TREE;
5402       bool cplx_result = false;
5403       tree overflow = NULL_TREE;
5404       switch (gimple_call_internal_fn (stmt))
5405 	{
5406 	case IFN_BUILTIN_EXPECT:
5407 	  result = fold_builtin_expect (gimple_location (stmt),
5408 					gimple_call_arg (stmt, 0),
5409 					gimple_call_arg (stmt, 1),
5410 					gimple_call_arg (stmt, 2),
5411 					NULL_TREE);
5412 	  break;
5413 	case IFN_UBSAN_OBJECT_SIZE:
5414 	  {
5415 	    tree offset = gimple_call_arg (stmt, 1);
5416 	    tree objsize = gimple_call_arg (stmt, 2);
5417 	    if (integer_all_onesp (objsize)
5418 		|| (TREE_CODE (offset) == INTEGER_CST
5419 		    && TREE_CODE (objsize) == INTEGER_CST
5420 		    && tree_int_cst_le (offset, objsize)))
5421 	      {
5422 		replace_call_with_value (gsi, NULL_TREE);
5423 		return true;
5424 	      }
5425 	  }
5426 	  break;
5427 	case IFN_UBSAN_PTR:
5428 	  if (integer_zerop (gimple_call_arg (stmt, 1)))
5429 	    {
5430 	      replace_call_with_value (gsi, NULL_TREE);
5431 	      return true;
5432 	    }
5433 	  break;
5434 	case IFN_UBSAN_BOUNDS:
5435 	  {
5436 	    tree index = gimple_call_arg (stmt, 1);
5437 	    tree bound = gimple_call_arg (stmt, 2);
5438 	    if (TREE_CODE (index) == INTEGER_CST
5439 		&& TREE_CODE (bound) == INTEGER_CST)
5440 	      {
5441 		index = fold_convert (TREE_TYPE (bound), index);
5442 		if (TREE_CODE (index) == INTEGER_CST
5443 		    && tree_int_cst_le (index, bound))
5444 		  {
5445 		    replace_call_with_value (gsi, NULL_TREE);
5446 		    return true;
5447 		  }
5448 	      }
5449 	  }
5450 	  break;
5451 	case IFN_GOACC_DIM_SIZE:
5452 	case IFN_GOACC_DIM_POS:
5453 	  result = fold_internal_goacc_dim (stmt);
5454 	  break;
5455 	case IFN_UBSAN_CHECK_ADD:
5456 	  subcode = PLUS_EXPR;
5457 	  break;
5458 	case IFN_UBSAN_CHECK_SUB:
5459 	  subcode = MINUS_EXPR;
5460 	  break;
5461 	case IFN_UBSAN_CHECK_MUL:
5462 	  subcode = MULT_EXPR;
5463 	  break;
5464 	case IFN_ADD_OVERFLOW:
5465 	  subcode = PLUS_EXPR;
5466 	  cplx_result = true;
5467 	  break;
5468 	case IFN_SUB_OVERFLOW:
5469 	  subcode = MINUS_EXPR;
5470 	  cplx_result = true;
5471 	  break;
5472 	case IFN_MUL_OVERFLOW:
5473 	  subcode = MULT_EXPR;
5474 	  cplx_result = true;
5475 	  break;
5476 	case IFN_MASK_LOAD:
5477 	  changed |= gimple_fold_mask_load (gsi, stmt);
5478 	  break;
5479 	case IFN_MASK_STORE:
5480 	  changed |= gimple_fold_mask_store (gsi, stmt);
5481 	  break;
5482 	default:
5483 	  break;
5484 	}
5485       if (subcode != ERROR_MARK)
5486 	{
5487 	  tree arg0 = gimple_call_arg (stmt, 0);
5488 	  tree arg1 = gimple_call_arg (stmt, 1);
5489 	  tree type = TREE_TYPE (arg0);
5490 	  if (cplx_result)
5491 	    {
5492 	      tree lhs = gimple_call_lhs (stmt);
5493 	      if (lhs == NULL_TREE)
5494 		type = NULL_TREE;
5495 	      else
5496 		type = TREE_TYPE (TREE_TYPE (lhs));
5497 	    }
5498 	  if (type == NULL_TREE)
5499 	    ;
5500 	  /* x = y + 0; x = y - 0; x = y * 0; */
5501 	  else if (integer_zerop (arg1))
5502 	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5503 	  /* x = 0 + y; x = 0 * y; */
5504 	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5505 	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5506 	  /* x = y - y; */
5507 	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5508 	    result = integer_zero_node;
5509 	  /* x = y * 1; x = 1 * y; */
5510 	  else if (subcode == MULT_EXPR && integer_onep (arg1))
5511 	    result = arg0;
5512 	  else if (subcode == MULT_EXPR && integer_onep (arg0))
5513 	    result = arg1;
5514 	  else if (TREE_CODE (arg0) == INTEGER_CST
5515 		   && TREE_CODE (arg1) == INTEGER_CST)
5516 	    {
5517 	      if (cplx_result)
5518 		result = int_const_binop (subcode, fold_convert (type, arg0),
5519 					  fold_convert (type, arg1));
5520 	      else
5521 		result = int_const_binop (subcode, arg0, arg1);
5522 	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5523 		{
5524 		  if (cplx_result)
5525 		    overflow = build_one_cst (type);
5526 		  else
5527 		    result = NULL_TREE;
5528 		}
5529 	    }
5530 	  if (result)
5531 	    {
5532 	      if (result == integer_zero_node)
5533 		result = build_zero_cst (type);
5534 	      else if (cplx_result && TREE_TYPE (result) != type)
5535 		{
5536 		  if (TREE_CODE (result) == INTEGER_CST)
5537 		    {
5538 		      if (arith_overflowed_p (PLUS_EXPR, type, result,
5539 					      integer_zero_node))
5540 			overflow = build_one_cst (type);
5541 		    }
5542 		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5543 			    && TYPE_UNSIGNED (type))
5544 			   || (TYPE_PRECISION (type)
5545 			       < (TYPE_PRECISION (TREE_TYPE (result))
5546 				  + (TYPE_UNSIGNED (TREE_TYPE (result))
5547 				     && !TYPE_UNSIGNED (type)))))
5548 		    result = NULL_TREE;
5549 		  if (result)
5550 		    result = fold_convert (type, result);
5551 		}
5552 	    }
5553 	}
5554 
5555       if (result)
5556 	{
5557 	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5558 	    result = drop_tree_overflow (result);
5559 	  if (cplx_result)
5560 	    {
5561 	      if (overflow == NULL_TREE)
5562 		overflow = build_zero_cst (TREE_TYPE (result));
5563 	      tree ctype = build_complex_type (TREE_TYPE (result));
5564 	      if (TREE_CODE (result) == INTEGER_CST
5565 		  && TREE_CODE (overflow) == INTEGER_CST)
5566 		result = build_complex (ctype, result, overflow);
5567 	      else
5568 		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5569 				     ctype, result, overflow);
5570 	    }
5571 	  if (!update_call_from_tree (gsi, result))
5572 	    gimplify_and_update_call_from_tree (gsi, result);
5573 	  changed = true;
5574 	}
5575     }
5576 
5577   return changed;
5578 }
5579 
5580 
5581 /* Return true if NAME has a use on STMT.  */
5582 
5583 static bool
5584 has_use_on_stmt (tree name, gimple *stmt)
5585 {
5586   imm_use_iterator iter;
5587   use_operand_p use_p;
5588   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5589     if (USE_STMT (use_p) == stmt)
5590       return true;
5591   return false;
5592 }
5593 
5594 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5595    gimple_simplify.
5596 
5597    Replaces *GSI with the simplification result in RES_OP
5598    and the associated statements in *SEQ.  Does the replacement
5599    according to INPLACE and returns true if the operation succeeded.  */
5600 
5601 static bool
5602 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5603 				  gimple_match_op *res_op,
5604 				  gimple_seq *seq, bool inplace)
5605 {
5606   gimple *stmt = gsi_stmt (*gsi);
5607   tree *ops = res_op->ops;
5608   unsigned int num_ops = res_op->num_ops;
5609 
5610   /* Play safe and do not allow abnormals to be mentioned in
5611      newly created statements.  See also maybe_push_res_to_seq.
5612      As an exception allow such uses if there was a use of the
5613      same SSA name on the old stmt.  */
5614   for (unsigned int i = 0; i < num_ops; ++i)
5615     if (TREE_CODE (ops[i]) == SSA_NAME
5616 	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5617 	&& !has_use_on_stmt (ops[i], stmt))
5618       return false;
5619 
5620   if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5621     for (unsigned int i = 0; i < 2; ++i)
5622       if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5623 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5624 	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5625 	return false;
5626 
5627   /* Don't insert new statements when INPLACE is true, even if we could
5628      reuse STMT for the final statement.  */
5629   if (inplace && !gimple_seq_empty_p (*seq))
5630     return false;
5631 
5632   if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5633     {
5634       gcc_assert (res_op->code.is_tree_code ());
5635       if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
5636 	  /* A GIMPLE_COND's condition may not throw.  */
5637 	  && (!flag_exceptions
5638 	      || !cfun->can_throw_non_call_exceptions
5639 	      || !operation_could_trap_p (res_op->code,
5640 					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5641 					  false, NULL_TREE)))
5642 	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5643       else if (res_op->code == SSA_NAME)
5644 	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5645 				   build_zero_cst (TREE_TYPE (ops[0])));
5646       else if (res_op->code == INTEGER_CST)
5647 	{
5648 	  if (integer_zerop (ops[0]))
5649 	    gimple_cond_make_false (cond_stmt);
5650 	  else
5651 	    gimple_cond_make_true (cond_stmt);
5652 	}
5653       else if (!inplace)
5654 	{
5655 	  tree res = maybe_push_res_to_seq (res_op, seq);
5656 	  if (!res)
5657 	    return false;
5658 	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5659 				     build_zero_cst (TREE_TYPE (res)));
5660 	}
5661       else
5662 	return false;
5663       if (dump_file && (dump_flags & TDF_DETAILS))
5664 	{
5665 	  fprintf (dump_file, "gimple_simplified to ");
5666 	  if (!gimple_seq_empty_p (*seq))
5667 	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5668 	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5669 			     0, TDF_SLIM);
5670 	}
5671       gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5672       return true;
5673     }
5674   else if (is_gimple_assign (stmt)
5675 	   && res_op->code.is_tree_code ())
5676     {
5677       if (!inplace
5678 	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
5679 	{
5680 	  maybe_build_generic_op (res_op);
5681 	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5682 					  res_op->op_or_null (0),
5683 					  res_op->op_or_null (1),
5684 					  res_op->op_or_null (2));
5685 	  if (dump_file && (dump_flags & TDF_DETAILS))
5686 	    {
5687 	      fprintf (dump_file, "gimple_simplified to ");
5688 	      if (!gimple_seq_empty_p (*seq))
5689 		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5690 	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5691 				 0, TDF_SLIM);
5692 	    }
5693 	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5694 	  return true;
5695 	}
5696     }
5697   else if (res_op->code.is_fn_code ()
5698 	   && gimple_call_combined_fn (stmt) == res_op->code)
5699     {
5700       gcc_assert (num_ops == gimple_call_num_args (stmt));
5701       for (unsigned int i = 0; i < num_ops; ++i)
5702 	gimple_call_set_arg (stmt, i, ops[i]);
5703       if (dump_file && (dump_flags & TDF_DETAILS))
5704 	{
5705 	  fprintf (dump_file, "gimple_simplified to ");
5706 	  if (!gimple_seq_empty_p (*seq))
5707 	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5708 	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5709 	}
5710       gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5711       return true;
5712     }
5713   else if (!inplace)
5714     {
5715       if (gimple_has_lhs (stmt))
5716 	{
5717 	  tree lhs = gimple_get_lhs (stmt);
5718 	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
5719 	    return false;
5720 	  if (dump_file && (dump_flags & TDF_DETAILS))
5721 	    {
5722 	      fprintf (dump_file, "gimple_simplified to ");
5723 	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5724 	    }
5725 	  gsi_replace_with_seq_vops (gsi, *seq);
5726 	  return true;
5727 	}
5728       else
5729 	gcc_unreachable ();
5730     }
5731 
5732   return false;
5733 }
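
/* E.g. (sketch): if gimple_simplify reduces the condition of

     if (a_1 != a_1)

   to the integer constant 0 for an integral A_1, the GIMPLE_COND is
   rewritten in place via gimple_cond_make_false; had the result been an
   SSA name x_2 instead, the condition would become x_2 != 0.  */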
5734 
5735 /* Canonicalize a MEM_REF's invariant address operand after propagation.  */
5736 
5737 static bool
5738 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5739 {
5740   bool res = false;
5741   tree *orig_t = t;
5742 
5743   if (TREE_CODE (*t) == ADDR_EXPR)
5744     t = &TREE_OPERAND (*t, 0);
5745 
5746   /* The C and C++ frontends use an ARRAY_REF for indexing with their
5747      generic vector extension.  The actual vector referenced is
5748      view-converted to an array type for this purpose.  If the index
5749      is constant, the canonical representation in the middle-end is a
5750      BIT_FIELD_REF, so rewrite the former to the latter here.  */
5751   if (TREE_CODE (*t) == ARRAY_REF
5752       && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5753       && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5754       && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5755     {
5756       tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5757       if (VECTOR_TYPE_P (vtype))
5758 	{
5759 	  tree low = array_ref_low_bound (*t);
5760 	  if (TREE_CODE (low) == INTEGER_CST)
5761 	    {
5762 	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5763 		{
5764 		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5765 					    wi::to_widest (low));
5766 		  idx = wi::mul (idx, wi::to_widest
5767 					 (TYPE_SIZE (TREE_TYPE (*t))));
5768 		  widest_int ext
5769 		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5770 		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5771 		    {
5772 		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5773 				       TREE_TYPE (*t),
5774 				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5775 				       TYPE_SIZE (TREE_TYPE (*t)),
5776 				       wide_int_to_tree (bitsizetype, idx));
5777 		      res = true;
5778 		    }
5779 		}
5780 	    }
5781 	}
5782     }
5783 
5784   while (handled_component_p (*t))
5785     t = &TREE_OPERAND (*t, 0);
5786 
5787   /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
5788      of invariant addresses into an SSA name MEM_REF address.  */
5789   if (TREE_CODE (*t) == MEM_REF
5790       || TREE_CODE (*t) == TARGET_MEM_REF)
5791     {
5792       tree addr = TREE_OPERAND (*t, 0);
5793       if (TREE_CODE (addr) == ADDR_EXPR
5794 	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5795 	      || handled_component_p (TREE_OPERAND (addr, 0))))
5796 	{
5797 	  tree base;
5798 	  poly_int64 coffset;
5799 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5800 						&coffset);
5801 	  if (!base)
5802 	    {
5803 	      if (is_debug)
5804 		return false;
5805 	      gcc_unreachable ();
5806 	    }
5807 
5808 	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5809 	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5810 						  TREE_OPERAND (*t, 1),
5811 						  size_int (coffset));
5812 	  res = true;
5813 	}
5814       gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5815 			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5816     }
5817 
5818   /* Canonicalize back MEM_REFs to plain reference trees if the object
5819      accessed is a decl that has the same access semantics as the MEM_REF.  */
5820   if (TREE_CODE (*t) == MEM_REF
5821       && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5822       && integer_zerop (TREE_OPERAND (*t, 1))
5823       && MR_DEPENDENCE_CLIQUE (*t) == 0)
5824     {
5825       tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5826       tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5827       if (/* Same volatile qualification.  */
5828 	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5829 	  /* Same TBAA behavior with -fstrict-aliasing.  */
5830 	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
5831 	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
5832 	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
5833 	  /* Same alignment.  */
5834 	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
5835 	  /* We have to look out here to not drop a required conversion
5836 	     from the rhs to the lhs if *t appears on the lhs or vice-versa
5837 	     if it appears on the rhs.  Thus require strict type
5838 	     compatibility.  */
5839 	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
5840 	{
5841 	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5842 	  res = true;
5843 	}
5844     }
5845 
5846   else if (TREE_CODE (*orig_t) == ADDR_EXPR
5847 	   && TREE_CODE (*t) == MEM_REF
5848 	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
5849     {
5850       tree base;
5851       poly_int64 coffset;
5852       base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
5853 					    &coffset);
5854       if (base)
5855 	{
5856 	  gcc_assert (TREE_CODE (base) == MEM_REF);
5857 	  poly_int64 moffset;
5858 	  if (mem_ref_offset (base).to_shwi (&moffset))
5859 	    {
5860 	      coffset += moffset;
5861 	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
5862 		{
5863 		  coffset += moffset;
5864 		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
5865 		  return true;
5866 		}
5867 	    }
5868 	}
5869     }
5870 
5871   /* Canonicalize TARGET_MEM_REF in particular with respect to
5872      the indexes becoming constant.  */
5873   else if (TREE_CODE (*t) == TARGET_MEM_REF)
5874     {
5875       tree tem = maybe_fold_tmr (*t);
5876       if (tem)
5877 	{
5878 	  *t = tem;
5879 	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
5880 	    recompute_tree_invariant_for_addr_expr (*orig_t);
5881 	  res = true;
5882 	}
5883     }
5884 
5885   return res;
5886 }
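
/* Two typical effects of the canonicalization above (sketch, using the
   MEM [...] notation of the comments in this function):

     MEM [&a, 0]	becomes   a		       (types compatible)
     MEM [&s.f, 4]	becomes   MEM [&s, off + 4]    (off = offset of F)

   keeping the address operand in the is_gimple_mem_ref_addr form that
   the rest of the middle-end expects.  */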
5887 
5888 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
5889    distinguishes both cases.  */
5890 
5891 static bool
5892 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
5893 {
5894   bool changed = false;
5895   gimple *stmt = gsi_stmt (*gsi);
5896   bool nowarning = gimple_no_warning_p (stmt);
5897   unsigned i;
5898   fold_defer_overflow_warnings ();
5899 
5900   /* First do required canonicalization of [TARGET_]MEM_REF addresses
5901      after propagation.
5902      ???  This shouldn't be done in generic folding but in the
5903      propagation helpers which also know whether an address was
5904      propagated.
5905      Also canonicalize operand order.  */
5906   switch (gimple_code (stmt))
5907     {
5908     case GIMPLE_ASSIGN:
5909       if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
5910 	{
5911 	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
5912 	  if ((REFERENCE_CLASS_P (*rhs)
5913 	       || TREE_CODE (*rhs) == ADDR_EXPR)
5914 	      && maybe_canonicalize_mem_ref_addr (rhs))
5915 	    changed = true;
5916 	  tree *lhs = gimple_assign_lhs_ptr (stmt);
5917 	  if (REFERENCE_CLASS_P (*lhs)
5918 	      && maybe_canonicalize_mem_ref_addr (lhs))
5919 	    changed = true;
5920 	}
5921       else
5922 	{
5923 	  /* Canonicalize operand order.  */
5924 	  enum tree_code code = gimple_assign_rhs_code (stmt);
5925 	  if (TREE_CODE_CLASS (code) == tcc_comparison
5926 	      || commutative_tree_code (code)
5927 	      || commutative_ternary_tree_code (code))
5928 	    {
5929 	      tree rhs1 = gimple_assign_rhs1 (stmt);
5930 	      tree rhs2 = gimple_assign_rhs2 (stmt);
5931 	      if (tree_swap_operands_p (rhs1, rhs2))
5932 		{
5933 		  gimple_assign_set_rhs1 (stmt, rhs2);
5934 		  gimple_assign_set_rhs2 (stmt, rhs1);
5935 		  if (TREE_CODE_CLASS (code) == tcc_comparison)
5936 		    gimple_assign_set_rhs_code (stmt,
5937 						swap_tree_comparison (code));
5938 		  changed = true;
5939 		}
5940 	    }
5941 	}
5942       break;
5943     case GIMPLE_CALL:
5944       {
5945 	for (i = 0; i < gimple_call_num_args (stmt); ++i)
5946 	  {
5947 	    tree *arg = gimple_call_arg_ptr (stmt, i);
5948 	    if (REFERENCE_CLASS_P (*arg)
5949 		&& maybe_canonicalize_mem_ref_addr (arg))
5950 	      changed = true;
5951 	  }
5952 	tree *lhs = gimple_call_lhs_ptr (stmt);
5953 	if (*lhs
5954 	    && REFERENCE_CLASS_P (*lhs)
5955 	    && maybe_canonicalize_mem_ref_addr (lhs))
5956 	  changed = true;
5957 	break;
5958       }
5959     case GIMPLE_ASM:
5960       {
5961 	gasm *asm_stmt = as_a <gasm *> (stmt);
5962 	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5963 	  {
5964 	    tree link = gimple_asm_output_op (asm_stmt, i);
5965 	    tree op = TREE_VALUE (link);
5966 	    if (REFERENCE_CLASS_P (op)
5967 		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5968 	      changed = true;
5969 	  }
5970 	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5971 	  {
5972 	    tree link = gimple_asm_input_op (asm_stmt, i);
5973 	    tree op = TREE_VALUE (link);
5974 	    if ((REFERENCE_CLASS_P (op)
5975 		 || TREE_CODE (op) == ADDR_EXPR)
5976 		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5977 	      changed = true;
5978 	  }
5979       }
5980       break;
5981     case GIMPLE_DEBUG:
5982       if (gimple_debug_bind_p (stmt))
5983 	{
5984 	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
5985 	  if (*val
5986 	      && (REFERENCE_CLASS_P (*val)
5987 		  || TREE_CODE (*val) == ADDR_EXPR)
5988 	      && maybe_canonicalize_mem_ref_addr (val, true))
5989 	    changed = true;
5990 	}
5991       break;
5992     case GIMPLE_COND:
5993       {
5994 	/* Canonicalize operand order.  */
5995 	tree lhs = gimple_cond_lhs (stmt);
5996 	tree rhs = gimple_cond_rhs (stmt);
5997 	if (tree_swap_operands_p (lhs, rhs))
5998 	  {
5999 	    gcond *gc = as_a <gcond *> (stmt);
6000 	    gimple_cond_set_lhs (gc, rhs);
6001 	    gimple_cond_set_rhs (gc, lhs);
6002 	    gimple_cond_set_code (gc,
6003 				  swap_tree_comparison (gimple_cond_code (gc)));
6004 	    changed = true;
6005 	  }
6006       }
6007     default:;
6008     }
6009 
6010   /* Dispatch to pattern-based folding.  */
6011   if (!inplace
6012       || is_gimple_assign (stmt)
6013       || gimple_code (stmt) == GIMPLE_COND)
6014     {
6015       gimple_seq seq = NULL;
6016       gimple_match_op res_op;
6017       if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6018 			   valueize, valueize))
6019 	{
6020 	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6021 	    changed = true;
6022 	  else
6023 	    gimple_seq_discard (seq);
6024 	}
6025     }
6026 
6027   stmt = gsi_stmt (*gsi);
6028 
6029   /* Fold the main computation performed by the statement.  */
6030   switch (gimple_code (stmt))
6031     {
6032     case GIMPLE_ASSIGN:
6033       {
6034 	/* For boolean-typed X, try to canonicalize the comparisons
6035 	   X == 0, X == 1, X != 0, and X != 1.  */
6036 	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6037 	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
6038 	  {
6039 	    tree lhs = gimple_assign_lhs (stmt);
6040 	    tree op1 = gimple_assign_rhs1 (stmt);
6041 	    tree op2 = gimple_assign_rhs2 (stmt);
6042 	    tree type = TREE_TYPE (op1);
6043 
6044 	    /* Check whether the comparison operands are of the same boolean
6045 	       type as the result type.
6046 	       Check that the second operand is an integer constant with value
6047 	       one or zero.  */
6048 	    if (TREE_CODE (op2) == INTEGER_CST
6049 		&& (integer_zerop (op2) || integer_onep (op2))
6050 		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
6051 	      {
6052 		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6053 		bool is_logical_not = false;
6054 
6055 		/* X == 0 and X != 1 is a logical-not of X,
6056 		   X == 1 and X != 0 is X.  */
6057 		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6058 		    || (cmp_code == NE_EXPR && integer_onep (op2)))
6059 		  is_logical_not = true;
6060 
6061 		if (is_logical_not == false)
6062 		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6063 		/* Only for X of one-bit precision is the transformation
6064 		   !X -> ~X valid.  */
6065 		else if (TYPE_PRECISION (type) == 1)
6066 		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6067 		/* Otherwise we use !X -> X ^ 1.  */
6068 		else
6069 		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6070 						  build_int_cst (type, 1));
6071 		changed = true;
6072 		break;
6073 	      }
6074 	  }
6075 
6076 	unsigned old_num_ops = gimple_num_ops (stmt);
6077 	tree lhs = gimple_assign_lhs (stmt);
6078 	tree new_rhs = fold_gimple_assign (gsi);
6079 	if (new_rhs
6080 	    && !useless_type_conversion_p (TREE_TYPE (lhs),
6081 					   TREE_TYPE (new_rhs)))
6082 	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6083 	if (new_rhs
6084 	    && (!inplace
6085 		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6086 	  {
6087 	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6088 	    changed = true;
6089 	  }
6090 	break;
6091       }
6092 
6093     case GIMPLE_CALL:
6094       changed |= gimple_fold_call (gsi, inplace);
6095       break;
6096 
6097     case GIMPLE_ASM:
6098       /* Fold *& in asm operands.  */
6099       {
6100 	gasm *asm_stmt = as_a <gasm *> (stmt);
6101 	size_t noutputs;
6102 	const char **oconstraints;
6103 	const char *constraint;
6104 	bool allows_mem, allows_reg;
6105 
6106 	noutputs = gimple_asm_noutputs (asm_stmt);
6107 	oconstraints = XALLOCAVEC (const char *, noutputs);
6108 
6109 	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6110 	  {
6111 	    tree link = gimple_asm_output_op (asm_stmt, i);
6112 	    tree op = TREE_VALUE (link);
6113 	    oconstraints[i]
6114 	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6115 	    if (REFERENCE_CLASS_P (op)
6116 		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
6117 	      {
6118 		TREE_VALUE (link) = op;
6119 		changed = true;
6120 	      }
6121 	  }
6122 	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6123 	  {
6124 	    tree link = gimple_asm_input_op (asm_stmt, i);
6125 	    tree op = TREE_VALUE (link);
6126 	    constraint
6127 	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6128 	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6129 				    oconstraints, &allows_mem, &allows_reg);
6130 	    if (REFERENCE_CLASS_P (op)
6131 		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
6132 		   != NULL_TREE)
6133 	      {
6134 		TREE_VALUE (link) = op;
6135 		changed = true;
6136 	      }
6137 	  }
6138       }
6139       break;
6140 
6141     case GIMPLE_DEBUG:
6142       if (gimple_debug_bind_p (stmt))
6143 	{
6144 	  tree val = gimple_debug_bind_get_value (stmt);
6145 	  if (val
6146 	      && REFERENCE_CLASS_P (val))
6147 	    {
6148 	      tree tem = maybe_fold_reference (val, false);
6149 	      if (tem)
6150 		{
6151 		  gimple_debug_bind_set_value (stmt, tem);
6152 		  changed = true;
6153 		}
6154 	    }
6155 	  else if (val
6156 		   && TREE_CODE (val) == ADDR_EXPR)
6157 	    {
6158 	      tree ref = TREE_OPERAND (val, 0);
6159 	      tree tem = maybe_fold_reference (ref, false);
6160 	      if (tem)
6161 		{
6162 		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6163 		  gimple_debug_bind_set_value (stmt, tem);
6164 		  changed = true;
6165 		}
6166 	    }
6167 	}
6168       break;
6169 
6170     case GIMPLE_RETURN:
6171       {
6172 	greturn *ret_stmt = as_a<greturn *> (stmt);
6173 	tree ret = gimple_return_retval (ret_stmt);
6174 
6175 	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6176 	  {
6177 	    tree val = valueize (ret);
6178 	    if (val && val != ret
6179 		&& may_propagate_copy (ret, val))
6180 	      {
6181 		gimple_return_set_retval (ret_stmt, val);
6182 		changed = true;
6183 	      }
6184 	  }
6185       }
6186       break;
6187 
6188     default:;
6189     }
6190 
6191   stmt = gsi_stmt (*gsi);
6192 
6193   /* Fold *& on the lhs.  */
6194   if (gimple_has_lhs (stmt))
6195     {
6196       tree lhs = gimple_get_lhs (stmt);
6197       if (lhs && REFERENCE_CLASS_P (lhs))
6198 	{
6199 	  tree new_lhs = maybe_fold_reference (lhs, true);
6200 	  if (new_lhs)
6201 	    {
6202 	      gimple_set_lhs (stmt, new_lhs);
6203 	      changed = true;
6204 	    }
6205 	}
6206     }
6207 
6208   fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6209   return changed;
6210 }
6211 
6212 /* Valueization callback that ends up not following SSA edges.  */
6213 
6214 tree
6215 no_follow_ssa_edges (tree)
6216 {
6217   return NULL_TREE;
6218 }
6219 
6220 /* Valueization callback that ends up following single-use SSA edges only.  */
6221 
6222 tree
6223 follow_single_use_edges (tree val)
6224 {
6225   if (TREE_CODE (val) == SSA_NAME
6226       && !has_single_use (val))
6227     return NULL_TREE;
6228   return val;
6229 }
6230 
6231 /* Valueization callback that follows all SSA edges.  */
6232 
6233 tree
6234 follow_all_ssa_edges (tree val)
6235 {
6236   return val;
6237 }
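
/* The valueization callback bounds how far simplification may look
   through SSA definitions.  E.g. (sketch):

     fold_stmt (&gsi, follow_all_ssa_edges);

   lets patterns look through all defining statements, whereas the
   single-argument fold_stmt overload below passes no_follow_ssa_edges
   and so only considers the statement itself.  */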
6238 
6239 /* Fold the statement pointed to by GSI.  In some cases, this function may
6240    replace the whole statement with a new one.  Returns true iff folding
6241    makes any changes.
6242    The statement pointed to by GSI should be in valid gimple form but may
6243    be in an unfolded state resulting, for example, from constant
6244    propagation, which can produce *&x = 0.  */
6245 
6246 bool
6247 fold_stmt (gimple_stmt_iterator *gsi)
6248 {
6249   return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6250 }
6251 
6252 bool
6253 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6254 {
6255   return fold_stmt_1 (gsi, false, valueize);
6256 }
6257 
6258 /* Perform the minimal folding on statement *GSI.  Only operations like
6259    *&x created by constant propagation are handled.  The statement cannot
6260    be replaced with a new one.  Return true if the statement was
6261    changed, false otherwise.
6262    The statement *GSI should be in valid gimple form but may
6263    be in an unfolded state resulting, for example, from constant
6264    propagation, which can produce *&x = 0.  */
6265 
6266 bool
6267 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6268 {
6269   gimple *stmt = gsi_stmt (*gsi);
6270   bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6271   gcc_assert (gsi_stmt (*gsi) == stmt);
6272   return changed;
6273 }
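
/* For example (illustrative), propagating an address constant can leave

     *&x = 0;

   behind; fold_stmt_inplace rewrites its lhs so the statement becomes
   plain "x = 0", without replacing the statement itself.  */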
6274 
6275 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6276    if EXPR is null or we don't know how.
6277    If non-null, the result always has boolean type.  */
6278 
6279 static tree
6280 canonicalize_bool (tree expr, bool invert)
6281 {
6282   if (!expr)
6283     return NULL_TREE;
6284   else if (invert)
6285     {
6286       if (integer_nonzerop (expr))
6287 	return boolean_false_node;
6288       else if (integer_zerop (expr))
6289 	return boolean_true_node;
6290       else if (TREE_CODE (expr) == SSA_NAME)
6291 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6292 			    build_int_cst (TREE_TYPE (expr), 0));
6293       else if (COMPARISON_CLASS_P (expr))
6294 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6295 			    boolean_type_node,
6296 			    TREE_OPERAND (expr, 0),
6297 			    TREE_OPERAND (expr, 1));
6298       else
6299 	return NULL_TREE;
6300     }
6301   else
6302     {
6303       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6304 	return expr;
6305       if (integer_nonzerop (expr))
6306 	return boolean_true_node;
6307       else if (integer_zerop (expr))
6308 	return boolean_false_node;
6309       else if (TREE_CODE (expr) == SSA_NAME)
6310 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
6311 			    build_int_cst (TREE_TYPE (expr), 0));
6312       else if (COMPARISON_CLASS_P (expr))
6313 	return fold_build2 (TREE_CODE (expr),
6314 			    boolean_type_node,
6315 			    TREE_OPERAND (expr, 0),
6316 			    TREE_OPERAND (expr, 1));
6317       else
6318 	return NULL_TREE;
6319     }
6320 }
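
/* Illustrative results of canonicalize_bool, assuming a boolean SSA name
   b_1 and an integer comparison a_2 < b_3:

     canonicalize_bool (a_2 < b_3, false)  => a_2 < b_3  (boolean type)
     canonicalize_bool (a_2 < b_3, true)   => a_2 >= b_3
     canonicalize_bool (b_1, true)         => b_1 == 0
     canonicalize_bool (1, true)           => boolean_false_node  */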
6321 
6322 /* Check to see if a boolean expression EXPR is logically equivalent to the
6323    comparison (OP1 CODE OP2).  Check for various identities involving
6324    SSA_NAMEs.  */
6325 
6326 static bool
6327 same_bool_comparison_p (const_tree expr, enum tree_code code,
6328 			const_tree op1, const_tree op2)
6329 {
6330   gimple *s;
6331 
6332   /* The obvious case.  */
6333   if (TREE_CODE (expr) == code
6334       && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6335       && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6336     return true;
6337 
6338   /* Check for comparing (name, name != 0) and the case where expr
6339      is an SSA_NAME with a definition matching the comparison.  */
6340   if (TREE_CODE (expr) == SSA_NAME
6341       && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6342     {
6343       if (operand_equal_p (expr, op1, 0))
6344 	return ((code == NE_EXPR && integer_zerop (op2))
6345 		|| (code == EQ_EXPR && integer_nonzerop (op2)));
6346       s = SSA_NAME_DEF_STMT (expr);
6347       if (is_gimple_assign (s)
6348 	  && gimple_assign_rhs_code (s) == code
6349 	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6350 	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6351 	return true;
6352     }
6353 
6354   /* If op1 is of the form (name != 0) or (name == 0), and the definition
6355      of name is a comparison, recurse.  */
6356   if (TREE_CODE (op1) == SSA_NAME
6357       && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6358     {
6359       s = SSA_NAME_DEF_STMT (op1);
6360       if (is_gimple_assign (s)
6361 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6362 	{
6363 	  enum tree_code c = gimple_assign_rhs_code (s);
6364 	  if ((c == NE_EXPR && integer_zerop (op2))
6365 	      || (c == EQ_EXPR && integer_nonzerop (op2)))
6366 	    return same_bool_comparison_p (expr, c,
6367 					   gimple_assign_rhs1 (s),
6368 					   gimple_assign_rhs2 (s));
6369 	  if ((c == EQ_EXPR && integer_zerop (op2))
6370 	      || (c == NE_EXPR && integer_nonzerop (op2)))
6371 	    return same_bool_comparison_p (expr,
6372 					   invert_tree_comparison (c, false),
6373 					   gimple_assign_rhs1 (s),
6374 					   gimple_assign_rhs2 (s));
6375 	}
6376     }
6377   return false;
6378 }
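
/* Illustrative example: with the definition t_1 = a_2 < b_3, the call
   same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) returns true via the
   SSA_NAME_DEF_STMT check above.  */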
6379 
6380 /* Check to see if two boolean expressions OP1 and OP2 are logically
6381    equivalent.  */
6382 
6383 static bool
6384 same_bool_result_p (const_tree op1, const_tree op2)
6385 {
6386   /* Simple cases first.  */
6387   if (operand_equal_p (op1, op2, 0))
6388     return true;
6389 
6390   /* Check the cases where at least one of the operands is a comparison.
6391      These are a bit smarter than operand_equal_p in that they apply some
6392      identities on SSA_NAMEs.  */
6393   if (COMPARISON_CLASS_P (op2)
6394       && same_bool_comparison_p (op1, TREE_CODE (op2),
6395 				 TREE_OPERAND (op2, 0),
6396 				 TREE_OPERAND (op2, 1)))
6397     return true;
6398   if (COMPARISON_CLASS_P (op1)
6399       && same_bool_comparison_p (op2, TREE_CODE (op1),
6400 				 TREE_OPERAND (op1, 0),
6401 				 TREE_OPERAND (op1, 1)))
6402     return true;
6403 
6404   /* Default case.  */
6405   return false;
6406 }
6407 
6408 /* Forward declarations for some mutually recursive functions.  */
6409 
6410 static tree
6411 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6412 		   enum tree_code code2, tree op2a, tree op2b);
6413 static tree
6414 and_var_with_comparison (tree type, tree var, bool invert,
6415 			 enum tree_code code2, tree op2a, tree op2b);
6416 static tree
6417 and_var_with_comparison_1 (tree type, gimple *stmt,
6418 			   enum tree_code code2, tree op2a, tree op2b);
6419 static tree
6420 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6421 		  enum tree_code code2, tree op2a, tree op2b);
6422 static tree
6423 or_var_with_comparison (tree, tree var, bool invert,
6424 			enum tree_code code2, tree op2a, tree op2b);
6425 static tree
6426 or_var_with_comparison_1 (tree, gimple *stmt,
6427 			  enum tree_code code2, tree op2a, tree op2b);
6428 
6429 /* Helper function for and_comparisons_1:  try to simplify the AND of the
6430    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6431    If INVERT is true, invert the value of VAR before doing the AND.
6432    Return NULL_TREE if we can't simplify this to a single expression.  */
6433 
6434 static tree
6435 and_var_with_comparison (tree type, tree var, bool invert,
6436 			 enum tree_code code2, tree op2a, tree op2b)
6437 {
6438   tree t;
6439   gimple *stmt = SSA_NAME_DEF_STMT (var);
6440 
6441   /* We can only deal with variables whose definitions are assignments.  */
6442   if (!is_gimple_assign (stmt))
6443     return NULL_TREE;
6444 
6445   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6446      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6447      Then we only have to consider the simpler non-inverted cases.  */
6448   if (invert)
6449     t = or_var_with_comparison_1 (type, stmt,
6450 				  invert_tree_comparison (code2, false),
6451 				  op2a, op2b);
6452   else
6453     t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6454   return canonicalize_bool (t, invert);
6455 }
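
/* E.g. (illustrative) with INVERT true and the outer test a_1 < b_2,
   the code above asks or_var_with_comparison_1 to simplify
   var OR (a_1 >= b_2) and then lets canonicalize_bool negate the result,
   implementing !var AND (a_1 < b_2) == !(var OR a_1 >= b_2), assuming an
   integer comparison so the inversion is exact.  */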
6456 
6457 /* Try to simplify the AND of the ssa variable defined by the assignment
6458    STMT with the comparison specified by (OP2A CODE2 OP2B).
6459    Return NULL_TREE if we can't simplify this to a single expression.  */
6460 
6461 static tree
6462 and_var_with_comparison_1 (tree type, gimple *stmt,
6463 			   enum tree_code code2, tree op2a, tree op2b)
6464 {
6465   tree var = gimple_assign_lhs (stmt);
6466   tree true_test_var = NULL_TREE;
6467   tree false_test_var = NULL_TREE;
6468   enum tree_code innercode = gimple_assign_rhs_code (stmt);
6469 
6470   /* Check for identities like (var AND (var == 0)) => false.  */
6471   if (TREE_CODE (op2a) == SSA_NAME
6472       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6473     {
6474       if ((code2 == NE_EXPR && integer_zerop (op2b))
6475 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6476 	{
6477 	  true_test_var = op2a;
6478 	  if (var == true_test_var)
6479 	    return var;
6480 	}
6481       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6482 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6483 	{
6484 	  false_test_var = op2a;
6485 	  if (var == false_test_var)
6486 	    return boolean_false_node;
6487 	}
6488     }
6489 
6490   /* If the definition is a comparison, recurse on it.  */
6491   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6492     {
6493       tree t = and_comparisons_1 (type, innercode,
6494 				  gimple_assign_rhs1 (stmt),
6495 				  gimple_assign_rhs2 (stmt),
6496 				  code2,
6497 				  op2a,
6498 				  op2b);
6499       if (t)
6500 	return t;
6501     }
6502 
6503   /* If the definition is an AND or OR expression, we may be able to
6504      simplify by reassociating.  */
6505   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6506       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6507     {
6508       tree inner1 = gimple_assign_rhs1 (stmt);
6509       tree inner2 = gimple_assign_rhs2 (stmt);
6510       gimple *s;
6511       tree t;
6512       tree partial = NULL_TREE;
6513       bool is_and = (innercode == BIT_AND_EXPR);
6514 
6515       /* Check for boolean identities that don't require recursive examination
6516 	 of inner1/inner2:
6517 	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6518 	 inner1 AND (inner1 OR inner2) => inner1
6519 	 !inner1 AND (inner1 AND inner2) => false
6520 	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6521          Likewise for similar cases involving inner2.  */
6522       if (inner1 == true_test_var)
6523 	return (is_and ? var : inner1);
6524       else if (inner2 == true_test_var)
6525 	return (is_and ? var : inner2);
6526       else if (inner1 == false_test_var)
6527 	return (is_and
6528 		? boolean_false_node
6529 		: and_var_with_comparison (type, inner2, false, code2, op2a,
6530 					   op2b));
6531       else if (inner2 == false_test_var)
6532 	return (is_and
6533 		? boolean_false_node
6534 		: and_var_with_comparison (type, inner1, false, code2, op2a,
6535 					   op2b));
6536 
6537       /* Next, redistribute/reassociate the AND across the inner tests.
6538 	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
6539       if (TREE_CODE (inner1) == SSA_NAME
6540 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6541 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6542 	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6543 					      gimple_assign_rhs1 (s),
6544 					      gimple_assign_rhs2 (s),
6545 					      code2, op2a, op2b)))
6546 	{
6547 	  /* Handle the AND case, where we are reassociating:
6548 	     (inner1 AND inner2) AND (op2a code2 op2b)
6549 	     => (t AND inner2)
6550 	     If the partial result t is a constant, we win.  Otherwise
6551 	     continue on to try reassociating with the other inner test.  */
6552 	  if (is_and)
6553 	    {
6554 	      if (integer_onep (t))
6555 		return inner2;
6556 	      else if (integer_zerop (t))
6557 		return boolean_false_node;
6558 	    }
6559 
6560 	  /* Handle the OR case, where we are redistributing:
6561 	     (inner1 OR inner2) AND (op2a code2 op2b)
6562 	     => (t OR (inner2 AND (op2a code2 op2b)))  */
6563 	  else if (integer_onep (t))
6564 	    return boolean_true_node;
6565 
6566 	  /* Save partial result for later.  */
6567 	  partial = t;
6568 	}
6569 
6570       /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6571       if (TREE_CODE (inner2) == SSA_NAME
6572 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6573 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6574 	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6575 					      gimple_assign_rhs1 (s),
6576 					      gimple_assign_rhs2 (s),
6577 					      code2, op2a, op2b)))
6578 	{
6579 	  /* Handle the AND case, where we are reassociating:
6580 	     (inner1 AND inner2) AND (op2a code2 op2b)
6581 	     => (inner1 AND t)  */
6582 	  if (is_and)
6583 	    {
6584 	      if (integer_onep (t))
6585 		return inner1;
6586 	      else if (integer_zerop (t))
6587 		return boolean_false_node;
6588 	      /* If both are the same, we can apply the identity
6589 		 (x AND x) == x.  */
6590 	      else if (partial && same_bool_result_p (t, partial))
6591 		return t;
6592 	    }
6593 
6594 	  /* Handle the OR case, where we are redistributing:
6595 	     (inner1 OR inner2) AND (op2a code2 op2b)
6596 	     => (t OR (inner1 AND (op2a code2 op2b)))
6597 	     => (t OR partial)  */
6598 	  else
6599 	    {
6600 	      if (integer_onep (t))
6601 		return boolean_true_node;
6602 	      else if (partial)
6603 		{
6604 		  /* We already got a simplification for the other
6605 		     operand to the redistributed OR expression.  The
6606 		     interesting case is when at least one is false.
6607 		     Or, if both are the same, we can apply the identity
6608 		     (x OR x) == x.  */
6609 		  if (integer_zerop (partial))
6610 		    return t;
6611 		  else if (integer_zerop (t))
6612 		    return partial;
6613 		  else if (same_bool_result_p (t, partial))
6614 		    return t;
6615 		}
6616 	    }
6617 	}
6618     }
6619   return NULL_TREE;
6620 }
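
/* Worked example for the reassociation above (illustrative): for

     t_1 = x_4 > 0;
     var_3 = t_1 & t_2;

   ANDed with the comparison x_4 < 0, the first partial result
   (x_4 > 0) AND (x_4 < 0) folds to boolean_false_node via
   maybe_fold_and_comparisons, so the whole AND is false regardless
   of t_2.  */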
6621 
6622 /* Try to simplify the AND of two comparisons defined by
6623    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6624    If this can be done without constructing an intermediate value,
6625    return the resulting tree; otherwise NULL_TREE is returned.
6626    This function is deliberately asymmetric as it recurses on SSA_DEFs
6627    in the first comparison but not the second.  */
6628 
6629 static tree
6630 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6631 		   enum tree_code code2, tree op2a, tree op2b)
6632 {
6633   tree truth_type = truth_type_for (TREE_TYPE (op1a));
6634 
6635   /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
6636   if (operand_equal_p (op1a, op2a, 0)
6637       && operand_equal_p (op1b, op2b, 0))
6638     {
6639       /* Result will be either NULL_TREE, or a combined comparison.  */
6640       tree t = combine_comparisons (UNKNOWN_LOCATION,
6641 				    TRUTH_ANDIF_EXPR, code1, code2,
6642 				    truth_type, op1a, op1b);
6643       if (t)
6644 	return t;
6645     }
6646 
6647   /* Likewise the swapped case of the above.  */
6648   if (operand_equal_p (op1a, op2b, 0)
6649       && operand_equal_p (op1b, op2a, 0))
6650     {
6651       /* Result will be either NULL_TREE, or a combined comparison.  */
6652       tree t = combine_comparisons (UNKNOWN_LOCATION,
6653 				    TRUTH_ANDIF_EXPR, code1,
6654 				    swap_tree_comparison (code2),
6655 				    truth_type, op1a, op1b);
6656       if (t)
6657 	return t;
6658     }
6659 
6660   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6661      NAME's definition is a truth value.  See if there are any simplifications
6662      that can be done against the NAME's definition.  */
6663   if (TREE_CODE (op1a) == SSA_NAME
6664       && (code1 == NE_EXPR || code1 == EQ_EXPR)
6665       && (integer_zerop (op1b) || integer_onep (op1b)))
6666     {
6667       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6668 		     || (code1 == NE_EXPR && integer_onep (op1b)));
6669       gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6670       switch (gimple_code (stmt))
6671 	{
6672 	case GIMPLE_ASSIGN:
6673 	  /* Try to simplify by copy-propagating the definition.  */
6674 	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
6675 					  op2b);
6676 
6677 	case GIMPLE_PHI:
6678 	  /* If every argument to the PHI produces the same result when
6679 	     ANDed with the second comparison, we win.
6680 	     Do not do this unless the type is bool since we need a bool
6681 	     result here anyway.  */
6682 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6683 	    {
6684 	      tree result = NULL_TREE;
6685 	      unsigned i;
6686 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
6687 		{
6688 		  tree arg = gimple_phi_arg_def (stmt, i);
6689 
6690 		  /* If this PHI has itself as an argument, ignore it.
6691 		     If all the other args produce the same result,
6692 		     we're still OK.  */
6693 		  if (arg == gimple_phi_result (stmt))
6694 		    continue;
6695 		  else if (TREE_CODE (arg) == INTEGER_CST)
6696 		    {
6697 		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6698 			{
6699 			  if (!result)
6700 			    result = boolean_false_node;
6701 			  else if (!integer_zerop (result))
6702 			    return NULL_TREE;
6703 			}
6704 		      else if (!result)
6705 			result = fold_build2 (code2, boolean_type_node,
6706 					      op2a, op2b);
6707 		      else if (!same_bool_comparison_p (result,
6708 							code2, op2a, op2b))
6709 			return NULL_TREE;
6710 		    }
6711 		  else if (TREE_CODE (arg) == SSA_NAME
6712 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
6713 		    {
6714 		      tree temp;
6715 		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6716 		      /* In simple cases we can look through PHI nodes,
6717 			 but we have to be careful with loops.
6718 			 See PR49073.  */
6719 		      if (! dom_info_available_p (CDI_DOMINATORS)
6720 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
6721 			  || dominated_by_p (CDI_DOMINATORS,
6722 					     gimple_bb (def_stmt),
6723 					     gimple_bb (stmt)))
6724 			return NULL_TREE;
6725 		      temp = and_var_with_comparison (type, arg, invert, code2,
6726 						      op2a, op2b);
6727 		      if (!temp)
6728 			return NULL_TREE;
6729 		      else if (!result)
6730 			result = temp;
6731 		      else if (!same_bool_result_p (result, temp))
6732 			return NULL_TREE;
6733 		    }
6734 		  else
6735 		    return NULL_TREE;
6736 		}
6737 	      return result;
6738 	    }
6739 
6740 	default:
6741 	  break;
6742 	}
6743     }
6744   return NULL_TREE;
6745 }
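
/* Illustrative PHI case for the code above: with

     x_1 = PHI <0, t_2>
     t_2 = a_3 >= b_4;

   the test (x_1 != 0) AND (a_3 < b_4) yields boolean_false_node: the
   constant 0 argument contributes false, and t_2 AND (a_3 < b_4) also
   folds to false, assuming the dominance checks permit looking through
   the PHI.  */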
6746 
6747 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
6748    try to simplify the AND/OR of the two comparisons (OP1A CODE1 OP1B) and
6749    (OP2A CODE2 OP2B) using match.pd.  Return NULL_TREE if we can't simplify
6750    this to a single expression.  To keep the cost of building SSA names and
6751    gimple stmts low, we allocate them on the stack.  This makes the code
6752    a bit ugly.  */
6753 
6754 static tree
6755 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6756 				      enum tree_code code1,
6757 				      tree op1a, tree op1b,
6758 				      enum tree_code code2, tree op2a,
6759 				      tree op2b)
6760 {
6761   /* Allocate gimple stmt1 on the stack.  */
6762   gassign *stmt1
6763     = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6764   gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6765   gimple_assign_set_rhs_code (stmt1, code1);
6766   gimple_assign_set_rhs1 (stmt1, op1a);
6767   gimple_assign_set_rhs2 (stmt1, op1b);
6768 
6769   /* Allocate gimple stmt2 on the stack.  */
6770   gassign *stmt2
6771     = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6772   gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6773   gimple_assign_set_rhs_code (stmt2, code2);
6774   gimple_assign_set_rhs1 (stmt2, op2a);
6775   gimple_assign_set_rhs2 (stmt2, op2b);
6776 
6777   /* Allocate SSA names(lhs1) on the stack.  */
6778   tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6779   memset (lhs1, 0, sizeof (tree_ssa_name));
6780   TREE_SET_CODE (lhs1, SSA_NAME);
6781   TREE_TYPE (lhs1) = type;
6782   init_ssa_name_imm_use (lhs1);
6783 
6784   /* Allocate SSA names(lhs2) on the stack.  */
6785   tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6786   memset (lhs2, 0, sizeof (tree_ssa_name));
6787   TREE_SET_CODE (lhs2, SSA_NAME);
6788   TREE_TYPE (lhs2) = type;
6789   init_ssa_name_imm_use (lhs2);
6790 
6791   gimple_assign_set_lhs (stmt1, lhs1);
6792   gimple_assign_set_lhs (stmt2, lhs2);
6793 
6794   gimple_match_op op (gimple_match_cond::UNCOND, code,
6795 		      type, gimple_assign_lhs (stmt1),
6796 		      gimple_assign_lhs (stmt2));
6797   if (op.resimplify (NULL, follow_all_ssa_edges))
6798     {
6799       if (gimple_simplified_result_is_gimple_val (&op))
6800 	{
6801 	  tree res = op.ops[0];
6802 	  if (res == lhs1)
6803 	    return build2 (code1, type, op1a, op1b);
6804 	  else if (res == lhs2)
6805 	    return build2 (code2, type, op2a, op2b);
6806 	  else
6807 	    return res;
6808 	}
6809       else if (op.code.is_tree_code ()
6810 	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6811 	{
6812 	  tree op0 = op.ops[0];
6813 	  tree op1 = op.ops[1];
6814 	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6815 	    return NULL_TREE;  /* not simple */
6816 
6817 	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
6818 	}
6819     }
6820 
6821   return NULL_TREE;
6822 }
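
/* For example (illustrative), asking match.pd to fold
   (x_1 < y_2) & (x_1 >= y_2) through the two statements faked above can
   yield boolean_false_node, without ever allocating real SSA names or
   statements from GC memory.  */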
6823 
6824 /* Try to simplify the AND of two comparisons, specified by
6825    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6826    If this can be simplified to a single expression (without requiring
6827    introducing more SSA variables to hold intermediate values),
6828    return the resulting tree.  Otherwise return NULL_TREE.
6829    If the result expression is non-null, it has boolean type.  */
6830 
6831 tree
6832 maybe_fold_and_comparisons (tree type,
6833 			    enum tree_code code1, tree op1a, tree op1b,
6834 			    enum tree_code code2, tree op2a, tree op2b)
6835 {
6836   if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6837     return t;
6838 
6839   if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6840     return t;
6841 
6842   if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6843 						     op1a, op1b, code2, op2a,
6844 						     op2b))
6845     return t;
6846 
6847   return NULL_TREE;
6848 }
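
/* Illustrative example, assuming an integer x_1: "(x_1 > 5) AND (x_1 > 2)"
   can fold to "x_1 > 5" here via the match.pd fallback, even though
   combine_comparisons only handles comparisons of identical operands.  */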
6849 
6850 /* Helper function for or_comparisons_1:  try to simplify the OR of the
6851    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6852    If INVERT is true, invert the value of VAR before doing the OR.
6853    Return NULL_TREE if we can't simplify this to a single expression.  */
6854 
6855 static tree
6856 or_var_with_comparison (tree type, tree var, bool invert,
6857 			enum tree_code code2, tree op2a, tree op2b)
6858 {
6859   tree t;
6860   gimple *stmt = SSA_NAME_DEF_STMT (var);
6861 
6862   /* We can only deal with variables whose definitions are assignments.  */
6863   if (!is_gimple_assign (stmt))
6864     return NULL_TREE;
6865 
6866   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6867      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6868      Then we only have to consider the simpler non-inverted cases.  */
6869   if (invert)
6870     t = and_var_with_comparison_1 (type, stmt,
6871 				   invert_tree_comparison (code2, false),
6872 				   op2a, op2b);
6873   else
6874     t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6875   return canonicalize_bool (t, invert);
6876 }
6877 
6878 /* Try to simplify the OR of the ssa variable defined by the assignment
6879    STMT with the comparison specified by (OP2A CODE2 OP2B).
6880    Return NULL_TREE if we can't simplify this to a single expression.  */
6881 
6882 static tree
6883 or_var_with_comparison_1 (tree type, gimple *stmt,
6884 			  enum tree_code code2, tree op2a, tree op2b)
6885 {
6886   tree var = gimple_assign_lhs (stmt);
6887   tree true_test_var = NULL_TREE;
6888   tree false_test_var = NULL_TREE;
6889   enum tree_code innercode = gimple_assign_rhs_code (stmt);
6890 
6891   /* Check for identities like (var OR (var == 0)) => true.  */
6892   if (TREE_CODE (op2a) == SSA_NAME
6893       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6894     {
6895       if ((code2 == NE_EXPR && integer_zerop (op2b))
6896 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6897 	{
6898 	  true_test_var = op2a;
6899 	  if (var == true_test_var)
6900 	    return var;
6901 	}
6902       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6903 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6904 	{
6905 	  false_test_var = op2a;
6906 	  if (var == false_test_var)
6907 	    return boolean_true_node;
6908 	}
6909     }
6910 
6911   /* If the definition is a comparison, recurse on it.  */
6912   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6913     {
6914       tree t = or_comparisons_1 (type, innercode,
6915 				 gimple_assign_rhs1 (stmt),
6916 				 gimple_assign_rhs2 (stmt),
6917 				 code2,
6918 				 op2a,
6919 				 op2b);
6920       if (t)
6921 	return t;
6922     }
6923 
6924   /* If the definition is an AND or OR expression, we may be able to
6925      simplify by reassociating.  */
6926   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6927       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6928     {
6929       tree inner1 = gimple_assign_rhs1 (stmt);
6930       tree inner2 = gimple_assign_rhs2 (stmt);
6931       gimple *s;
6932       tree t;
6933       tree partial = NULL_TREE;
6934       bool is_or = (innercode == BIT_IOR_EXPR);
6935 
6936       /* Check for boolean identities that don't require recursive examination
6937 	 of inner1/inner2:
6938 	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6939 	 inner1 OR (inner1 AND inner2) => inner1
6940 	 !inner1 OR (inner1 OR inner2) => true
6941 	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6942       */
6943       if (inner1 == true_test_var)
6944 	return (is_or ? var : inner1);
6945       else if (inner2 == true_test_var)
6946 	return (is_or ? var : inner2);
6947       else if (inner1 == false_test_var)
6948 	return (is_or
6949 		? boolean_true_node
6950 		: or_var_with_comparison (type, inner2, false, code2, op2a,
6951 					  op2b));
6952       else if (inner2 == false_test_var)
6953 	return (is_or
6954 		? boolean_true_node
6955 		: or_var_with_comparison (type, inner1, false, code2, op2a,
6956 					  op2b));
6957 
6958       /* Next, redistribute/reassociate the OR across the inner tests.
6959 	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
6960       if (TREE_CODE (inner1) == SSA_NAME
6961 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6962 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6963 	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6964 					     gimple_assign_rhs1 (s),
6965 					     gimple_assign_rhs2 (s),
6966 					     code2, op2a, op2b)))
6967 	{
6968 	  /* Handle the OR case, where we are reassociating:
6969 	     (inner1 OR inner2) OR (op2a code2 op2b)
6970 	     => (t OR inner2)
6971 	     If the partial result t is a constant, we win.  Otherwise
6972 	     continue on to try reassociating with the other inner test.  */
6973 	  if (is_or)
6974 	    {
6975 	      if (integer_onep (t))
6976 		return boolean_true_node;
6977 	      else if (integer_zerop (t))
6978 		return inner2;
6979 	    }
6980 
6981 	  /* Handle the AND case, where we are redistributing:
6982 	     (inner1 AND inner2) OR (op2a code2 op2b)
6983 	     => (t AND (inner2 OR (op2a code op2b)))  */
6984 	  else if (integer_zerop (t))
6985 	    return boolean_false_node;
6986 
6987 	  /* Save partial result for later.  */
6988 	  partial = t;
6989 	}
6990 
6991       /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6992       if (TREE_CODE (inner2) == SSA_NAME
6993 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6994 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6995 	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6996 					     gimple_assign_rhs1 (s),
6997 					     gimple_assign_rhs2 (s),
6998 					     code2, op2a, op2b)))
6999 	{
7000 	  /* Handle the OR case, where we are reassociating:
7001 	     (inner1 OR inner2) OR (op2a code2 op2b)
7002 	     => (inner1 OR t)
7003 	     => (t OR partial)  */
7004 	  if (is_or)
7005 	    {
7006 	      if (integer_zerop (t))
7007 		return inner1;
7008 	      else if (integer_onep (t))
7009 		return boolean_true_node;
7010 	      /* If both are the same, we can apply the identity
7011 		 (x OR x) == x.  */
7012 	      else if (partial && same_bool_result_p (t, partial))
7013 		return t;
7014 	    }
7015 
7016 	  /* Handle the AND case, where we are redistributing:
7017 	     (inner1 AND inner2) OR (op2a code2 op2b)
7018 	     => (t AND (inner1 OR (op2a code2 op2b)))
7019 	     => (t AND partial)  */
7020 	  else
7021 	    {
7022 	      if (integer_zerop (t))
7023 		return boolean_false_node;
7024 	      else if (partial)
7025 		{
7026 		  /* We already got a simplification for the other
7027 		     operand to the redistributed AND expression.  The
7028 		     interesting case is when at least one is true.
7029 		     Or, if both are the same, we can apply the identity
7030 		     (x AND x) == x.  */
7031 		  if (integer_onep (partial))
7032 		    return t;
7033 		  else if (integer_onep (t))
7034 		    return partial;
7035 		  else if (same_bool_result_p (t, partial))
7036 		    return t;
7037 		}
7038 	    }
7039 	}
7040     }
7041   return NULL_TREE;
7042 }
7043 
7044 /* Try to simplify the OR of two comparisons defined by
7045    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7046    If this can be done without constructing an intermediate value,
7047    return the resulting tree; otherwise NULL_TREE is returned.
7048    This function is deliberately asymmetric as it recurses on SSA_DEFs
7049    in the first comparison but not the second.  */
7050 
7051 static tree
7052 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7053 		  enum tree_code code2, tree op2a, tree op2b)
7054 {
7055   tree truth_type = truth_type_for (TREE_TYPE (op1a));
7056 
7057   /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
7058   if (operand_equal_p (op1a, op2a, 0)
7059       && operand_equal_p (op1b, op2b, 0))
7060     {
7061       /* Result will be either NULL_TREE, or a combined comparison.  */
7062       tree t = combine_comparisons (UNKNOWN_LOCATION,
7063 				    TRUTH_ORIF_EXPR, code1, code2,
7064 				    truth_type, op1a, op1b);
7065       if (t)
7066 	return t;
7067     }
7068 
7069   /* Likewise the swapped case of the above.  */
7070   if (operand_equal_p (op1a, op2b, 0)
7071       && operand_equal_p (op1b, op2a, 0))
7072     {
7073       /* Result will be either NULL_TREE, or a combined comparison.  */
7074       tree t = combine_comparisons (UNKNOWN_LOCATION,
7075 				    TRUTH_ORIF_EXPR, code1,
7076 				    swap_tree_comparison (code2),
7077 				    truth_type, op1a, op1b);
7078       if (t)
7079 	return t;
7080     }
7081 
7082   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7083      NAME's definition is a truth value.  See if there are any simplifications
7084      that can be done against the NAME's definition.  */
7085   if (TREE_CODE (op1a) == SSA_NAME
7086       && (code1 == NE_EXPR || code1 == EQ_EXPR)
7087       && (integer_zerop (op1b) || integer_onep (op1b)))
7088     {
7089       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7090 		     || (code1 == NE_EXPR && integer_onep (op1b)));
7091       gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7092       switch (gimple_code (stmt))
7093 	{
7094 	case GIMPLE_ASSIGN:
7095 	  /* Try to simplify by copy-propagating the definition.  */
7096 	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
7097 					 op2b);
7098 
7099 	case GIMPLE_PHI:
7100 	  /* If every argument to the PHI produces the same result when
7101 	     ORed with the second comparison, we win.
7102 	     Do not do this unless the type is bool since we need a bool
7103 	     result here anyway.  */
7104 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7105 	    {
7106 	      tree result = NULL_TREE;
7107 	      unsigned i;
7108 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
7109 		{
7110 		  tree arg = gimple_phi_arg_def (stmt, i);
7111 
7112 		  /* If this PHI has itself as an argument, ignore it.
7113 		     If all the other args produce the same result,
7114 		     we're still OK.  */
7115 		  if (arg == gimple_phi_result (stmt))
7116 		    continue;
7117 		  else if (TREE_CODE (arg) == INTEGER_CST)
7118 		    {
7119 		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7120 			{
7121 			  if (!result)
7122 			    result = boolean_true_node;
7123 			  else if (!integer_onep (result))
7124 			    return NULL_TREE;
7125 			}
7126 		      else if (!result)
7127 			result = fold_build2 (code2, boolean_type_node,
7128 					      op2a, op2b);
7129 		      else if (!same_bool_comparison_p (result,
7130 							code2, op2a, op2b))
7131 			return NULL_TREE;
7132 		    }
7133 		  else if (TREE_CODE (arg) == SSA_NAME
7134 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
7135 		    {
7136 		      tree temp;
7137 		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7138 		      /* In simple cases we can look through PHI nodes,
7139 			 but we have to be careful with loops.
7140 			 See PR49073.  */
7141 		      if (! dom_info_available_p (CDI_DOMINATORS)
7142 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
7143 			  || dominated_by_p (CDI_DOMINATORS,
7144 					     gimple_bb (def_stmt),
7145 					     gimple_bb (stmt)))
7146 			return NULL_TREE;
7147 		      temp = or_var_with_comparison (type, arg, invert, code2,
7148 						     op2a, op2b);
7149 		      if (!temp)
7150 			return NULL_TREE;
7151 		      else if (!result)
7152 			result = temp;
7153 		      else if (!same_bool_result_p (result, temp))
7154 			return NULL_TREE;
7155 		    }
7156 		  else
7157 		    return NULL_TREE;
7158 		}
7159 	      return result;
7160 	    }
7161 
7162 	default:
7163 	  break;
7164 	}
7165     }
7166   return NULL_TREE;
7167 }
7168 
7169 /* Try to simplify the OR of two comparisons, specified by
7170    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7171    If this can be simplified to a single expression (without requiring
7172    introducing more SSA variables to hold intermediate values),
7173    return the resulting tree.  Otherwise return NULL_TREE.
7174    If the result expression is non-null, it has boolean type.  */
7175 
7176 tree
7177 maybe_fold_or_comparisons (tree type,
7178 			   enum tree_code code1, tree op1a, tree op1b,
7179 			   enum tree_code code2, tree op2a, tree op2b)
7180 {
7181   if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7182     return t;
7183 
7184   if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7185     return t;
7186 
7187   if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7188 						     op1a, op1b, code2, op2a,
7189 						     op2b))
7190     return t;
7191 
7192   return NULL_TREE;
7193 }
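
/* Illustrative example, assuming an integer x_1: "(x_1 < 2) OR (x_1 >= 2)"
   folds to boolean_true_node here via or_comparisons_1 and
   combine_comparisons, since LT and GE on the same operands cover all
   integer cases.  */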
7194 
7195 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7196 
7197    Either NULL_TREE, a simplified but non-constant or a constant
7198    is returned.
7199 
7200    ???  This should go into a gimple-fold-inline.h file to be eventually
7201    privatized with the single valueize function used in the various TUs
7202    to avoid the indirect function call overhead.  */
7203 
7204 tree
7205 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7206 				tree (*gvalueize) (tree))
7207 {
7208   gimple_match_op res_op;
7209   /* ???  The SSA propagators do not correctly deal with following SSA use-def
7210      edges if there are intermediate VARYING defs.  For this reason
7211      do not follow SSA edges here even though SCCVN can technically
7212      just deal fine with that.  */
7213   if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7214     {
7215       tree res = NULL_TREE;
7216       if (gimple_simplified_result_is_gimple_val (&res_op))
7217 	res = res_op.ops[0];
7218       else if (mprts_hook)
7219 	res = mprts_hook (&res_op);
7220       if (res)
7221 	{
7222 	  if (dump_file && dump_flags & TDF_DETAILS)
7223 	    {
7224 	      fprintf (dump_file, "Match-and-simplified ");
7225 	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7226 	      fprintf (dump_file, " to ");
7227 	      print_generic_expr (dump_file, res);
7228 	      fprintf (dump_file, "\n");
7229 	    }
7230 	  return res;
7231 	}
7232     }
7233 
7234   location_t loc = gimple_location (stmt);
7235   switch (gimple_code (stmt))
7236     {
7237     case GIMPLE_ASSIGN:
7238       {
7239         enum tree_code subcode = gimple_assign_rhs_code (stmt);
7240 
7241         switch (get_gimple_rhs_class (subcode))
7242           {
7243           case GIMPLE_SINGLE_RHS:
7244             {
7245               tree rhs = gimple_assign_rhs1 (stmt);
7246               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7247 
7248               if (TREE_CODE (rhs) == SSA_NAME)
7249                 {
7250                   /* If the RHS is an SSA_NAME, return its known constant value,
7251                      if any.  */
7252                   return (*valueize) (rhs);
7253                 }
7254 	      /* Handle propagating invariant addresses into address
7255 		 operations.  */
7256 	      else if (TREE_CODE (rhs) == ADDR_EXPR
7257 		       && !is_gimple_min_invariant (rhs))
7258 		{
7259 		  poly_int64 offset = 0;
7260 		  tree base;
7261 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7262 							  &offset,
7263 							  valueize);
7264 		  if (base
7265 		      && (CONSTANT_CLASS_P (base)
7266 			  || decl_address_invariant_p (base)))
7267 		    return build_invariant_address (TREE_TYPE (rhs),
7268 						    base, offset);
7269 		}
7270 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
7271 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7272 		       && known_eq (CONSTRUCTOR_NELTS (rhs),
7273 				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7274 		{
7275 		  unsigned i, nelts;
7276 		  tree val;
7277 
7278 		  nelts = CONSTRUCTOR_NELTS (rhs);
7279 		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7280 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7281 		    {
7282 		      val = (*valueize) (val);
7283 		      if (TREE_CODE (val) == INTEGER_CST
7284 			  || TREE_CODE (val) == REAL_CST
7285 			  || TREE_CODE (val) == FIXED_CST)
7286 			vec.quick_push (val);
7287 		      else
7288 			return NULL_TREE;
7289 		    }
7290 
7291 		  return vec.build ();
7292 		}
7293 	      if (subcode == OBJ_TYPE_REF)
7294 		{
7295 		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7296 		  /* If callee is constant, we can fold away the wrapper.  */
7297 		  if (is_gimple_min_invariant (val))
7298 		    return val;
7299 		}
7300 
7301               if (kind == tcc_reference)
7302 		{
7303 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7304 		       || TREE_CODE (rhs) == REALPART_EXPR
7305 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
7306 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7307 		    {
7308 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7309 		      return fold_unary_loc (EXPR_LOCATION (rhs),
7310 					     TREE_CODE (rhs),
7311 					     TREE_TYPE (rhs), val);
7312 		    }
7313 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
7314 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7315 		    {
7316 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7317 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
7318 					       TREE_CODE (rhs),
7319 					       TREE_TYPE (rhs), val,
7320 					       TREE_OPERAND (rhs, 1),
7321 					       TREE_OPERAND (rhs, 2));
7322 		    }
7323 		  else if (TREE_CODE (rhs) == MEM_REF
7324 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7325 		    {
7326 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7327 		      if (TREE_CODE (val) == ADDR_EXPR
7328 			  && is_gimple_min_invariant (val))
7329 			{
7330 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7331 						  unshare_expr (val),
7332 						  TREE_OPERAND (rhs, 1));
7333 			  if (tem)
7334 			    rhs = tem;
7335 			}
7336 		    }
7337 		  return fold_const_aggregate_ref_1 (rhs, valueize);
7338 		}
7339               else if (kind == tcc_declaration)
7340                 return get_symbol_constant_value (rhs);
7341               return rhs;
7342             }
7343 
7344           case GIMPLE_UNARY_RHS:
7345 	    return NULL_TREE;
7346 
7347           case GIMPLE_BINARY_RHS:
7348 	    /* Translate &x + CST into an invariant form suitable for
7349 	       further propagation.  */
7350 	    if (subcode == POINTER_PLUS_EXPR)
7351 	      {
7352 		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7353 		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7354 		if (TREE_CODE (op0) == ADDR_EXPR
7355 		    && TREE_CODE (op1) == INTEGER_CST)
7356 		  {
7357 		    tree off = fold_convert (ptr_type_node, op1);
7358 		    return build1_loc
7359 			(loc, ADDR_EXPR, TREE_TYPE (op0),
7360 			 fold_build2 (MEM_REF,
7361 				      TREE_TYPE (TREE_TYPE (op0)),
7362 				      unshare_expr (op0), off));
7363 		  }
7364 	      }
7365 	    /* Canonicalize bool != 0 and bool == 0 appearing after
7366 	       valueization.  While gimple_simplify handles this, it
7367 	       can get confused by the ~X == 1 -> X == 0 transform,
7368 	       which we can't reduce to an SSA name or a constant
7369 	       (and we have no way to tell gimple_simplify to not
7370 	       consider those transforms in the first place).  */
7371 	    else if (subcode == EQ_EXPR
7372 		     || subcode == NE_EXPR)
7373 	      {
7374 		tree lhs = gimple_assign_lhs (stmt);
7375 		tree op0 = gimple_assign_rhs1 (stmt);
7376 		if (useless_type_conversion_p (TREE_TYPE (lhs),
7377 					       TREE_TYPE (op0)))
7378 		  {
7379 		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7380 		    op0 = (*valueize) (op0);
7381 		    if (TREE_CODE (op0) == INTEGER_CST)
7382 		      std::swap (op0, op1);
7383 		    if (TREE_CODE (op1) == INTEGER_CST
7384 			&& ((subcode == NE_EXPR && integer_zerop (op1))
7385 			    || (subcode == EQ_EXPR && integer_onep (op1))))
7386 		      return op0;
7387 		  }
7388 	      }
7389 	    return NULL_TREE;
7390 
7391           case GIMPLE_TERNARY_RHS:
7392             {
7393               /* Handle ternary operators that can appear in GIMPLE form.  */
7394               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7395               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7396               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7397               return fold_ternary_loc (loc, subcode,
7398 				       gimple_expr_type (stmt), op0, op1, op2);
7399             }
7400 
7401           default:
7402             gcc_unreachable ();
7403           }
7404       }
7405 
7406     case GIMPLE_CALL:
7407       {
7408 	tree fn;
7409 	gcall *call_stmt = as_a <gcall *> (stmt);
7410 
7411 	if (gimple_call_internal_p (stmt))
7412 	  {
7413 	    enum tree_code subcode = ERROR_MARK;
7414 	    switch (gimple_call_internal_fn (stmt))
7415 	      {
7416 	      case IFN_UBSAN_CHECK_ADD:
7417 		subcode = PLUS_EXPR;
7418 		break;
7419 	      case IFN_UBSAN_CHECK_SUB:
7420 		subcode = MINUS_EXPR;
7421 		break;
7422 	      case IFN_UBSAN_CHECK_MUL:
7423 		subcode = MULT_EXPR;
7424 		break;
7425 	      case IFN_BUILTIN_EXPECT:
7426 		  {
7427 		    tree arg0 = gimple_call_arg (stmt, 0);
7428 		    tree op0 = (*valueize) (arg0);
7429 		    if (TREE_CODE (op0) == INTEGER_CST)
7430 		      return op0;
7431 		    return NULL_TREE;
7432 		  }
7433 	      default:
7434 		return NULL_TREE;
7435 	      }
7436 	    tree arg0 = gimple_call_arg (stmt, 0);
7437 	    tree arg1 = gimple_call_arg (stmt, 1);
7438 	    tree op0 = (*valueize) (arg0);
7439 	    tree op1 = (*valueize) (arg1);
7440 
7441 	    if (TREE_CODE (op0) != INTEGER_CST
7442 		|| TREE_CODE (op1) != INTEGER_CST)
7443 	      {
7444 		switch (subcode)
7445 		  {
7446 		  case MULT_EXPR:
7447 		    /* x * 0 = 0 * x = 0 without overflow.  */
7448 		    if (integer_zerop (op0) || integer_zerop (op1))
7449 		      return build_zero_cst (TREE_TYPE (arg0));
7450 		    break;
7451 		  case MINUS_EXPR:
7452 		    /* y - y = 0 without overflow.  */
7453 		    if (operand_equal_p (op0, op1, 0))
7454 		      return build_zero_cst (TREE_TYPE (arg0));
7455 		    break;
7456 		  default:
7457 		    break;
7458 		  }
7459 	      }
7460 	    tree res
7461 	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7462 	    if (res
7463 		&& TREE_CODE (res) == INTEGER_CST
7464 		&& !TREE_OVERFLOW (res))
7465 	      return res;
7466 	    return NULL_TREE;
7467 	  }
7468 
7469 	fn = (*valueize) (gimple_call_fn (stmt));
7470 	if (TREE_CODE (fn) == ADDR_EXPR
7471 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7472 	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7473 	    && gimple_builtin_call_types_compatible_p (stmt,
7474 						       TREE_OPERAND (fn, 0)))
7475 	  {
7476 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7477 	    tree retval;
7478 	    unsigned i;
7479 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
7480 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
7481 	    retval = fold_builtin_call_array (loc,
7482 					 gimple_call_return_type (call_stmt),
7483 					 fn, gimple_call_num_args (stmt), args);
7484 	    if (retval)
7485 	      {
7486 		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
7487 		STRIP_NOPS (retval);
7488 		retval = fold_convert (gimple_call_return_type (call_stmt),
7489 				       retval);
7490 	      }
7491 	    return retval;
7492 	  }
7493 	return NULL_TREE;
7494       }
7495 
7496     default:
7497       return NULL_TREE;
7498     }
7499 }
7500 
7501 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7502    Returns NULL_TREE if folding to a constant is not possible, otherwise
7503    returns a constant according to is_gimple_min_invariant.  */
7504 
7505 tree
7506 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7507 {
7508   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7509   if (res && is_gimple_min_invariant (res))
7510     return res;
7511   return NULL_TREE;
7512 }
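
/* Hypothetical usage sketch: with a valueization callback that simply
   returns its argument, the function above folds statements whose
   operands are already constant:

     static tree
     valueize_identity (tree t)
     {
       return t;
     }
     ...
     tree cst = gimple_fold_stmt_to_constant (stmt, valueize_identity);
     if (cst)
       ... replace uses of the stmt's lhs with cst ...
*/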
7513 
7514 
7515 /* The following set of functions are supposed to fold references using
7516    their constant initializers.  */
7517 
7518 /* See if we can find constructor defining value of BASE.
7519    When we know the constructor at a constant offset (such as when
7520    BASE is array[40] and we do know the constructor of array),
7521    BIT_OFFSET is adjusted accordingly.
7522 
7523    As a special case, return error_mark_node when constructor
7524    is not explicitly available, but it is known to be zero
7525    such as 'static const int a;'.  */
7526 static tree
7527 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7528 		      tree (*valueize)(tree))
7529 {
7530   poly_int64 bit_offset2, size, max_size;
7531   bool reverse;
7532 
7533   if (TREE_CODE (base) == MEM_REF)
7534     {
7535       poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7536       if (!boff.to_shwi (bit_offset))
7537 	return NULL_TREE;
7538 
7539       if (valueize
7540 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7541 	base = valueize (TREE_OPERAND (base, 0));
7542       if (!base || TREE_CODE (base) != ADDR_EXPR)
7543         return NULL_TREE;
7544       base = TREE_OPERAND (base, 0);
7545     }
7546   else if (valueize
7547 	   && TREE_CODE (base) == SSA_NAME)
7548     base = valueize (base);
7549 
7550   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
7551      DECL_INITIAL.  If BASE is a nested reference into another
7552      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7553      the inner reference.  */
7554   switch (TREE_CODE (base))
7555     {
7556     case VAR_DECL:
7557     case CONST_DECL:
7558       {
7559 	tree init = ctor_for_folding (base);
7560 
7561 	/* Our semantics are the exact opposite of ctor_for_folding's:
7562 	   NULL means unknown, while error_mark_node means zero.  */
7563 	if (init == error_mark_node)
7564 	  return NULL_TREE;
7565 	if (!init)
7566 	  return error_mark_node;
7567 	return init;
7568       }
7569 
7570     case VIEW_CONVERT_EXPR:
7571       return get_base_constructor (TREE_OPERAND (base, 0),
7572 				   bit_offset, valueize);
7573 
7574     case ARRAY_REF:
7575     case COMPONENT_REF:
7576       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7577 				      &reverse);
7578       if (!known_size_p (max_size) || maybe_ne (size, max_size))
7579 	return NULL_TREE;
7580       *bit_offset += bit_offset2;
7581       return get_base_constructor (base, bit_offset, valueize);
7582 
7583     case CONSTRUCTOR:
7584       return base;
7585 
7586     default:
7587       if (CONSTANT_CLASS_P (base))
7588 	return base;
7589 
7590       return NULL_TREE;
7591     }
7592 }
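
/* Example (illustrative): for

     static const int a[2] = { 1, 2 };

   and a load from MEM_REF (&a, 4), get_base_constructor returns the
   CONSTRUCTOR { 1, 2 } and increments *BIT_OFFSET by 32 (with 8-bit
   units), so a subsequent fold of the CONSTRUCTOR at bit offset 32
   yields the value 2.  */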
7593 
7594 /* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
7595    to the memory at bit OFFSET.     When non-null, TYPE is the expected
7596    type of the reference; otherwise the type of the referenced element
7597    is used instead. When SIZE is zero, attempt to fold a reference to
7598    the entire element which OFFSET refers to.  Increment *SUBOFF by
7599    the bit offset of the accessed element.  */
7600 
7601 static tree
7602 fold_array_ctor_reference (tree type, tree ctor,
7603 			   unsigned HOST_WIDE_INT offset,
7604 			   unsigned HOST_WIDE_INT size,
7605 			   tree from_decl,
7606 			   unsigned HOST_WIDE_INT *suboff)
7607 {
7608   offset_int low_bound;
7609   offset_int elt_size;
7610   offset_int access_index;
7611   tree domain_type = NULL_TREE;
7612   HOST_WIDE_INT inner_offset;
7613 
7614   /* Compute low bound and elt size.  */
7615   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7616     domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7617   if (domain_type && TYPE_MIN_VALUE (domain_type))
7618     {
7619       /* Static constructors for variably sized objects make no sense.  */
7620       if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7621 	return NULL_TREE;
7622       low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7623     }
7624   else
7625     low_bound = 0;
7626   /* Static constructors for variably sized objects make no sense.  */
7627   if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7628     return NULL_TREE;
7629   elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7630 
7631   /* When TYPE is non-null, verify that it specifies a constant-sized
7632      access of a multiple of the array element size.  Avoid division
7633      by zero below when ELT_SIZE is zero, such as with the result of
7634      an initializer for a zero-length array or an empty struct.  */
7635   if (elt_size == 0
7636       || (type
7637 	  && (!TYPE_SIZE_UNIT (type)
7638 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7639     return NULL_TREE;
7640 
7641   /* Compute the array index we look for.  */
7642   access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7643 				 elt_size);
7644   access_index += low_bound;
7645 
7646   /* And offset within the access.  */
7647   inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7648 
7649   unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7650   if (size > elt_sz * BITS_PER_UNIT)
7651     {
7652       /* native_encode_expr constraints.  */
7653       if (size > MAX_BITSIZE_MODE_ANY_MODE
7654 	  || size % BITS_PER_UNIT != 0
7655 	  || inner_offset % BITS_PER_UNIT != 0
7656 	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7657 	return NULL_TREE;
7658 
7659       unsigned ctor_idx;
7660       tree val = get_array_ctor_element_at_index (ctor, access_index,
7661 						  &ctor_idx);
7662       if (!val && ctor_idx >= CONSTRUCTOR_NELTS  (ctor))
7663 	return build_zero_cst (type);
7664 
7665       /* native-encode adjacent ctor elements.  */
7666       unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7667       unsigned bufoff = 0;
7668       offset_int index = 0;
7669       offset_int max_index = access_index;
7670       constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7671       if (!val)
7672 	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7673       else if (!CONSTANT_CLASS_P (val))
7674 	return NULL_TREE;
7675       if (!elt->index)
7676 	;
7677       else if (TREE_CODE (elt->index) == RANGE_EXPR)
7678 	{
7679 	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7680 	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7681 	}
7682       else
7683 	index = max_index = wi::to_offset (elt->index);
7684       index = wi::umax (index, access_index);
7685       do
7686 	{
7687 	  if (bufoff + elt_sz > sizeof (buf))
7688 	    elt_sz = sizeof (buf) - bufoff;
7689 	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
7690 					inner_offset / BITS_PER_UNIT);
7691 	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7692 	    return NULL_TREE;
7693 	  inner_offset = 0;
7694 	  bufoff += len;
7695 
7696 	  access_index += 1;
7697 	  if (wi::cmpu (access_index, index) == 0)
7698 	    val = elt->value;
7699 	  else if (wi::cmpu (access_index, max_index) > 0)
7700 	    {
7701 	      ctor_idx++;
7702 	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7703 		{
7704 		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7705 		  ++max_index;
7706 		}
7707 	      else
7708 		{
7709 		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7710 		  index = 0;
7711 		  max_index = access_index;
7712 		  if (!elt->index)
7713 		    ;
7714 		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
7715 		    {
7716 		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7717 		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7718 		    }
7719 		  else
7720 		    index = max_index = wi::to_offset (elt->index);
7721 		  index = wi::umax (index, access_index);
7722 		  if (wi::cmpu (access_index, index) == 0)
7723 		    val = elt->value;
7724 		  else
7725 		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7726 		}
7727 	    }
7728 	}
7729       while (bufoff < size / BITS_PER_UNIT);
7730       *suboff += size;
7731       return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7732     }
7733 
7734   if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7735     {
7736       if (!size && TREE_CODE (val) != CONSTRUCTOR)
7737 	{
7738 	  /* For the final reference to the entire accessed element
7739 	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7740 	     may be null) in favor of the type of the element, and set
7741 	     SIZE to the size of the accessed element.  */
7742 	  inner_offset = 0;
7743 	  type = TREE_TYPE (val);
7744 	  size = elt_sz * BITS_PER_UNIT;
7745 	}
7746       else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7747 	       && TREE_CODE (val) == CONSTRUCTOR
7748 	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7749 	/* If this isn't the last element in the CTOR, is itself a CTOR,
7750 	   and does not cover the whole object we are requesting, give up
7751 	   since we're not set up for combining from multiple CTORs.  */
7752 	return NULL_TREE;
7753 
7754       *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7755       return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7756 				  suboff);
7757     }
7758 
7759   /* Memory not explicitly mentioned in constructor is 0 (or
7760      the reference is out of range).  */
7761   return type ? build_zero_cst (type) : NULL_TREE;
7762 }
7763 
7764 /* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
7765    of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
7766    is the expected type of the reference; otherwise the type of
7767    the referenced member is used instead.  When SIZE is zero,
7768    attempt to fold a reference to the entire member which OFFSET
7769    refers to.  Increment *SUBOFF by the bit offset
7770    of the accessed member.  */
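/* For example, given the hypothetical initializer

     struct S { int i; long l; } s = { 1, 2L };

   a 32-bit access at bit OFFSET 0 overlaps only the first field,
   so the walk recurses into fold_ctor_reference on its CVAL and
   yields the INTEGER_CST 1.  */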
7771 
7772 static tree
7773 fold_nonarray_ctor_reference (tree type, tree ctor,
7774 			      unsigned HOST_WIDE_INT offset,
7775 			      unsigned HOST_WIDE_INT size,
7776 			      tree from_decl,
7777 			      unsigned HOST_WIDE_INT *suboff)
7778 {
7779   unsigned HOST_WIDE_INT cnt;
7780   tree cfield, cval;
7781 
7782   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7783 			    cval)
7784     {
7785       tree byte_offset = DECL_FIELD_OFFSET (cfield);
7786       tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7787       tree field_size = DECL_SIZE (cfield);
7788 
7789       if (!field_size)
7790 	{
7791 	  /* Determine the size of the flexible array member from
7792 	     the size of the initializer provided for it.  */
7793 	  field_size = TYPE_SIZE (TREE_TYPE (cval));
7794 	}
7795 
7796       /* Variable-sized objects in static constructors make no sense,
7797 	 but field_size can be NULL for flexible array members.  */
7798       gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7799 		  && TREE_CODE (byte_offset) == INTEGER_CST
7800 		  && (field_size != NULL_TREE
7801 		      ? TREE_CODE (field_size) == INTEGER_CST
7802 		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7803 
7804       /* Compute bit offset of the field.  */
7805       offset_int bitoffset
7806 	= (wi::to_offset (field_offset)
7807 	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7808       /* Compute bit offset where the field ends.  */
7809       offset_int bitoffset_end;
7810       if (field_size != NULL_TREE)
7811 	bitoffset_end = bitoffset + wi::to_offset (field_size);
7812       else
7813 	bitoffset_end = 0;
7814 
7815       /* Compute the bit offset of the end of the desired access.
7816 	 As a special case, if the size of the desired access is
7817 	 zero, assume the access is to the entire field (and let
7818 	 the caller make any necessary adjustments based on the
7819 	 bit offset of the field recorded in *SUBOFF).  */
7820       offset_int access_end = offset_int (offset);
7821       if (size)
7822 	access_end += size;
7823       else
7824 	access_end = bitoffset_end;
7825 
7826       /* Is there any overlap between the desired access at
7827 	 [OFFSET, OFFSET+SIZE) and the offset of the field within
7828 	 the object at [BITOFFSET, BITOFFSET_END)?  */
7829       if (wi::cmps (access_end, bitoffset) > 0
7830 	  && (field_size == NULL_TREE
7831 	      || wi::lts_p (offset, bitoffset_end)))
7832 	{
7833 	  *suboff += bitoffset.to_uhwi ();
7834 
7835 	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7836 	    {
7837 	      /* For the final reference to the entire accessed member
7838 		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7839 		 be null) in favor of the type of the member, and set
7840 		 SIZE to the size of the accessed member.  */
7841 	      offset = bitoffset.to_uhwi ();
7842 	      type = TREE_TYPE (cval);
7843 	      size = (bitoffset_end - bitoffset).to_uhwi ();
7844 	    }
7845 
7846 	  /* We do have overlap.  Now see if the field is large enough
7847 	     to cover the access.  Give up for accesses that extend
7848 	     beyond the end of the object or that span multiple fields.  */
7849 	  if (wi::cmps (access_end, bitoffset_end) > 0)
7850 	    return NULL_TREE;
7851 	  if (offset < bitoffset)
7852 	    return NULL_TREE;
7853 
7854 	  offset_int inner_offset = offset_int (offset) - bitoffset;
7855 	  return fold_ctor_reference (type, cval,
7856 				      inner_offset.to_uhwi (), size,
7857 				      from_decl, suboff);
7858 	}
7859     }
7860 
7861   if (!type)
7862     return NULL_TREE;
7863 
7864   return build_zero_cst (type);
7865 }
7866 
7867 /* CTOR is value initializing memory.  Fold a reference of TYPE and
7868    bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
7869    is zero, attempt to fold a reference to the entire subobject
7870    which POLY_OFFSET refers to.  This is used when folding accesses to
7871    string members of aggregates.  When non-null, set *SUBOFF to
7872    the bit offset of the accessed subobject.  */
7873 
7874 tree
7875 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7876 		     const poly_uint64 &poly_size, tree from_decl,
7877 		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
7878 {
7879   tree ret;
7880 
7881   /* We found the field with an exact match.  */
7882   if (type
7883       && useless_type_conversion_p (type, TREE_TYPE (ctor))
7884       && known_eq (poly_offset, 0U))
7885     return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7886 
7887   /* The remaining optimizations need a constant size and offset.  */
7888   unsigned HOST_WIDE_INT size, offset;
7889   if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7890     return NULL_TREE;
7891 
7892   /* We are at the end of the walk; see if we can view-convert the
7893      result.  */
7894   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7895       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
7896       && !compare_tree_int (TYPE_SIZE (type), size)
7897       && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
7898     {
7899       ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7900       if (ret)
7901 	{
7902 	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7903 	  if (ret)
7904 	    STRIP_USELESS_TYPE_CONVERSION (ret);
7905 	}
7906       return ret;
7907     }
7908   /* For constants and byte-aligned/sized reads try to go through
7909      native_encode/interpret.  */
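  /* E.g. a 32-bit read at byte offset 4 from a CTOR that is a
     REAL_CST of type double can be folded by encoding the constant
     to its byte representation and reinterpreting the selected
     bytes in TYPE.  */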
7910   if (CONSTANT_CLASS_P (ctor)
7911       && BITS_PER_UNIT == 8
7912       && offset % BITS_PER_UNIT == 0
7913       && offset / BITS_PER_UNIT <= INT_MAX
7914       && size % BITS_PER_UNIT == 0
7915       && size <= MAX_BITSIZE_MODE_ANY_MODE
7916       && can_native_interpret_type_p (type))
7917     {
7918       unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7919       int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7920 				    offset / BITS_PER_UNIT);
7921       if (len > 0)
7922 	return native_interpret_expr (type, buf, len);
7923     }
7924   if (TREE_CODE (ctor) == CONSTRUCTOR)
7925     {
7926       unsigned HOST_WIDE_INT dummy = 0;
7927       if (!suboff)
7928 	suboff = &dummy;
7929 
7930       tree ret;
7931       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7932 	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7933 	ret = fold_array_ctor_reference (type, ctor, offset, size,
7934 					 from_decl, suboff);
7935       else
7936 	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7937 					    from_decl, suboff);
7938 
7939       /* Fall back to native_encode_initializer.  Needs to be done
7940 	 only in the outermost fold_ctor_reference call (because it itself
7941 	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
7942       if (ret == NULL_TREE
7943 	  && suboff == &dummy
7944 	  && BITS_PER_UNIT == 8
7945 	  && offset % BITS_PER_UNIT == 0
7946 	  && offset / BITS_PER_UNIT <= INT_MAX
7947 	  && size % BITS_PER_UNIT == 0
7948 	  && size <= MAX_BITSIZE_MODE_ANY_MODE
7949 	  && can_native_interpret_type_p (type))
7950 	{
7951 	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7952 	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7953 					       offset / BITS_PER_UNIT);
7954 	  if (len > 0)
7955 	    return native_interpret_expr (type, buf, len);
7956 	}
7957 
7958       return ret;
7959     }
7960 
7961   return NULL_TREE;
7962 }
7963 
7964 /* Return the tree representing the element referenced by T if T is an
7965    ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
7966    names using VALUEIZE.  Return NULL_TREE otherwise.  */
7967 
7968 tree
7969 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7970 {
7971   tree ctor, idx, base;
7972   poly_int64 offset, size, max_size;
7973   tree tem;
7974   bool reverse;
7975 
7976   if (TREE_THIS_VOLATILE (t))
7977     return NULL_TREE;
7978 
7979   if (DECL_P (t))
7980     return get_symbol_constant_value (t);
7981 
7982   tem = fold_read_from_constant_string (t);
7983   if (tem)
7984     return tem;
7985 
7986   switch (TREE_CODE (t))
7987     {
7988     case ARRAY_REF:
7989     case ARRAY_RANGE_REF:
7990       /* Constant indexes are handled well by get_base_constructor.
7991 	 We only need to special-case variable offsets.
7992 	 FIXME: This code can't handle nested references with variable indexes
7993 	 (they will be handled only by iteration of ccp).  Perhaps we can bring
7994 	 get_ref_base_and_extent here and make it use a valueize callback.  */
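      /* E.g. for a read of a[i_1] where i_1 has been valueized to
	 the constant 3, with a zero low bound and 4-byte elements,
	 WOFFSET below becomes the constant bit offset 96 and the
	 read is folded from the initializer of the base (the SSA
	 name here is purely illustrative).  */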
7995       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7996 	  && valueize
7997 	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
7998 	  && poly_int_tree_p (idx))
7999 	{
8000 	  tree low_bound, unit_size;
8001 
8002 	  /* If the resulting bit-offset is constant, track it.  */
8003 	  if ((low_bound = array_ref_low_bound (t),
8004 	       poly_int_tree_p (low_bound))
8005 	      && (unit_size = array_ref_element_size (t),
8006 		  tree_fits_uhwi_p (unit_size)))
8007 	    {
8008 	      poly_offset_int woffset
8009 		= wi::sext (wi::to_poly_offset (idx)
8010 			    - wi::to_poly_offset (low_bound),
8011 			    TYPE_PRECISION (sizetype));
8012 	      woffset *= tree_to_uhwi (unit_size);
8013 	      woffset *= BITS_PER_UNIT;
8014 	      if (woffset.to_shwi (&offset))
8015 		{
8016 		  base = TREE_OPERAND (t, 0);
8017 		  ctor = get_base_constructor (base, &offset, valueize);
8018 		  /* Empty constructor.  Always fold to 0.  */
8019 		  if (ctor == error_mark_node)
8020 		    return build_zero_cst (TREE_TYPE (t));
8021 		  /* Out of bound array access.  Value is undefined,
8022 		     but don't fold.  */
8023 		  if (maybe_lt (offset, 0))
8024 		    return NULL_TREE;
8025 		  /* We cannot determine ctor.  */
8026 		  if (!ctor)
8027 		    return NULL_TREE;
8028 		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8029 					      tree_to_uhwi (unit_size)
8030 					      * BITS_PER_UNIT,
8031 					      base);
8032 		}
8033 	    }
8034 	}
8035       /* Fallthru.  */
8036 
8037     case COMPONENT_REF:
8038     case BIT_FIELD_REF:
8039     case TARGET_MEM_REF:
8040     case MEM_REF:
8041       base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8042       ctor = get_base_constructor (base, &offset, valueize);
8043 
8044       /* Empty constructor.  Always fold to 0.  */
8045       if (ctor == error_mark_node)
8046 	return build_zero_cst (TREE_TYPE (t));
8047       /* We do not know the precise address.  */
8048       if (!known_size_p (max_size) || maybe_ne (max_size, size))
8049 	return NULL_TREE;
8050       /* We cannot determine ctor.  */
8051       if (!ctor)
8052 	return NULL_TREE;
8053 
8054       /* Out of bound array access.  Value is undefined, but don't fold.  */
8055       if (maybe_lt (offset, 0))
8056 	return NULL_TREE;
8057 
8058       tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8059       if (tem)
8060 	return tem;
8061 
8062       /* For bit field reads try to read the representative and
8063 	 adjust.  */
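      /* E.g. for the hypothetical

	   struct { int a : 3; int b : 7; } x = { 1, 5 };

	 a read of x.b is folded by reading the whole integral
	 representative word from the initializer and shifting the
	 result right by b's position within it (from the most
	 significant end on big-endian targets).  */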
8064       if (TREE_CODE (t) == COMPONENT_REF
8065 	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8066 	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8067 	{
8068 	  HOST_WIDE_INT csize, coffset;
8069 	  tree field = TREE_OPERAND (t, 1);
8070 	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8071 	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8072 	      && size.is_constant (&csize)
8073 	      && offset.is_constant (&coffset)
8074 	      && (coffset % BITS_PER_UNIT != 0
8075 		  || csize % BITS_PER_UNIT != 0)
8076 	      && !reverse
8077 	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8078 	    {
8079 	      poly_int64 bitoffset;
8080 	      poly_uint64 field_offset, repr_offset;
8081 	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8082 		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8083 		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8084 	      else
8085 		bitoffset = 0;
8086 	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8087 			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8088 	      HOST_WIDE_INT bitoff;
8089 	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8090 			  - TYPE_PRECISION (TREE_TYPE (field)));
8091 	      if (bitoffset.is_constant (&bitoff)
8092 		  && bitoff >= 0
8093 		  && bitoff <= diff)
8094 		{
8095 		  offset -= bitoff;
8096 		  size = tree_to_uhwi (DECL_SIZE (repr));
8097 
8098 		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8099 					     size, base);
8100 		  if (tem && TREE_CODE (tem) == INTEGER_CST)
8101 		    {
8102 		      if (!BYTES_BIG_ENDIAN)
8103 			tem = wide_int_to_tree (TREE_TYPE (field),
8104 						wi::lrshift (wi::to_wide (tem),
8105 							     bitoff));
8106 		      else
8107 			tem = wide_int_to_tree (TREE_TYPE (field),
8108 						wi::lrshift (wi::to_wide (tem),
8109 							     diff - bitoff));
8110 		      return tem;
8111 		    }
8112 		}
8113 	    }
8114 	}
8115       break;
8116 
8117     case REALPART_EXPR:
8118     case IMAGPART_EXPR:
8119       {
8120 	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8121 	if (c && TREE_CODE (c) == COMPLEX_CST)
8122 	  return fold_build1_loc (EXPR_LOCATION (t),
8123 				  TREE_CODE (t), TREE_TYPE (t), c);
8124 	break;
8125       }
8126 
8127     default:
8128       break;
8129     }
8130 
8131   return NULL_TREE;
8132 }
8133 
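/* Like fold_const_aggregate_ref_1 above, but without valueization
   of SSA names.  */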
8134 tree
8135 fold_const_aggregate_ref (tree t)
8136 {
8137   return fold_const_aggregate_ref_1 (t, NULL);
8138 }
8139 
8140 /* Lookup virtual method with index TOKEN in a virtual table V
8141    at OFFSET.
8142    If CAN_REFER is non-NULL, set it to false if the method is not
8143    referable or if the virtual table is ill-formed (such as one rewritten
8144    by a non-C++ produced symbol); otherwise just return NULL in that case.  */
8145 
8146 tree
8147 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8148 				   tree v,
8149 				   unsigned HOST_WIDE_INT offset,
8150 				   bool *can_refer)
8151 {
8152   tree vtable = v, init, fn;
8153   unsigned HOST_WIDE_INT size;
8154   unsigned HOST_WIDE_INT elt_size, access_index;
8155   tree domain_type;
8156 
8157   if (can_refer)
8158     *can_refer = true;
8159 
8160   /* First of all, double-check that we have a virtual table.  */
8161   if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8162     {
8163       /* Pass down that we lost track of the target.  */
8164       if (can_refer)
8165 	*can_refer = false;
8166       return NULL_TREE;
8167     }
8168 
8169   init = ctor_for_folding (v);
8170 
8171   /* The virtual tables should always be born with constructors
8172      and we should always assume that they are available for
8173      folding.  At the moment we do not stream them in all cases,
8174      but it should never happen that the ctor seems unreachable.  */
8175   gcc_assert (init);
8176   if (init == error_mark_node)
8177     {
8178       /* Pass down that we lost track of the target.  */
8179       if (can_refer)
8180 	*can_refer = false;
8181       return NULL_TREE;
8182     }
8183   gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8184   size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8185   offset *= BITS_PER_UNIT;
8186   offset += token * size;
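  /* E.g. with 8-byte vtable entries (SIZE == 64), OFFSET 16 and
     TOKEN 2, this yields bit offset 16 * 8 + 2 * 64 == 256, and
     thus ACCESS_INDEX 4 below.  */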
8187 
8188   /* Lookup the value in the constructor, which is assumed to be an array.
8189      This is equivalent to
8190      fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8191 			       offset, size, NULL);
8192      but in constant time.  We expect that the frontend produced a simple
8193      array without indexed initializers.  */
8194 
8195   gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8196   domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8197   gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8198   elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8199 
8200   access_index = offset / BITS_PER_UNIT / elt_size;
8201   gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8202 
8203   /* The C++ FE can now produce indexed fields, and we check whether the
8204      indexes match.  */
8205   if (access_index < CONSTRUCTOR_NELTS (init))
8206     {
8207       fn = CONSTRUCTOR_ELT (init, access_index)->value;
8208       tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8209       gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8210       STRIP_NOPS (fn);
8211     }
8212   else
8213     fn = NULL;
8214 
8215   /* For a type-inconsistent program we may end up looking up a virtual method
8216      in a virtual table that does not contain TOKEN entries.  We may overrun
8217      the virtual table and pick up a constant or RTTI info pointer.
8218      In any case the call is undefined.  */
8219   if (!fn
8220       || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8221       || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8222     fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8223   else
8224     {
8225       fn = TREE_OPERAND (fn, 0);
8226 
8227       /* When the cgraph node is missing and the function is not public, we
8228 	 cannot devirtualize.  This can happen in WHOPR when the actual method
8229 	 ends up in another partition, because we found the devirtualization
8230 	 possibility too late.  */
8231       if (!can_refer_decl_in_current_unit_p (fn, vtable))
8232 	{
8233 	  if (can_refer)
8234 	    {
8235 	      *can_refer = false;
8236 	      return fn;
8237 	    }
8238 	  return NULL_TREE;
8239 	}
8240     }
8241 
8242   /* Make sure we create a cgraph node for functions we'll reference.
8243      They can be non-existent if the reference comes from an entry
8244      of an external vtable for example.  */
8245   cgraph_node::get_create (fn);
8246 
8247   return fn;
8248 }
8249 
8250 /* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
8251    is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8252    KNOWN_BINFO carries the binfo describing the true type of
8253    OBJ_TYPE_REF_OBJECT(REF).
8254    If CAN_REFER is non-NULL, set it to false if the method is not
8255    referable or if the virtual table is ill-formed (such as one rewritten
8256    by a non-C++ produced symbol); otherwise just return NULL in that case.  */
8257 
8258 tree
8259 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8260 				  bool *can_refer)
8261 {
8262   unsigned HOST_WIDE_INT offset;
8263   tree v;
8264 
8265   v = BINFO_VTABLE (known_binfo);
8266   /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
8267   if (!v)
8268     return NULL_TREE;
8269 
8270   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8271     {
8272       if (can_refer)
8273 	*can_refer = false;
8274       return NULL_TREE;
8275     }
8276   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8277 }
8278 
8279 /* Given a pointer value T, return a simplified version of an
8280    indirection through T, or NULL_TREE if no simplification is
8281    possible.  Note that the resulting type may differ from the type
8282    pointed to, but it is still compatible with it from the langhooks
8283    point of view.  */
8284 
8285 tree
8286 gimple_fold_indirect_ref (tree t)
8287 {
8288   tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8289   tree sub = t;
8290   tree subtype;
8291 
8292   STRIP_NOPS (sub);
8293   subtype = TREE_TYPE (sub);
8294   if (!POINTER_TYPE_P (subtype)
8295       || TYPE_REF_CAN_ALIAS_ALL (ptype))
8296     return NULL_TREE;
8297 
8298   if (TREE_CODE (sub) == ADDR_EXPR)
8299     {
8300       tree op = TREE_OPERAND (sub, 0);
8301       tree optype = TREE_TYPE (op);
8302       /* *&p => p */
8303       if (useless_type_conversion_p (type, optype))
8304         return op;
8305 
8306       /* *(foo *)&fooarray => fooarray[0] */
8307       if (TREE_CODE (optype) == ARRAY_TYPE
8308 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8309 	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
8310        {
8311          tree type_domain = TYPE_DOMAIN (optype);
8312          tree min_val = size_zero_node;
8313          if (type_domain && TYPE_MIN_VALUE (type_domain))
8314            min_val = TYPE_MIN_VALUE (type_domain);
8315 	 if (TREE_CODE (min_val) == INTEGER_CST)
8316 	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8317        }
8318       /* *(foo *)&complexfoo => __real__ complexfoo */
8319       else if (TREE_CODE (optype) == COMPLEX_TYPE
8320                && useless_type_conversion_p (type, TREE_TYPE (optype)))
8321         return fold_build1 (REALPART_EXPR, type, op);
8322       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8323       else if (TREE_CODE (optype) == VECTOR_TYPE
8324                && useless_type_conversion_p (type, TREE_TYPE (optype)))
8325         {
8326           tree part_width = TYPE_SIZE (type);
8327           tree index = bitsize_int (0);
8328           return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8329         }
8330     }
8331 
8332   /* *(p + CST) -> ...  */
8333   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8334       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8335     {
8336       tree addr = TREE_OPERAND (sub, 0);
8337       tree off = TREE_OPERAND (sub, 1);
8338       tree addrtype;
8339 
8340       STRIP_NOPS (addr);
8341       addrtype = TREE_TYPE (addr);
8342 
8343       /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8344       if (TREE_CODE (addr) == ADDR_EXPR
8345 	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8346 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8347 	  && tree_fits_uhwi_p (off))
8348 	{
8349           unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8350           tree part_width = TYPE_SIZE (type);
8351           unsigned HOST_WIDE_INT part_widthi
8352             = tree_to_shwi (part_width) / BITS_PER_UNIT;
8353           unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8354           tree index = bitsize_int (indexi);
8355 	  if (known_lt (offset / part_widthi,
8356 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8357             return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8358                                 part_width, index);
8359 	}
8360 
8361       /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8362       if (TREE_CODE (addr) == ADDR_EXPR
8363 	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8364 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8365         {
8366           tree size = TYPE_SIZE_UNIT (type);
8367           if (tree_int_cst_equal (size, off))
8368             return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8369         }
8370 
8371       /* *(p + CST) -> MEM_REF <p, CST>.  */
8372       if (TREE_CODE (addr) != ADDR_EXPR
8373 	  || DECL_P (TREE_OPERAND (addr, 0)))
8374 	return fold_build2 (MEM_REF, type,
8375 			    addr,
8376 			    wide_int_to_tree (ptype, wi::to_wide (off)));
8377     }
8378 
8379   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8380   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8381       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8382       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8383     {
8384       tree type_domain;
8385       tree min_val = size_zero_node;
8386       tree osub = sub;
8387       sub = gimple_fold_indirect_ref (sub);
8388       if (! sub)
8389 	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8390       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8391       if (type_domain && TYPE_MIN_VALUE (type_domain))
8392         min_val = TYPE_MIN_VALUE (type_domain);
8393       if (TREE_CODE (min_val) == INTEGER_CST)
8394 	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8395     }
8396 
8397   return NULL_TREE;
8398 }
8399 
8400 /* Return true if CODE is an operation that, when operating on signed
8401    integer types, involves undefined behavior on overflow and the
8402    operation can be expressed with unsigned arithmetic.  */
8403 
8404 bool
8405 arith_code_with_undefined_signed_overflow (tree_code code)
8406 {
8407   switch (code)
8408     {
8409     case ABS_EXPR:
8410     case PLUS_EXPR:
8411     case MINUS_EXPR:
8412     case MULT_EXPR:
8413     case NEGATE_EXPR:
8414     case POINTER_PLUS_EXPR:
8415       return true;
8416     default:
8417       return false;
8418     }
8419 }
8420 
8421 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8422    operation that can be transformed to unsigned arithmetic by converting
8423    its operand, carrying out the operation in the corresponding unsigned
8424    type and converting the result back to the original type.
8425 
8426    Returns a sequence of statements that replace STMT and also contain
8427    a modified form of STMT itself.  */
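/* For example (a sketch; the SSA names are hypothetical), a statement

     _3 = _1 + _2;

   with signed int operands is replaced by

     _4 = (unsigned int) _1;
     _5 = (unsigned int) _2;
     _6 = _4 + _5;
     _3 = (int) _6;

   where unsigned wrap-around is well defined.  */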
8428 
8429 gimple_seq
8430 rewrite_to_defined_overflow (gimple *stmt)
8431 {
8432   if (dump_file && (dump_flags & TDF_DETAILS))
8433     {
8434       fprintf (dump_file, "rewriting stmt with undefined signed "
8435 	       "overflow ");
8436       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8437     }
8438 
8439   tree lhs = gimple_assign_lhs (stmt);
8440   tree type = unsigned_type_for (TREE_TYPE (lhs));
8441   gimple_seq stmts = NULL;
8442   if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8443     gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8444   else
8445     for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8446       {
8447 	tree op = gimple_op (stmt, i);
8448 	op = gimple_convert (&stmts, type, op);
8449 	gimple_set_op (stmt, i, op);
8450       }
8451   gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8452   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8453     gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8454   gimple_set_modified (stmt, true);
8455   gimple_seq_add_stmt (&stmts, stmt);
8456   gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8457   gimple_seq_add_stmt (&stmts, cvt);
8458 
8459   return stmts;
8460 }
8461 
8462 
8463 /* The valueization hook we use for the gimple_build API simplification.
8464    This makes us match fold_buildN behavior by only combining with
8465    statements in the sequence(s) we are currently building.  */
8466 
8467 static tree
8468 gimple_build_valueize (tree op)
8469 {
8470   if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8471     return op;
8472   return NULL_TREE;
8473 }
8474 
8475 /* Build the expression CODE OP0 of type TYPE with location LOC,
8476    simplifying it first if possible.  Returns the built
8477    expression value and appends statements possibly defining it
8478    to SEQ.  */
8479 
8480 tree
8481 gimple_build (gimple_seq *seq, location_t loc,
8482 	      enum tree_code code, tree type, tree op0)
8483 {
8484   tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8485   if (!res)
8486     {
8487       res = create_tmp_reg_or_ssa_name (type);
8488       gimple *stmt;
8489       if (code == REALPART_EXPR
8490 	  || code == IMAGPART_EXPR
8491 	  || code == VIEW_CONVERT_EXPR)
8492 	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8493       else
8494 	stmt = gimple_build_assign (res, code, op0);
8495       gimple_set_location (stmt, loc);
8496       gimple_seq_add_stmt_without_update (seq, stmt);
8497     }
8498   return res;
8499 }
8500 
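/* A minimal usage sketch (the caller context here is hypothetical):

     gimple_seq seq = NULL;
     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, op);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   If the expression simplifies to an existing value or constant,
   SEQ remains empty and RES is that value.  */
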
8501 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8502    simplifying it first if possible.  Returns the built
8503    expression value and appends statements possibly defining it
8504    to SEQ.  */
8505 
8506 tree
8507 gimple_build (gimple_seq *seq, location_t loc,
8508 	      enum tree_code code, tree type, tree op0, tree op1)
8509 {
8510   tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8511   if (!res)
8512     {
8513       res = create_tmp_reg_or_ssa_name (type);
8514       gimple *stmt = gimple_build_assign (res, code, op0, op1);
8515       gimple_set_location (stmt, loc);
8516       gimple_seq_add_stmt_without_update (seq, stmt);
8517     }
8518   return res;
8519 }
8520 
8521 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8522    simplifying it first if possible.  Returns the built
8523    expression value and appends statements possibly defining it
8524    to SEQ.  */
8525 
8526 tree
8527 gimple_build (gimple_seq *seq, location_t loc,
8528 	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
8529 {
8530   tree res = gimple_simplify (code, type, op0, op1, op2,
8531 			      seq, gimple_build_valueize);
8532   if (!res)
8533     {
8534       res = create_tmp_reg_or_ssa_name (type);
8535       gimple *stmt;
8536       if (code == BIT_FIELD_REF)
8537 	stmt = gimple_build_assign (res, code,
8538 				    build3 (code, type, op0, op1, op2));
8539       else
8540 	stmt = gimple_build_assign (res, code, op0, op1, op2);
8541       gimple_set_location (stmt, loc);
8542       gimple_seq_add_stmt_without_update (seq, stmt);
8543     }
8544   return res;
8545 }
8546 
8547 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8548    void) with a location LOC.  Returns the built expression value (or NULL_TREE
8549    if TYPE is void) and appends statements possibly defining it to SEQ.  */
8550 
8551 tree
8552 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8553 {
8554   tree res = NULL_TREE;
8555   gcall *stmt;
8556   if (internal_fn_p (fn))
8557     stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8558   else
8559     {
8560       tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8561       stmt = gimple_build_call (decl, 0);
8562     }
8563   if (!VOID_TYPE_P (type))
8564     {
8565       res = create_tmp_reg_or_ssa_name (type);
8566       gimple_call_set_lhs (stmt, res);
8567     }
8568   gimple_set_location (stmt, loc);
8569   gimple_seq_add_stmt_without_update (seq, stmt);
8570   return res;
8571 }
8572 
8573 /* Build the call FN (ARG0) with a result of type TYPE
8574    (or no result if TYPE is void) with location LOC,
8575    simplifying it first if possible.  Returns the built
8576    expression value (or NULL_TREE if TYPE is void) and appends
8577    statements possibly defining it to SEQ.  */
8578 
8579 tree
8580 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8581 	      tree type, tree arg0)
8582 {
8583   tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8584   if (!res)
8585     {
8586       gcall *stmt;
8587       if (internal_fn_p (fn))
8588 	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8589       else
8590 	{
8591 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8592 	  stmt = gimple_build_call (decl, 1, arg0);
8593 	}
8594       if (!VOID_TYPE_P (type))
8595 	{
8596 	  res = create_tmp_reg_or_ssa_name (type);
8597 	  gimple_call_set_lhs (stmt, res);
8598 	}
8599       gimple_set_location (stmt, loc);
8600       gimple_seq_add_stmt_without_update (seq, stmt);
8601     }
8602   return res;
8603 }
8604 
8605 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8606    (or no result if TYPE is void) with location LOC,
8607    simplifying it first if possible.  Returns the built
8608    expression value (or NULL_TREE if TYPE is void) and appends
8609    statements possibly defining it to SEQ.  */
8610 
8611 tree
8612 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8613 	      tree type, tree arg0, tree arg1)
8614 {
8615   tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8616   if (!res)
8617     {
8618       gcall *stmt;
8619       if (internal_fn_p (fn))
8620 	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8621       else
8622 	{
8623 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8624 	  stmt = gimple_build_call (decl, 2, arg0, arg1);
8625 	}
8626       if (!VOID_TYPE_P (type))
8627 	{
8628 	  res = create_tmp_reg_or_ssa_name (type);
8629 	  gimple_call_set_lhs (stmt, res);
8630 	}
8631       gimple_set_location (stmt, loc);
8632       gimple_seq_add_stmt_without_update (seq, stmt);
8633     }
8634   return res;
8635 }
8636 
8637 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8638    (or no result if TYPE is void) with location LOC,
8639    simplifying it first if possible.  Returns the built
8640    expression value (or NULL_TREE if TYPE is void) and appends
8641    statements possibly defining it to SEQ.  */
8642 
8643 tree
8644 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8645 	      tree type, tree arg0, tree arg1, tree arg2)
8646 {
8647   tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8648 			      seq, gimple_build_valueize);
8649   if (!res)
8650     {
8651       gcall *stmt;
8652       if (internal_fn_p (fn))
8653 	stmt = gimple_build_call_internal (as_internal_fn (fn),
8654 					   3, arg0, arg1, arg2);
8655       else
8656 	{
8657 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8658 	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8659 	}
8660       if (!VOID_TYPE_P (type))
8661 	{
8662 	  res = create_tmp_reg_or_ssa_name (type);
8663 	  gimple_call_set_lhs (stmt, res);
8664 	}
8665       gimple_set_location (stmt, loc);
8666       gimple_seq_add_stmt_without_update (seq, stmt);
8667     }
8668   return res;
8669 }
8670 
8671 /* Build the conversion (TYPE) OP with a result of type TYPE
8672    with location LOC if such conversion is necessary in GIMPLE,
8673    simplifying it first.
8674    Returns the built expression value and appends
8675    statements possibly defining it to SEQ.  */
8676 
8677 tree
8678 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8679 {
8680   if (useless_type_conversion_p (type, TREE_TYPE (op)))
8681     return op;
8682   return gimple_build (seq, loc, NOP_EXPR, type, op);
8683 }
8684 
8685 /* Build the conversion (ptrofftype) OP with a result of a type
8686    compatible with ptrofftype with location LOC if such conversion
8687    is necessary in GIMPLE, simplifying it first.
8688    Returns the built expression value and appends
8689    statements possibly defining it to SEQ.  */
8690 
8691 tree
8692 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8693 {
8694   if (ptrofftype_p (TREE_TYPE (op)))
8695     return op;
8696   return gimple_convert (seq, loc, sizetype, op);
8697 }
8698 
8699 /* Build a vector of type TYPE in which each element has the value OP.
8700    Return a gimple value for the result, appending any new statements
8701    to SEQ.  */
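/* E.g. duplicating the constant 5 into a four-element integer
   vector type yields the VECTOR_CST { 5, 5, 5, 5 }; a variable OP
   instead produces an assignment from a CONSTRUCTOR, or a
   VEC_DUPLICATE_EXPR for variable-length vector types.  */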
8702 
8703 tree
8704 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8705 			      tree op)
8706 {
8707   if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8708       && !CONSTANT_CLASS_P (op))
8709     return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8710 
8711   tree res, vec = build_vector_from_val (type, op);
8712   if (is_gimple_val (vec))
8713     return vec;
8714   if (gimple_in_ssa_p (cfun))
8715     res = make_ssa_name (type);
8716   else
8717     res = create_tmp_reg (type);
8718   gimple *stmt = gimple_build_assign (res, vec);
8719   gimple_set_location (stmt, loc);
8720   gimple_seq_add_stmt_without_update (seq, stmt);
8721   return res;
8722 }
8723 
8724 /* Build a vector from BUILDER, handling the case in which some elements
8725    are non-constant.  Return a gimple value for the result, appending any
8726    new instructions to SEQ.
8727 
8728    BUILDER must not have a stepped encoding on entry.  This is because
8729    the function is not geared up to handle the arithmetic that would
8730    be needed in the variable case, and any code building a vector that
8731    is known to be constant should use BUILDER->build () directly.  */
8732 
8733 tree
8734 gimple_build_vector (gimple_seq *seq, location_t loc,
8735 		     tree_vector_builder *builder)
8736 {
8737   gcc_assert (builder->nelts_per_pattern () <= 2);
8738   unsigned int encoded_nelts = builder->encoded_nelts ();
8739   for (unsigned int i = 0; i < encoded_nelts; ++i)
8740     if (!CONSTANT_CLASS_P ((*builder)[i]))
8741       {
8742 	tree type = builder->type ();
8743 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8744 	vec<constructor_elt, va_gc> *v;
8745 	vec_alloc (v, nelts);
8746 	for (i = 0; i < nelts; ++i)
8747 	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
8748 
8749 	tree res;
8750 	if (gimple_in_ssa_p (cfun))
8751 	  res = make_ssa_name (type);
8752 	else
8753 	  res = create_tmp_reg (type);
8754 	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8755 	gimple_set_location (stmt, loc);
8756 	gimple_seq_add_stmt_without_update (seq, stmt);
8757 	return res;
8758       }
8759   return builder->build ();
8760 }
8761 
8762 /* Emit gimple statements into SEQ that take the value given in OLD_SIZE
8763    and generate a value guaranteed to be rounded upwards to ALIGN.
8764 
8765    Return the tree node representing this size; it is of type TYPE.  */
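/* E.g. with ALIGN == 16 the emitted computation is equivalent to

     new_size = (old_size + 15) & ~15;

   so OLD_SIZE values 1 through 16 all round up to 16.  */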
8766 
8767 tree
8768 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8769 		       tree old_size, unsigned HOST_WIDE_INT align)
8770 {
8771   unsigned HOST_WIDE_INT tg_mask = align - 1;
8772   /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
8773   gcc_assert (INTEGRAL_TYPE_P (type));
8774   tree tree_mask = build_int_cst (type, tg_mask);
8775   tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8776 				tree_mask);
8777 
8778   tree mask = build_int_cst (type, -align);
8779   return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8780 }
8781 
8782 /* Return true if the result of assignment STMT is known to be non-negative.
8783    If the return value is based on the assumption that signed overflow is
8784    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8785    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
8786 
8787 static bool
8788 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8789 				   int depth)
8790 {
8791   enum tree_code code = gimple_assign_rhs_code (stmt);
8792   switch (get_gimple_rhs_class (code))
8793     {
8794     case GIMPLE_UNARY_RHS:
8795       return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8796 					     gimple_expr_type (stmt),
8797 					     gimple_assign_rhs1 (stmt),
8798 					     strict_overflow_p, depth);
8799     case GIMPLE_BINARY_RHS:
8800       return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8801 					      gimple_expr_type (stmt),
8802 					      gimple_assign_rhs1 (stmt),
8803 					      gimple_assign_rhs2 (stmt),
8804 					      strict_overflow_p, depth);
8805     case GIMPLE_TERNARY_RHS:
8806       return false;
8807     case GIMPLE_SINGLE_RHS:
8808       return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8809 					      strict_overflow_p, depth);
8810     case GIMPLE_INVALID_RHS:
8811       break;
8812     }
8813   gcc_unreachable ();
8814 }
8815 
8816 /* Return true if the return value of call STMT is known to be non-negative.
8817    If the return value is based on the assumption that signed overflow is
8818    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8819    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
8820 
8821 static bool
8822 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8823 				 int depth)
8824 {
8825   tree arg0 = gimple_call_num_args (stmt) > 0 ?
8826     gimple_call_arg (stmt, 0) : NULL_TREE;
8827   tree arg1 = gimple_call_num_args (stmt) > 1 ?
8828     gimple_call_arg (stmt, 1) : NULL_TREE;
8829 
8830   return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
8831 					gimple_call_combined_fn (stmt),
8832 					arg0,
8833 					arg1,
8834 					strict_overflow_p, depth);
8835 }
8836 
8837 /* Return true if the result of PHI node STMT is known to be non-negative.
8838    If the return value is based on the assumption that signed overflow is
8839    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8840    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
8841 
8842 static bool
8843 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8844 				int depth)
8845 {
8846   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8847     {
8848       tree arg = gimple_phi_arg_def (stmt, i);
8849       if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8850 	return false;
8851     }
8852   return true;
8853 }
8854 
8855 /* Return true if STMT is known to compute a non-negative value.
8856    If the return value is based on the assumption that signed overflow is
8857    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8858    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
8859 
8860 bool
8861 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8862 				 int depth)
8863 {
8864   switch (gimple_code (stmt))
8865     {
8866     case GIMPLE_ASSIGN:
8867       return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8868 						depth);
8869     case GIMPLE_CALL:
8870       return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8871 					      depth);
8872     case GIMPLE_PHI:
8873       return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8874 					     depth);
8875     default:
8876       return false;
8877     }
8878 }
8879 
8880 /* Return true if the floating-point value computed by assignment STMT
8881    is known to have an integer value.  We also allow +Inf, -Inf and NaN
8882    to be considered integer values.  Return false for signaling NaN.
8883 
8884    DEPTH is the current nesting depth of the query.  */
8885 
8886 static bool
8887 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
8888 {
8889   enum tree_code code = gimple_assign_rhs_code (stmt);
8890   switch (get_gimple_rhs_class (code))
8891     {
8892     case GIMPLE_UNARY_RHS:
8893       return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
8894 					  gimple_assign_rhs1 (stmt), depth);
8895     case GIMPLE_BINARY_RHS:
8896       return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
8897 					   gimple_assign_rhs1 (stmt),
8898 					   gimple_assign_rhs2 (stmt), depth);
8899     case GIMPLE_TERNARY_RHS:
8900       return false;
8901     case GIMPLE_SINGLE_RHS:
8902       return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
8903     case GIMPLE_INVALID_RHS:
8904       break;
8905     }
8906   gcc_unreachable ();
8907 }
8908 
8909 /* Return true if the floating-point value computed by call STMT is known
8910    to have an integer value.  We also allow +Inf, -Inf and NaN to be
8911    considered integer values.  Return false for signaling NaN.
8912 
8913    DEPTH is the current nesting depth of the query.  */
8914 
8915 static bool
8916 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
8917 {
8918   tree arg0 = (gimple_call_num_args (stmt) > 0
8919 	       ? gimple_call_arg (stmt, 0)
8920 	       : NULL_TREE);
8921   tree arg1 = (gimple_call_num_args (stmt) > 1
8922 	       ? gimple_call_arg (stmt, 1)
8923 	       : NULL_TREE);
8924   return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
8925 				     arg0, arg1, depth);
8926 }
8927 
8928 /* Return true if the floating-point result of phi STMT is known to have
8929    an integer value.  We also allow +Inf, -Inf and NaN to be considered
8930    integer values.  Return false for signaling NaN.
8931 
8932    DEPTH is the current nesting depth of the query.  */
8933 
8934 static bool
8935 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
8936 {
8937   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8938     {
8939       tree arg = gimple_phi_arg_def (stmt, i);
8940       if (!integer_valued_real_single_p (arg, depth + 1))
8941 	return false;
8942     }
8943   return true;
8944 }
8945 
8946 /* Return true if the floating-point value computed by STMT is known
8947    to have an integer value.  We also allow +Inf, -Inf and NaN to be
8948    considered integer values.  Return false for signaling NaN.
8949 
8950    DEPTH is the current nesting depth of the query.  */
8951 
8952 bool
8953 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
8954 {
8955   switch (gimple_code (stmt))
8956     {
8957     case GIMPLE_ASSIGN:
8958       return gimple_assign_integer_valued_real_p (stmt, depth);
8959     case GIMPLE_CALL:
8960       return gimple_call_integer_valued_real_p (stmt, depth);
8961     case GIMPLE_PHI:
8962       return gimple_phi_integer_valued_real_p (stmt, depth);
8963     default:
8964       return false;
8965     }
8966 }
8967