1 /* Statement simplification on GIMPLE.
2    Copyright (C) 2010-2016 Free Software Foundation, Inc.
3    Split out from tree-ssa-ccp.c.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
57 
58 
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) entities never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they are not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined into its callers has no body of its
	 own left to reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized. */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
158 
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the referenced base
   cannot be used in the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the whole value becomes
     a single ADDR_EXPR that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl
	     in place, so the address refers to a real object.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the symbol the address refers to cannot be
	 referenced from this unit (see can_refer_decl_in_current_unit_p).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      /* Taking the address of a variable makes it addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type the caller saw; convert back if the
	 canonicalization changed it in a meaningful way.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not minimal invariants;
     strip the overflow flag.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
221 
222 /* If SYM is a constant variable with known value, return the value.
223    NULL_TREE is returned otherwise.  */
224 
225 tree
get_symbol_constant_value(tree sym)226 get_symbol_constant_value (tree sym)
227 {
228   tree val = ctor_for_folding (sym);
229   if (val != error_mark_node)
230     {
231       if (val)
232 	{
233 	  val = canonicalize_constructor_val (unshare_expr (val), sym);
234 	  if (val && is_gimple_min_invariant (val))
235 	    return val;
236 	  else
237 	    return NULL_TREE;
238 	}
239       /* Variables declared 'const' without an initializer
240 	 have zero as the initializer if they may not be
241 	 overridden at link or run time.  */
242       if (!val
243           && is_gimple_reg_type (TREE_TYPE (sym)))
244 	return build_zero_cst (TREE_TYPE (sym));
245     }
246 
247   return NULL_TREE;
248 }
249 
250 
251 
252 /* Subroutine of fold_stmt.  We perform several simplifications of the
253    memory reference tree EXPR and make sure to re-gimplify them properly
254    after propagation of constant addresses.  IS_LHS is true if the
255    reference is supposed to be an lvalue.  */
256 
257 static tree
maybe_fold_reference(tree expr,bool is_lhs)258 maybe_fold_reference (tree expr, bool is_lhs)
259 {
260   tree result;
261 
262   if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263        || TREE_CODE (expr) == REALPART_EXPR
264        || TREE_CODE (expr) == IMAGPART_EXPR)
265       && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266     return fold_unary_loc (EXPR_LOCATION (expr),
267 			   TREE_CODE (expr),
268 			   TREE_TYPE (expr),
269 			   TREE_OPERAND (expr, 0));
270   else if (TREE_CODE (expr) == BIT_FIELD_REF
271 	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272     return fold_ternary_loc (EXPR_LOCATION (expr),
273 			     TREE_CODE (expr),
274 			     TREE_TYPE (expr),
275 			     TREE_OPERAND (expr, 0),
276 			     TREE_OPERAND (expr, 1),
277 			     TREE_OPERAND (expr, 2));
278 
279   if (!is_lhs
280       && (result = fold_const_aggregate_ref (expr))
281       && is_gimple_min_invariant (result))
282     return result;
283 
284   return NULL_TREE;
285 }
286 
287 
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers mark end-of-life of a variable; leave them alone.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* With a complete (final) target list of at most one
		   method, the virtual function address is known.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			/* Shadows the outer LOC with a location safe for
			   diagnostics.  */
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Try folding the reference the address is taken of; a
	       folded MEM_REF with zero offset degenerates to its
	       pointer operand.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
427 
428 
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  The first VUSE and the last VDEF of the whole
   sequence are made the same as those of the replaced statement.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it is a store or a call that can
	 write to memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the original statement's VDEF; earlier stores get
	     fresh virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
500 
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Gimplification context must bracket all gimplify_* calls below.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result wanted: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      /* Drop the now-dead virtual operands and SSA defs of the
		 replaced call.  */
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's LHS
	 as the final statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
556 
557 
558 /* Replace the call at *GSI with the gimple value VAL.  */
559 
560 static void
replace_call_with_value(gimple_stmt_iterator * gsi,tree val)561 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
562 {
563   gimple *stmt = gsi_stmt (*gsi);
564   tree lhs = gimple_call_lhs (stmt);
565   gimple *repl;
566   if (lhs)
567     {
568       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
569 	val = fold_convert (TREE_TYPE (lhs), val);
570       repl = gimple_build_assign (lhs, val);
571     }
572   else
573     repl = gimple_build_nop ();
574   tree vdef = gimple_vdef (stmt);
575   if (vdef && TREE_CODE (vdef) == SSA_NAME)
576     {
577       unlink_stmt_vdef (stmt);
578       release_ssa_name (vdef);
579     }
580   gsi_replace (gsi, repl, false);
581 }
582 
583 /* Replace the call at *GSI with the new call REPL and fold that
584    again.  */
585 
586 static void
replace_call_with_call_and_fold(gimple_stmt_iterator * gsi,gimple * repl)587 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
588 {
589   gimple *stmt = gsi_stmt (*gsi);
590   gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
591   gimple_set_location (repl, gimple_location (stmt));
592   if (gimple_vdef (stmt)
593       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
594     {
595       gimple_set_vdef (repl, gimple_vdef (stmt));
596       gimple_set_vuse (repl, gimple_vuse (stmt));
597       SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
598     }
599   gsi_replace (gsi, repl, false);
600   fold_stmt (gsi);
601 }
602 
603 /* Return true if VAR is a VAR_DECL or a component thereof.  */
604 
605 static bool
var_decl_component_p(tree var)606 var_decl_component_p (tree var)
607 {
608   tree inner = var;
609   while (handled_component_p (inner))
610     inner = TREE_OPERAND (inner, 0);
611   return SSA_VAR_P (inner);
612 }
613 
614 /* Fold function call to builtin mem{{,p}cpy,move}.  Return
615    false if no simplification can be made.
616    If ENDP is 0, return DEST (like memcpy).
617    If ENDP is 1, return DEST+LEN (like mempcpy).
618    If ENDP is 2, return DEST+LEN-1 (like stpcpy).
619    If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
620    (memmove).   */
621 
622 static bool
gimple_fold_builtin_memory_op(gimple_stmt_iterator * gsi,tree dest,tree src,int endp)623 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
624 			       tree dest, tree src, int endp)
625 {
626   gimple *stmt = gsi_stmt (*gsi);
627   tree lhs = gimple_call_lhs (stmt);
628   tree len = gimple_call_arg (stmt, 2);
629   tree destvar, srcvar;
630   location_t loc = gimple_location (stmt);
631 
632   /* If the LEN parameter is zero, return DEST.  */
633   if (integer_zerop (len))
634     {
635       gimple *repl;
636       if (gimple_call_lhs (stmt))
637 	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
638       else
639 	repl = gimple_build_nop ();
640       tree vdef = gimple_vdef (stmt);
641       if (vdef && TREE_CODE (vdef) == SSA_NAME)
642 	{
643 	  unlink_stmt_vdef (stmt);
644 	  release_ssa_name (vdef);
645 	}
646       gsi_replace (gsi, repl, false);
647       return true;
648     }
649 
650   /* If SRC and DEST are the same (and not volatile), return
651      DEST{,+LEN,+LEN-1}.  */
652   if (operand_equal_p (src, dest, 0))
653     {
654       unlink_stmt_vdef (stmt);
655       if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
656 	release_ssa_name (gimple_vdef (stmt));
657       if (!lhs)
658 	{
659 	  gsi_replace (gsi, gimple_build_nop (), false);
660 	  return true;
661 	}
662       goto done;
663     }
664   else
665     {
666       tree srctype, desttype;
667       unsigned int src_align, dest_align;
668       tree off0;
669 
670       /* Inlining of memcpy/memmove may cause bounds lost (if we copy
671 	 pointers as wide integer) and also may result in huge function
672 	 size because of inlined bounds copy.  Thus don't inline for
673 	 functions we want to instrument.  */
674       if (flag_check_pointer_bounds
675 	  && chkp_instrumentable_p (cfun->decl)
676 	  /* Even if data may contain pointers we can inline if copy
677 	     less than a pointer size.  */
678 	  && (!tree_fits_uhwi_p (len)
679 	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
680 	return false;
681 
682       /* Build accesses at offset zero with a ref-all character type.  */
683       off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
684 							 ptr_mode, true), 0);
685 
686       /* If we can perform the copy efficiently with first doing all loads
687          and then all stores inline it that way.  Currently efficiently
688 	 means that we can load all the memory into a single integer
689 	 register which is what MOVE_MAX gives us.  */
690       src_align = get_pointer_alignment (src);
691       dest_align = get_pointer_alignment (dest);
692       if (tree_fits_uhwi_p (len)
693 	  && compare_tree_int (len, MOVE_MAX) <= 0
694 	  /* ???  Don't transform copies from strings with known length this
695 	     confuses the tree-ssa-strlen.c.  This doesn't handle
696 	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
697 	     reason.  */
698 	  && !c_strlen (src, 2))
699 	{
700 	  unsigned ilen = tree_to_uhwi (len);
701 	  if (exact_log2 (ilen) != -1)
702 	    {
703 	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
704 	      if (type
705 		  && TYPE_MODE (type) != BLKmode
706 		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
707 		      == ilen * 8)
708 		  /* If the destination pointer is not aligned we must be able
709 		     to emit an unaligned store.  */
710 		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
711 		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
712 		      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
713 			  != CODE_FOR_nothing)))
714 		{
715 		  tree srctype = type;
716 		  tree desttype = type;
717 		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
718 		    srctype = build_aligned_type (type, src_align);
719 		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
720 		  tree tem = fold_const_aggregate_ref (srcmem);
721 		  if (tem)
722 		    srcmem = tem;
723 		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
725 						     src_align)
726 			   && (optab_handler (movmisalign_optab,
727 					      TYPE_MODE (type))
728 			       == CODE_FOR_nothing))
729 		    srcmem = NULL_TREE;
730 		  if (srcmem)
731 		    {
732 		      gimple *new_stmt;
733 		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
734 			{
735 			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
736 			  if (gimple_in_ssa_p (cfun))
737 			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
738 						    new_stmt);
739 			  else
740 			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
741 			  gimple_assign_set_lhs (new_stmt, srcmem);
742 			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
743 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
744 			}
745 		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
746 			desttype = build_aligned_type (type, dest_align);
747 		      new_stmt
748 			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
749 							    dest, off0),
750 					       srcmem);
751 		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
752 		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
753 		      if (gimple_vdef (new_stmt)
754 			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
755 			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
756 		      if (!lhs)
757 			{
758 			  gsi_replace (gsi, new_stmt, false);
759 			  return true;
760 			}
761 		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
762 		      goto done;
763 		    }
764 		}
765 	    }
766 	}
767 
768       if (endp == 3)
769 	{
770 	  /* Both DEST and SRC must be pointer types.
771 	     ??? This is what old code did.  Is the testing for pointer types
772 	     really mandatory?
773 
774 	     If either SRC is readonly or length is 1, we can use memcpy.  */
775 	  if (!dest_align || !src_align)
776 	    return false;
777 	  if (readonly_data_expr (src)
778 	      || (tree_fits_uhwi_p (len)
779 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
780 		      >= tree_to_uhwi (len))))
781 	    {
782 	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
783 	      if (!fn)
784 		return false;
785 	      gimple_call_set_fndecl (stmt, fn);
786 	      gimple_call_set_arg (stmt, 0, dest);
787 	      gimple_call_set_arg (stmt, 1, src);
788 	      fold_stmt (gsi);
789 	      return true;
790 	    }
791 
792 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
793 	  if (TREE_CODE (src) == ADDR_EXPR
794 	      && TREE_CODE (dest) == ADDR_EXPR)
795 	    {
796 	      tree src_base, dest_base, fn;
797 	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
798 	      HOST_WIDE_INT maxsize;
799 
800 	      srcvar = TREE_OPERAND (src, 0);
801 	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
802 	      if (src_base == NULL)
803 		src_base = srcvar;
804 	      destvar = TREE_OPERAND (dest, 0);
805 	      dest_base = get_addr_base_and_unit_offset (destvar,
806 							 &dest_offset);
807 	      if (dest_base == NULL)
808 		dest_base = destvar;
809 	      if (tree_fits_uhwi_p (len))
810 		maxsize = tree_to_uhwi (len);
811 	      else
812 		maxsize = -1;
813 	      if (SSA_VAR_P (src_base)
814 		  && SSA_VAR_P (dest_base))
815 		{
816 		  if (operand_equal_p (src_base, dest_base, 0)
817 		      && ranges_overlap_p (src_offset, maxsize,
818 					   dest_offset, maxsize))
819 		    return false;
820 		}
821 	      else if (TREE_CODE (src_base) == MEM_REF
822 		       && TREE_CODE (dest_base) == MEM_REF)
823 		{
824 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
825 					 TREE_OPERAND (dest_base, 0), 0))
826 		    return false;
827 		  offset_int off = mem_ref_offset (src_base) + src_offset;
828 		  if (!wi::fits_shwi_p (off))
829 		    return false;
830 		  src_offset = off.to_shwi ();
831 
832 		  off = mem_ref_offset (dest_base) + dest_offset;
833 		  if (!wi::fits_shwi_p (off))
834 		    return false;
835 		  dest_offset = off.to_shwi ();
836 		  if (ranges_overlap_p (src_offset, maxsize,
837 					dest_offset, maxsize))
838 		    return false;
839 		}
840 	      else
841 		return false;
842 
843 	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
844 	      if (!fn)
845 		return false;
846 	      gimple_call_set_fndecl (stmt, fn);
847 	      gimple_call_set_arg (stmt, 0, dest);
848 	      gimple_call_set_arg (stmt, 1, src);
849 	      fold_stmt (gsi);
850 	      return true;
851 	    }
852 
853 	  /* If the destination and source do not alias optimize into
854 	     memcpy as well.  */
855 	  if ((is_gimple_min_invariant (dest)
856 	       || TREE_CODE (dest) == SSA_NAME)
857 	      && (is_gimple_min_invariant (src)
858 		  || TREE_CODE (src) == SSA_NAME))
859 	    {
860 	      ao_ref destr, srcr;
861 	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
862 	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
863 	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
864 		{
865 		  tree fn;
866 		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
867 		  if (!fn)
868 		    return false;
869 		  gimple_call_set_fndecl (stmt, fn);
870 		  gimple_call_set_arg (stmt, 0, dest);
871 		  gimple_call_set_arg (stmt, 1, src);
872 		  fold_stmt (gsi);
873 		  return true;
874 		}
875 	    }
876 
877 	  return false;
878 	}
879 
880       if (!tree_fits_shwi_p (len))
881 	return false;
882       /* FIXME:
883          This logic lose for arguments like (type *)malloc (sizeof (type)),
884          since we strip the casts of up to VOID return value from malloc.
885 	 Perhaps we ought to inherit type from non-VOID argument here?  */
886       STRIP_NOPS (src);
887       STRIP_NOPS (dest);
888       if (!POINTER_TYPE_P (TREE_TYPE (src))
889 	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
890 	return false;
891       /* In the following try to find a type that is most natural to be
892 	 used for the memcpy source and destination and that allows
893 	 the most optimization when memcpy is turned into a plain assignment
894 	 using that type.  In theory we could always use a char[len] type
895 	 but that only gains us that the destination and source possibly
896 	 no longer will have their address taken.  */
897       /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
898       if (TREE_CODE (src) == POINTER_PLUS_EXPR)
899 	{
900 	  tree tem = TREE_OPERAND (src, 0);
901 	  STRIP_NOPS (tem);
902 	  if (tem != TREE_OPERAND (src, 0))
903 	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
904 	}
905       if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
906 	{
907 	  tree tem = TREE_OPERAND (dest, 0);
908 	  STRIP_NOPS (tem);
909 	  if (tem != TREE_OPERAND (dest, 0))
910 	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
911 	}
912       srctype = TREE_TYPE (TREE_TYPE (src));
913       if (TREE_CODE (srctype) == ARRAY_TYPE
914 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
915 	{
916 	  srctype = TREE_TYPE (srctype);
917 	  STRIP_NOPS (src);
918 	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
919 	}
920       desttype = TREE_TYPE (TREE_TYPE (dest));
921       if (TREE_CODE (desttype) == ARRAY_TYPE
922 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
923 	{
924 	  desttype = TREE_TYPE (desttype);
925 	  STRIP_NOPS (dest);
926 	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
927 	}
928       if (TREE_ADDRESSABLE (srctype)
929 	  || TREE_ADDRESSABLE (desttype))
930 	return false;
931 
932       /* Make sure we are not copying using a floating-point mode or
933          a type whose size possibly does not match its precision.  */
934       if (FLOAT_MODE_P (TYPE_MODE (desttype))
935 	  || TREE_CODE (desttype) == BOOLEAN_TYPE
936 	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
937 	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
938       if (FLOAT_MODE_P (TYPE_MODE (srctype))
939 	  || TREE_CODE (srctype) == BOOLEAN_TYPE
940 	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
941 	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
942       if (!srctype)
943 	srctype = desttype;
944       if (!desttype)
945 	desttype = srctype;
946       if (!srctype)
947 	return false;
948 
949       src_align = get_pointer_alignment (src);
950       dest_align = get_pointer_alignment (dest);
951       if (dest_align < TYPE_ALIGN (desttype)
952 	  || src_align < TYPE_ALIGN (srctype))
953 	return false;
954 
955       destvar = dest;
956       STRIP_NOPS (destvar);
957       if (TREE_CODE (destvar) == ADDR_EXPR
958 	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
959 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
960 	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
961       else
962 	destvar = NULL_TREE;
963 
964       srcvar = src;
965       STRIP_NOPS (srcvar);
966       if (TREE_CODE (srcvar) == ADDR_EXPR
967 	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
968 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
969 	{
970 	  if (!destvar
971 	      || src_align >= TYPE_ALIGN (desttype))
972 	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
973 				  srcvar, off0);
974 	  else if (!STRICT_ALIGNMENT)
975 	    {
976 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
977 					    src_align);
978 	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
979 	    }
980 	  else
981 	    srcvar = NULL_TREE;
982 	}
983       else
984 	srcvar = NULL_TREE;
985 
986       if (srcvar == NULL_TREE && destvar == NULL_TREE)
987 	return false;
988 
989       if (srcvar == NULL_TREE)
990 	{
991 	  STRIP_NOPS (src);
992 	  if (src_align >= TYPE_ALIGN (desttype))
993 	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
994 	  else
995 	    {
996 	      if (STRICT_ALIGNMENT)
997 		return false;
998 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
999 					    src_align);
1000 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1001 	    }
1002 	}
1003       else if (destvar == NULL_TREE)
1004 	{
1005 	  STRIP_NOPS (dest);
1006 	  if (dest_align >= TYPE_ALIGN (srctype))
1007 	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1008 	  else
1009 	    {
1010 	      if (STRICT_ALIGNMENT)
1011 		return false;
1012 	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1013 					     dest_align);
1014 	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1015 	    }
1016 	}
1017 
1018       gimple *new_stmt;
1019       if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1020 	{
1021 	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1022 	  if (gimple_in_ssa_p (cfun))
1023 	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1024 	  else
1025 	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1026 	  gimple_assign_set_lhs (new_stmt, srcvar);
1027 	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1028 	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1029 	}
1030       new_stmt = gimple_build_assign (destvar, srcvar);
1031       gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1032       gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1033       if (gimple_vdef (new_stmt)
1034 	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1035 	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1036       if (!lhs)
1037 	{
1038 	  gsi_replace (gsi, new_stmt, false);
1039 	  return true;
1040 	}
1041       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1042     }
1043 
1044 done:
1045   gimple_seq stmts = NULL;
1046   if (endp == 0 || endp == 3)
1047     len = NULL_TREE;
1048   else if (endp == 2)
1049     len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1050 			ssize_int (1));
1051   if (endp == 2 || endp == 1)
1052     {
1053       len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1054       dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1055 			   TREE_TYPE (dest), dest, len);
1056     }
1057 
1058   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1059   gimple *repl = gimple_build_assign (lhs, dest);
1060   gsi_replace (gsi, repl, false);
1061   return true;
1062 }
1063 
1064 /* Fold function call to builtin memset or bzero at *GSI setting the
1065    memory of size LEN to VAL.  Return whether a simplification was made.  */
1066 
1067 static bool
gimple_fold_builtin_memset(gimple_stmt_iterator * gsi,tree c,tree len)1068 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1069 {
1070   gimple *stmt = gsi_stmt (*gsi);
1071   tree etype;
1072   unsigned HOST_WIDE_INT length, cval;
1073 
1074   /* If the LEN parameter is zero, return DEST.  */
1075   if (integer_zerop (len))
1076     {
1077       replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1078       return true;
1079     }
1080 
1081   if (! tree_fits_uhwi_p (len))
1082     return false;
1083 
1084   if (TREE_CODE (c) != INTEGER_CST)
1085     return false;
1086 
1087   tree dest = gimple_call_arg (stmt, 0);
1088   tree var = dest;
1089   if (TREE_CODE (var) != ADDR_EXPR)
1090     return false;
1091 
1092   var = TREE_OPERAND (var, 0);
1093   if (TREE_THIS_VOLATILE (var))
1094     return false;
1095 
1096   etype = TREE_TYPE (var);
1097   if (TREE_CODE (etype) == ARRAY_TYPE)
1098     etype = TREE_TYPE (etype);
1099 
1100   if (!INTEGRAL_TYPE_P (etype)
1101       && !POINTER_TYPE_P (etype))
1102     return NULL_TREE;
1103 
1104   if (! var_decl_component_p (var))
1105     return NULL_TREE;
1106 
1107   length = tree_to_uhwi (len);
1108   if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1109       || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1110     return NULL_TREE;
1111 
1112   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1113     return NULL_TREE;
1114 
1115   if (integer_zerop (c))
1116     cval = 0;
1117   else
1118     {
1119       if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1120 	return NULL_TREE;
1121 
1122       cval = TREE_INT_CST_LOW (c);
1123       cval &= 0xff;
1124       cval |= cval << 8;
1125       cval |= cval << 16;
1126       cval |= (cval << 31) << 1;
1127     }
1128 
1129   var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1130   gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1131   gimple_set_vuse (store, gimple_vuse (stmt));
1132   tree vdef = gimple_vdef (stmt);
1133   if (vdef && TREE_CODE (vdef) == SSA_NAME)
1134     {
1135       gimple_set_vdef (store, gimple_vdef (stmt));
1136       SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1137     }
1138   gsi_insert_before (gsi, store, GSI_SAME_STMT);
1139   if (gimple_call_lhs (stmt))
1140     {
1141       gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1142       gsi_replace (gsi, asgn, false);
1143     }
1144   else
1145     {
1146       gimple_stmt_iterator gsi2 = *gsi;
1147       gsi_prev (gsi);
1148       gsi_remove (&gsi2, true);
1149     }
1150 
1151   return true;
1152 }
1153 
1154 
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value query: only a non-negative integer constant
	     is acceptable.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* A value was already accumulated in *LENGTH; merge VAL
	     into it.  */
	  if (type > 0)
	    {
	      /* Maximum queries keep the larger of the two; that is
		 only meaningful for constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* Exact-length query (TYPE == 0): all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Both arms of the conditional must yield a usable
	       length/value.  */
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
        }
        return true;

      default:
        return false;
    }
}
1280 
1281 tree
get_maxval_strlen(tree arg,int type)1282 get_maxval_strlen (tree arg, int type)
1283 {
1284   bitmap visited = NULL;
1285   tree len = NULL_TREE;
1286   if (!get_maxval_strlen (arg, &len, &visited, type))
1287     len = NULL_TREE;
1288   if (visited)
1289     BITMAP_FREE (visited);
1290 
1291   return len;
1292 }
1293 
1294 
1295 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1296    If LEN is not NULL, it represents the length of the string to be
1297    copied.  Return NULL_TREE if no simplification can be made.  */
1298 
1299 static bool
gimple_fold_builtin_strcpy(gimple_stmt_iterator * gsi,tree dest,tree src)1300 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1301 			    tree dest, tree src)
1302 {
1303   location_t loc = gimple_location (gsi_stmt (*gsi));
1304   tree fn;
1305 
1306   /* If SRC and DEST are the same (and not volatile), return DEST.  */
1307   if (operand_equal_p (src, dest, 0))
1308     {
1309       replace_call_with_value (gsi, dest);
1310       return true;
1311     }
1312 
1313   if (optimize_function_for_size_p (cfun))
1314     return false;
1315 
1316   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1317   if (!fn)
1318     return false;
1319 
1320   tree len = get_maxval_strlen (src, 0);
1321   if (!len)
1322     return false;
1323 
1324   len = fold_convert_loc (loc, size_type_node, len);
1325   len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1326   len = force_gimple_operand_gsi (gsi, len, true,
1327 				  NULL_TREE, true, GSI_SAME_STMT);
1328   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1329   replace_call_with_call_and_fold (gsi, repl);
1330   return true;
1331 }
1332 
1333 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1334    If SLEN is not NULL, it represents the length of the source string.
1335    Return NULL_TREE if no simplification can be made.  */
1336 
1337 static bool
gimple_fold_builtin_strncpy(gimple_stmt_iterator * gsi,tree dest,tree src,tree len)1338 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1339 			     tree dest, tree src, tree len)
1340 {
1341   location_t loc = gimple_location (gsi_stmt (*gsi));
1342   tree fn;
1343 
1344   /* If the LEN parameter is zero, return DEST.  */
1345   if (integer_zerop (len))
1346     {
1347       replace_call_with_value (gsi, dest);
1348       return true;
1349     }
1350 
1351   /* We can't compare slen with len as constants below if len is not a
1352      constant.  */
1353   if (TREE_CODE (len) != INTEGER_CST)
1354     return false;
1355 
1356   /* Now, we must be passed a constant src ptr parameter.  */
1357   tree slen = get_maxval_strlen (src, 0);
1358   if (!slen || TREE_CODE (slen) != INTEGER_CST)
1359     return false;
1360 
1361   slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1362 
1363   /* We do not support simplification of this case, though we do
1364      support it when expanding trees into RTL.  */
1365   /* FIXME: generate a call to __builtin_memset.  */
1366   if (tree_int_cst_lt (slen, len))
1367     return false;
1368 
1369   /* OK transform into builtin memcpy.  */
1370   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1371   if (!fn)
1372     return false;
1373 
1374   len = fold_convert_loc (loc, size_type_node, len);
1375   len = force_gimple_operand_gsi (gsi, len, true,
1376 				  NULL_TREE, true, GSI_SAME_STMT);
1377   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1378   replace_call_with_call_and_fold (gsi, repl);
1379   return true;
1380 }
1381 
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   Return true if a simplification was made: either the whole call is
   replaced by DST (when SRC is known to be the empty string), or --
   when the length of SRC is a known constant and we optimize the block
   for speed -- the strcat is expanded into a strlen of DST followed by
   a memcpy to DST + strlen (DST).  Return false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion below is a speed optimization.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL is appended as well.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; forward it to the call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1476 
1477 /* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
1478    are the arguments to the call.  */
1479 
1480 static bool
gimple_fold_builtin_strcat_chk(gimple_stmt_iterator * gsi)1481 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1482 {
1483   gimple *stmt = gsi_stmt (*gsi);
1484   tree dest = gimple_call_arg (stmt, 0);
1485   tree src = gimple_call_arg (stmt, 1);
1486   tree size = gimple_call_arg (stmt, 2);
1487   tree fn;
1488   const char *p;
1489 
1490 
1491   p = c_getstr (src);
1492   /* If the SRC parameter is "", return DEST.  */
1493   if (p && *p == '\0')
1494     {
1495       replace_call_with_value (gsi, dest);
1496       return true;
1497     }
1498 
1499   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1500     return false;
1501 
1502   /* If __builtin_strcat_chk is used, assume strcat is available.  */
1503   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1504   if (!fn)
1505     return false;
1506 
1507   gimple *repl = gimple_build_call (fn, 2, dest, src);
1508   replace_call_with_call_and_fold (gsi, repl);
1509   return true;
1510 }
1511 
1512 /* Simplify a call to the strncat builtin.  */
1513 
1514 static bool
gimple_fold_builtin_strncat(gimple_stmt_iterator * gsi)1515 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1516 {
1517   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1518   tree dst = gimple_call_arg (stmt, 0);
1519   tree src = gimple_call_arg (stmt, 1);
1520   tree len = gimple_call_arg (stmt, 2);
1521 
1522   const char *p = c_getstr (src);
1523 
1524   /* If the requested length is zero, or the src parameter string
1525      length is zero, return the dst parameter.  */
1526   if (integer_zerop (len) || (p && *p == '\0'))
1527     {
1528       replace_call_with_value (gsi, dst);
1529       return true;
1530     }
1531 
1532   /* If the requested len is greater than or equal to the string
1533      length, call strcat.  */
1534   if (TREE_CODE (len) == INTEGER_CST && p
1535       && compare_tree_int (len, strlen (p)) >= 0)
1536     {
1537       tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1538 
1539       /* If the replacement _DECL isn't initialized, don't do the
1540 	 transformation.  */
1541       if (!fn)
1542 	return false;
1543 
1544       gcall *repl = gimple_build_call (fn, 2, dst, src);
1545       replace_call_with_call_and_fold (gsi, repl);
1546       return true;
1547     }
1548 
1549   return false;
1550 }
1551 
1552 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1553    LEN, and SIZE.  */
1554 
1555 static bool
gimple_fold_builtin_strncat_chk(gimple_stmt_iterator * gsi)1556 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1557 {
1558   gimple *stmt = gsi_stmt (*gsi);
1559   tree dest = gimple_call_arg (stmt, 0);
1560   tree src = gimple_call_arg (stmt, 1);
1561   tree len = gimple_call_arg (stmt, 2);
1562   tree size = gimple_call_arg (stmt, 3);
1563   tree fn;
1564   const char *p;
1565 
1566   p = c_getstr (src);
1567   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
1568   if ((p && *p == '\0')
1569       || integer_zerop (len))
1570     {
1571       replace_call_with_value (gsi, dest);
1572       return true;
1573     }
1574 
1575   if (! tree_fits_uhwi_p (size))
1576     return false;
1577 
1578   if (! integer_all_onesp (size))
1579     {
1580       tree src_len = c_strlen (src, 1);
1581       if (src_len
1582 	  && tree_fits_uhwi_p (src_len)
1583 	  && tree_fits_uhwi_p (len)
1584 	  && ! tree_int_cst_lt (len, src_len))
1585 	{
1586 	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
1587 	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1588 	  if (!fn)
1589 	    return false;
1590 
1591 	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1592 	  replace_call_with_call_and_fold (gsi, repl);
1593 	  return true;
1594 	}
1595       return false;
1596     }
1597 
1598   /* If __builtin_strncat_chk is used, assume strncat is available.  */
1599   fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1600   if (!fn)
1601     return false;
1602 
1603   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1604   replace_call_with_call_and_fold (gsi, repl);
1605   return true;
1606 }
1607 
1608 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
1609    to the call.  IGNORE is true if the value returned
1610    by the builtin will be ignored.  UNLOCKED is true is true if this
1611    actually a call to fputs_unlocked.  If LEN in non-NULL, it represents
1612    the known length of the string.  Return NULL_TREE if no simplification
1613    was possible.  */
1614 
1615 static bool
gimple_fold_builtin_fputs(gimple_stmt_iterator * gsi,tree arg0,tree arg1,bool unlocked)1616 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1617 			   tree arg0, tree arg1,
1618 			   bool unlocked)
1619 {
1620   gimple *stmt = gsi_stmt (*gsi);
1621 
1622   /* If we're using an unlocked function, assume the other unlocked
1623      functions exist explicitly.  */
1624   tree const fn_fputc = (unlocked
1625 			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1626 			 : builtin_decl_implicit (BUILT_IN_FPUTC));
1627   tree const fn_fwrite = (unlocked
1628 			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1629 			  : builtin_decl_implicit (BUILT_IN_FWRITE));
1630 
1631   /* If the return value is used, don't do the transformation.  */
1632   if (gimple_call_lhs (stmt))
1633     return false;
1634 
1635   /* Get the length of the string passed to fputs.  If the length
1636      can't be determined, punt.  */
1637   tree len = get_maxval_strlen (arg0, 0);
1638   if (!len
1639       || TREE_CODE (len) != INTEGER_CST)
1640     return false;
1641 
1642   switch (compare_tree_int (len, 1))
1643     {
1644     case -1: /* length is 0, delete the call entirely .  */
1645       replace_call_with_value (gsi, integer_zero_node);
1646       return true;
1647 
1648     case 0: /* length is 1, call fputc.  */
1649       {
1650 	const char *p = c_getstr (arg0);
1651 	if (p != NULL)
1652 	  {
1653 	    if (!fn_fputc)
1654 	      return false;
1655 
1656 	    gimple *repl = gimple_build_call (fn_fputc, 2,
1657 					     build_int_cst
1658 					     (integer_type_node, p[0]), arg1);
1659 	    replace_call_with_call_and_fold (gsi, repl);
1660 	    return true;
1661 	  }
1662       }
1663       /* FALLTHROUGH */
1664     case 1: /* length is greater than 1, call fwrite.  */
1665       {
1666 	/* If optimizing for size keep fputs.  */
1667 	if (optimize_function_for_size_p (cfun))
1668 	  return false;
1669 	/* New argument list transforming fputs(string, stream) to
1670 	   fwrite(string, 1, len, stream).  */
1671 	if (!fn_fwrite)
1672 	  return false;
1673 
1674 	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1675 					 size_one_node, len, arg1);
1676 	replace_call_with_call_and_fold (gsi, repl);
1677 	return true;
1678       }
1679     default:
1680       gcc_unreachable ();
1681     }
1682   return false;
1683 }
1684 
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call and FCODE is
   the BUILT_IN_* code of the builtin.  Return true iff a
   simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; build that value.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN, when known, is the maximum value LEN can take.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      /* SIZE is a real object size (not the all-ones "unknown"
	 marker), so LEN must be shown to fit within it.  */
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy could overflow the object: keep the checking call.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  /* The check proved redundant: call the unchecked variant.  */
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1785 
/* Fold a call to the __st[rp]cpy_chk builtin.  DEST, SRC and SIZE are
   the arguments to the call; SIZE is the destination object size as
   computed by __builtin_object_size (all-ones means unknown).  FCODE
   is the BUILT_IN_* code of the builtin, distinguishing
   __strcpy_chk from __stpcpy_chk.  Returns true if the call was
   simplified, false if it must be emitted as-is.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  /* When SIZE is all ones the object size is unknown and the runtime
     check can never fail, so the length checks below are skipped.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* The copied size is LEN + 1 to include the terminating
		 NUL; emit the addition before the call.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Only drop the check when the destination provably has room
	 for the string and its terminating NUL (MAXLEN < SIZE).  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1876 
1877 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
1878    are the arguments to the call.  If MAXLEN is not NULL, it is maximum
1879    length passed as third argument. IGNORE is true if return value can be
1880    ignored. FCODE is the BUILT_IN_* code of the builtin. */
1881 
1882 static bool
gimple_fold_builtin_stxncpy_chk(gimple_stmt_iterator * gsi,tree dest,tree src,tree len,tree size,enum built_in_function fcode)1883 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1884 				 tree dest, tree src,
1885 				 tree len, tree size,
1886 				 enum built_in_function fcode)
1887 {
1888   gimple *stmt = gsi_stmt (*gsi);
1889   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1890   tree fn;
1891 
1892   if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1893     {
1894        /* If return value of __stpncpy_chk is ignored,
1895           optimize into __strncpy_chk.  */
1896        fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1897        if (fn)
1898 	 {
1899 	   gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1900 	   replace_call_with_call_and_fold (gsi, repl);
1901 	   return true;
1902 	 }
1903     }
1904 
1905   if (! tree_fits_uhwi_p (size))
1906     return false;
1907 
1908   tree maxlen = get_maxval_strlen (len, 2);
1909   if (! integer_all_onesp (size))
1910     {
1911       if (! tree_fits_uhwi_p (len))
1912 	{
1913 	  /* If LEN is not constant, try MAXLEN too.
1914 	     For MAXLEN only allow optimizing into non-_ocs function
1915 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
1916 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1917 	    return false;
1918 	}
1919       else
1920 	maxlen = len;
1921 
1922       if (tree_int_cst_lt (size, maxlen))
1923 	return false;
1924     }
1925 
1926   /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
1927   fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1928 			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1929   if (!fn)
1930     return false;
1931 
1932   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1933   replace_call_with_call_and_fold (gsi, repl);
1934   return true;
1935 }
1936 
/* Fold a call to the stpcpy builtin with arguments DEST and SRC.
   Returns true if the call was simplified (into strcpy or memcpy plus
   pointer arithmetic), false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Only a compile-time constant source length lets us compute the
     return value DEST + LEN below.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Move the virtual operands over to the memcpy call by hand and
     make it the defining statement of its VDEF, since the original
     call is not removed but replaced by a plain assignment below.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2000 
/* Fold a call to the __{,v}snprintf_chk builtin.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if
   the call was simplified into plain {,v}snprintf, false if a normal
   call should be emitted.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means the destination object size is unknown,
     in which case the runtime check can never fail and the length
     comparison below is unnecessary.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs: FLAG and SIZE are dropped, so any varargs are
     shifted down two slots and the op count shrinks by two.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2081 
/* Fold a call to the __{,v}sprintf_chk builtin.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true if the
   call was simplified into plain {,v}sprintf, false if a normal call
   should be emitted.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is all ones (object size unknown), we must have
     proved above that the output length fits: LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs: FLAG and SIZE are dropped, so any varargs are
     shifted down two slots and the op count shrinks by two.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2177 
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Returns true if the call was simplified into a strcpy call (plus an
   assignment of the known result length when the lhs is used), false
   otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is the format string's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The return value is only needed when the lhs is used; it is
	 the length of ORIG, which must then be known.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2302 
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Returns true if the call was simplified into a strcpy call (plus an
   assignment of the known result length when the lhs is used), false
   otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; here that is the format string's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The transformation is only valid when the length of ORIG is a
	 known constant smaller than the destination size.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2441 
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   FCODE is the BUILT_IN_* code of the function to be simplified.
   Returns true if the call was replaced by fputs/fputc (or removed
   entirely), false otherwise.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A non-va_list variant must not have extra arguments when the
	 format consumes none of them.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2542 
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   FCODE is the BUILT_IN_* code of the function to be simplified.
   Returns true if the call was replaced by puts/putchar (or removed
   entirely), false otherwise.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", str) and printf with a %-free format, which
     both reduce to emitting a known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2704 
2705 
2706 
2707 /* Fold a call to __builtin_strlen with known length LEN.  */
2708 
2709 static bool
gimple_fold_builtin_strlen(gimple_stmt_iterator * gsi)2710 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2711 {
2712   gimple *stmt = gsi_stmt (*gsi);
2713   tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2714   if (!len)
2715     return false;
2716   len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2717   replace_call_with_value (gsi, len);
2718   return true;
2719 }
2720 
2721 /* Fold a call to __builtin_acc_on_device.  */
2722 
2723 static bool
gimple_fold_builtin_acc_on_device(gimple_stmt_iterator * gsi,tree arg0)2724 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2725 {
2726   /* Defer folding until we know which compiler we're in.  */
2727   if (symtab->state != EXPANSION)
2728     return false;
2729 
2730   unsigned val_host = GOMP_DEVICE_HOST;
2731   unsigned val_dev = GOMP_DEVICE_NONE;
2732 
2733 #ifdef ACCEL_COMPILER
2734   val_host = GOMP_DEVICE_NOT_HOST;
2735   val_dev = ACCEL_COMPILER_acc_device;
2736 #endif
2737 
2738   location_t loc = gimple_location (gsi_stmt (*gsi));
2739 
2740   tree host_eq = make_ssa_name (boolean_type_node);
2741   gimple *host_ass = gimple_build_assign
2742     (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2743   gimple_set_location (host_ass, loc);
2744   gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2745 
2746   tree dev_eq = make_ssa_name (boolean_type_node);
2747   gimple *dev_ass = gimple_build_assign
2748     (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2749   gimple_set_location (dev_ass, loc);
2750   gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2751 
2752   tree result = make_ssa_name (boolean_type_node);
2753   gimple *result_ass = gimple_build_assign
2754     (result, BIT_IOR_EXPR, host_eq, dev_eq);
2755   gimple_set_location (result_ass, loc);
2756   gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2757 
2758   replace_call_with_value (gsi, result);
2759 
2760   return true;
2761 }
2762 
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the specialized folders for the builtins CCP
   knows about, then falls back to the generic tree-level builtin folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      /* bzero (p, n) is handled as memset (p, 0, n).  */
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      /* Note bcopy's (src, dest) operand order is swapped relative to
	 memcpy/memmove, hence args 1 and 0 below.  */
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      /* fprintf (file, fmt[, arg]) — fold only the 2- and 3-argument
	 forms.  */
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The _CHK variants carry one extra leading argument, so the
	 format string is argument 2 here.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* As above, the _CHK forms shift the format to argument 1.  */
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	/* The generic folder may produce a differently-typed value;
	   convert back to the call's declared return type.  */
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
2925 
2926 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2927    function calls to constants, where possible.  */
2928 
2929 static tree
fold_internal_goacc_dim(const gimple * call)2930 fold_internal_goacc_dim (const gimple *call)
2931 {
2932   int axis = get_oacc_ifn_dim_arg (call);
2933   int size = get_oacc_fn_dim_size (current_function_decl, axis);
2934   bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2935   tree result = NULL_TREE;
2936 
2937   /* If the size is 1, or we only want the size and it is not dynamic,
2938      we know the answer.  */
2939   if (size == 1 || (!is_pos && size))
2940     {
2941       tree type = TREE_TYPE (gimple_call_lhs (call));
2942       result = build_int_cst (type, size - is_pos);
2943     }
2944 
2945   return result;
2946 }
2947 
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* Twice the widest supported precision can hold the exact result of
     a single +, - or * of any two representable input constants.  */
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  /* Compute the exact (non-wrapping) result in the doubled width.  */
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative exact result can never fit in an unsigned type,
     whatever its precision.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise we overflow iff the exact result needs more bits than
     TYPE provides.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
2974 
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value. Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true only changes that keep the current statement
   in place are performed (no new statements may be inserted).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     warn in the dump file if it is not among the possible
	     polymorphic targets, then strip the OBJ_TYPE_REF.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if type analysis proves
	     there is at most one possible target, call it directly
	     (or __builtin_unreachable when there is none).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
		 		   targets.length () == 1
		  		   ? targets[0]->name ()
		  		   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if ((gimple_call_flags (stmt) & ECF_NORETURN)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && (gimple_call_flags (stmt) & ECF_NORETURN)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || ((TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))
			       == INTEGER_CST)
			      && !TREE_ADDRESSABLE (TREE_TYPE (lhs)))))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     arbitrary default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target: the call site is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry over the virtual operands to the
			 replacement statement.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise fold *& in the static chain operand too.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, the UBSAN checks,
	 the OpenACC dimension queries and the arithmetic overflow
	 functions.  For the overflow IFNs SUBCODE is the underlying
	 arithmetic operation and CPLX_RESULT says whether the result
	 is a complex value of (value, overflow-flag).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check entirely when the size is unknown (all-ones)
	     or provably in bounds.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* For the complex-result IFNs the value type is the element
	     type of the lhs; without a lhs there is nothing to fold.  */
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      /* Constant operands: compute the result and check for
		 overflow explicitly.  */
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  /* The simplified result has a different type than the
		     lhs element type; re-check representability before
		     converting.  */
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package (result, overflow-flag) into the expected
		 complex value.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3285 
3286 
3287 /* Return true whether NAME has a use on STMT.  */
3288 
3289 static bool
has_use_on_stmt(tree name,gimple * stmt)3290 has_use_on_stmt (tree name, gimple *stmt)
3291 {
3292   imm_use_iterator iter;
3293   use_operand_p use_p;
3294   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3295     if (USE_STMT (use_p) == stmt)
3296       return true;
3297   return false;
3298 }
3299 
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  /* GIMPLE_COND: rewrite the condition from the simplification result,
     which may be a comparison, an SSA name or a constant.  */
  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* A bare SSA name result becomes NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* A constant condition folds to an always-false or
	     always-true GIMPLE_COND.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result into a new SSA name and
	     compare it against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  /* GIMPLE_ASSIGN with a tree-code result: replace the rhs in place
     when it fits (or when we may grow the statement).  */
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  /* A call that simplified to the same combined function: only the
     arguments changed, update them in place.  */
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  /* Anything else: build the result into SEQ targeting the statement's
     lhs and replace the whole statement with the sequence.  */
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3451 
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at a reference tree which is adjusted in place; return
   true if any change was made.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any handled components down
     to the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  /* A constant component reference always has a computable
	     base and offset.  */
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component's offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3534 
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE is true the statement may not
   be replaced by a different one and may not grow more operands.
   VALUEIZE is handed to gimple_simplify to look (or not look) through
   SSA use-def edges.  Returns true iff the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  /* Canonicalize *& in the single RHS and in the LHS.  */
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize *& in call arguments and in the call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize *& in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through into the (empty) default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  In-place folding may only use
     it for assignments and conditions whose simplification does not
     need a statement sequence.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	/* Fold the RHS; when folding in place only accept a new RHS
	   that does not need more operands than the current one.  */
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Whether an input may be folded as an lvalue depends on
	       its constraint allowing memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*&x style references and rebuild the address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3838 
/* Valueization callback that ends up not following SSA edges: it
   valueizes nothing, forcing gimple_simplify to only look at the
   statement at hand.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3846 
3847 /* Valueization callback that ends up following single-use SSA edges only.  */
3848 
3849 tree
follow_single_use_edges(tree val)3850 follow_single_use_edges (tree val)
3851 {
3852   if (TREE_CODE (val) == SSA_NAME
3853       && !has_single_use (val))
3854     return NULL_TREE;
3855   return val;
3856 }
3857 
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not follow SSA use-def edges when valueizing.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3870 
/* Like the fold_stmt overload above but valueize operands via the
   user-provided VALUEIZE callback, which controls how far SSA use-def
   edges are followed during simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3876 
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3893 
3894 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3895    if EXPR is null or we don't know how.
3896    If non-null, the result always has boolean type.  */
3897 
3898 static tree
canonicalize_bool(tree expr,bool invert)3899 canonicalize_bool (tree expr, bool invert)
3900 {
3901   if (!expr)
3902     return NULL_TREE;
3903   else if (invert)
3904     {
3905       if (integer_nonzerop (expr))
3906 	return boolean_false_node;
3907       else if (integer_zerop (expr))
3908 	return boolean_true_node;
3909       else if (TREE_CODE (expr) == SSA_NAME)
3910 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3911 			    build_int_cst (TREE_TYPE (expr), 0));
3912       else if (COMPARISON_CLASS_P (expr))
3913 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3914 			    boolean_type_node,
3915 			    TREE_OPERAND (expr, 0),
3916 			    TREE_OPERAND (expr, 1));
3917       else
3918 	return NULL_TREE;
3919     }
3920   else
3921     {
3922       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3923 	return expr;
3924       if (integer_nonzerop (expr))
3925 	return boolean_true_node;
3926       else if (integer_zerop (expr))
3927 	return boolean_false_node;
3928       else if (TREE_CODE (expr) == SSA_NAME)
3929 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
3930 			    build_int_cst (TREE_TYPE (expr), 0));
3931       else if (COMPARISON_CLASS_P (expr))
3932 	return fold_build2 (TREE_CODE (expr),
3933 			    boolean_type_node,
3934 			    TREE_OPERAND (expr, 0),
3935 			    TREE_OPERAND (expr, 1));
3936       else
3937 	return NULL_TREE;
3938     }
3939 }
3940 
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Conservative: a false return only means equivalence could
   not be proven.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* Boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the statement defining EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): compare against the comparison
	     defining NAME directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): compare against the inverted
	     comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3998 
3999 /* Check to see if two boolean expressions OP1 and OP2 are logically
4000    equivalent.  */
4001 
4002 static bool
same_bool_result_p(const_tree op1,const_tree op2)4003 same_bool_result_p (const_tree op1, const_tree op2)
4004 {
4005   /* Simple cases first.  */
4006   if (operand_equal_p (op1, op2, 0))
4007     return true;
4008 
4009   /* Check the cases where at least one of the operands is a comparison.
4010      These are a bit smarter than operand_equal_p in that they apply some
4011      identifies on SSA_NAMEs.  */
4012   if (COMPARISON_CLASS_P (op2)
4013       && same_bool_comparison_p (op1, TREE_CODE (op2),
4014 				 TREE_OPERAND (op2, 0),
4015 				 TREE_OPERAND (op2, 1)))
4016     return true;
4017   if (COMPARISON_CLASS_P (op1)
4018       && same_bool_comparison_p (op2, TREE_CODE (op1),
4019 				 TREE_OPERAND (op1, 0),
4020 				 TREE_OPERAND (op1, 1)))
4021     return true;
4022 
4023   /* Default case.  */
4024   return false;
4025 }
4026 
4027 /* Forward declarations for some mutually recursive functions.  */
4028 
4029 static tree
4030 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4031 		   enum tree_code code2, tree op2a, tree op2b);
4032 static tree
4033 and_var_with_comparison (tree var, bool invert,
4034 			 enum tree_code code2, tree op2a, tree op2b);
4035 static tree
4036 and_var_with_comparison_1 (gimple *stmt,
4037 			   enum tree_code code2, tree op2a, tree op2b);
4038 static tree
4039 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4040 		  enum tree_code code2, tree op2a, tree op2b);
4041 static tree
4042 or_var_with_comparison (tree var, bool invert,
4043 			enum tree_code code2, tree op2a, tree op2b);
4044 static tree
4045 or_var_with_comparison_1 (gimple *stmt,
4046 			  enum tree_code code2, tree op2a, tree op2b);
4047 
4048 /* Helper function for and_comparisons_1:  try to simplify the AND of the
4049    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4050    If INVERT is true, invert the value of the VAR before doing the AND.
4051    Return NULL_EXPR if we can't simplify this to a single expression.  */
4052 
4053 static tree
and_var_with_comparison(tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)4054 and_var_with_comparison (tree var, bool invert,
4055 			 enum tree_code code2, tree op2a, tree op2b)
4056 {
4057   tree t;
4058   gimple *stmt = SSA_NAME_DEF_STMT (var);
4059 
4060   /* We can only deal with variables whose definitions are assignments.  */
4061   if (!is_gimple_assign (stmt))
4062     return NULL_TREE;
4063 
4064   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4065      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4066      Then we only have to consider the simpler non-inverted cases.  */
4067   if (invert)
4068     t = or_var_with_comparison_1 (stmt,
4069 				  invert_tree_comparison (code2, false),
4070 				  op2a, op2b);
4071   else
4072     t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4073   return canonicalize_bool (t, invert);
4074 }
4075 
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... (op2a == 0) / (op2a != 1) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4238 
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: <0, 0, >0 for
	 op1b <, ==, > op2b respectively.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies the first, the AND
	     reduces to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  /* Likewise, if the first implies the second keep the first.  */
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test NAME inverted.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4492 
4493 /* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4495    If this can be simplified to a single expression (without requiring
4496    introducing more SSA variables to hold intermediate values),
4497    return the resulting tree.  Otherwise return NULL_TREE.
4498    If the result expression is non-null, it has boolean type.  */
4499 
4500 tree
maybe_fold_and_comparisons(enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)4501 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4502 			    enum tree_code code2, tree op2a, tree op2b)
4503 {
4504   tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4505   if (t)
4506     return t;
4507   else
4508     return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4509 }
4510 
4511 /* Helper function for or_comparisons_1:  try to simplify the OR of the
4512    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4513    If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */
4515 
4516 static tree
or_var_with_comparison(tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)4517 or_var_with_comparison (tree var, bool invert,
4518 			enum tree_code code2, tree op2a, tree op2b)
4519 {
4520   tree t;
4521   gimple *stmt = SSA_NAME_DEF_STMT (var);
4522 
4523   /* We can only deal with variables whose definitions are assignments.  */
4524   if (!is_gimple_assign (stmt))
4525     return NULL_TREE;
4526 
4527   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4528      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4529      Then we only have to consider the simpler non-inverted cases.  */
4530   if (invert)
4531     t = and_var_with_comparison_1 (stmt,
4532 				   invert_tree_comparison (code2, false),
4533 				   op2a, op2b);
4534   else
4535     t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4536   return canonicalize_bool (t, invert);
4537 }
4538 
4539 /* Try to simplify the OR of the ssa variable defined by the assignment
4540    STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
4542 
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (op2a code2 op2b) is equivalent to OP2A (resp. !OP2A), these
     record OP2A so the boolean identities below can recognize it.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test OP2A directly.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test !OP2A.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* var OR !var is always true.  */
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of (inner1 OR (op2a code2 op2b)), if one was
	 found, saved for combination with the second inner test.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification to a single expression was found.  */
  return NULL_TREE;
}
4702 
4703 /* Try to simplify the OR of two comparisons defined by
4704    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4705    If this can be done without constructing an intermediate value,
4706    return the resulting tree; otherwise NULL_TREE is returned.
4707    This function is deliberately asymmetric as it recurses on SSA_DEFs
4708    in the first comparison but not the second.  */
4709 
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive according to whether op1b is
	 less than, equal to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* VAL is whether (op1b code2 op2b) holds, i.e. whether the
	     second test accepts the one value the first test rejects.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If (op1a == op1b) implies (op2a code2 op2b), the second
	     comparison alone is equivalent to the disjunction.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	/* (x < C) OR (x > C) excludes exactly the single value C.  */
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests !NAME,
	 i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A true argument (after inversion) makes the OR
			 true for this path; otherwise the OR reduces to
			 the second comparison.  */
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4956 
4957 /* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4959    If this can be simplified to a single expression (without requiring
4960    introducing more SSA variables to hold intermediate values),
4961    return the resulting tree.  Otherwise return NULL_TREE.
4962    If the result expression is non-null, it has boolean type.  */
4963 
4964 tree
maybe_fold_or_comparisons(enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)4965 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4966 			   enum tree_code code2, tree op2a, tree op2b)
4967 {
4968   tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4969   if (t)
4970     return t;
4971   else
4972     return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4973 }
4974 
4975 
4976 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4977 
4978    Either NULL_TREE, a simplified but non-constant or a constant
4979    is returned.
4980 
4981    ???  This should go into a gimple-fold-inline.h file to be eventually
4982    privatized with the single valueize function used in the various TUs
4983    to avoid the indirect function call overhead.  */
4984 
tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      /* Accept the simplification if it is already a GIMPLE value;
	 otherwise let the hook (if set) build a tree from RCODE/OPS.  */
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to hand-written folding per statement kind.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-populated vector CONSTRUCTOR whose elements all
		 valueize to constants folds to a VECTOR_CST.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

              if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Substitute a known-invariant address for the base
			 pointer so the aggregate folder below can see it.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

          case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put the constant operand second, if any.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    /* bool != 0 and bool == 1 are just the bool itself.  */
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Fold UBSAN overflow-checking builtins when the operation
	       provably does not overflow.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Identities that hold regardless of the non-constant
		   operand's value.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Only use the folded result if it did not overflow; an
	       overflowing operation must keep its runtime check.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    /* Valueize all arguments and try folding the builtin call.  */
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					 gimple_call_return_type (call_stmt),
					 fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5274 
5275 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5276    Returns NULL_TREE if folding to a constant is not possible, otherwise
5277    returns a constant according to is_gimple_min_invariant.  */
5278 
5279 tree
gimple_fold_stmt_to_constant(gimple * stmt,tree (* valueize)(tree))5280 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5281 {
5282   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5283   if (res && is_gimple_min_invariant (res))
5284     return res;
5285   return NULL_TREE;
5286 }
5287 
5288 
5289 /* The following set of functions are supposed to fold references using
5290    their constant initializers.  */
5291 
5292 /* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
5294    base is array[40] and we do know constructor of array), then
5295    BIT_OFFSET is adjusted accordingly.
5296 
5297    As a special case, return error_mark_node when constructor
5298    is not explicitly available, but it is known to be zero
5299    such as 'static const int a;'.  */
5300 static tree
get_base_constructor(tree base,HOST_WIDE_INT * bit_offset,tree (* valueize)(tree))5301 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5302 		      tree (*valueize)(tree))
5303 {
5304   HOST_WIDE_INT bit_offset2, size, max_size;
5305   bool reverse;
5306 
5307   if (TREE_CODE (base) == MEM_REF)
5308     {
5309       if (!integer_zerop (TREE_OPERAND (base, 1)))
5310 	{
5311 	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5312 	    return NULL_TREE;
5313 	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
5314 			  * BITS_PER_UNIT);
5315 	}
5316 
5317       if (valueize
5318 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5319 	base = valueize (TREE_OPERAND (base, 0));
5320       if (!base || TREE_CODE (base) != ADDR_EXPR)
5321         return NULL_TREE;
5322       base = TREE_OPERAND (base, 0);
5323     }
5324 
5325   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
5326      DECL_INITIAL.  If BASE is a nested reference into another
5327      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5328      the inner reference.  */
5329   switch (TREE_CODE (base))
5330     {
5331     case VAR_DECL:
5332     case CONST_DECL:
5333       {
5334 	tree init = ctor_for_folding (base);
5335 
5336 	/* Our semantic is exact opposite of ctor_for_folding;
5337 	   NULL means unknown, while error_mark_node is 0.  */
5338 	if (init == error_mark_node)
5339 	  return NULL_TREE;
5340 	if (!init)
5341 	  return error_mark_node;
5342 	return init;
5343       }
5344 
5345     case ARRAY_REF:
5346     case COMPONENT_REF:
5347       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5348 				      &reverse);
5349       if (max_size == -1 || size != max_size)
5350 	return NULL_TREE;
5351       *bit_offset +=  bit_offset2;
5352       return get_base_constructor (base, bit_offset, valueize);
5353 
5354     case STRING_CST:
5355     case CONSTRUCTOR:
5356       return base;
5357 
5358     default:
5359       return NULL_TREE;
5360     }
5361 }
5362 
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL is the declaration the
   constructor was taken from, forwarded for canonicalizing the result.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5422 
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the declaration the constructor was taken from, forwarded
   for canonicalizing the result.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the recorded fields looking for one that overlaps the access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  /* Recurse into the field's own value for the remaining bits.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5486 
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL is the declaration CTOR belongs to;
   it is used when canonicalizing the resulting constant.  Returns NULL_TREE
   when the reference cannot be folded.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* For aggregate CONSTRUCTORs dispatch on array vs. record/union form.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5545 
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Fold the complex operand first, then extract the requested part.  */
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
			      TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5663 
5664 tree
fold_const_aggregate_ref(tree t)5665 fold_const_aggregate_ref (tree t)
5666 {
5667   return fold_const_aggregate_ref_1 (t, NULL);
5668 }
5669 
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET (in bytes).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Constructor may be elided only when streaming (LTO).  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the entry's position.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array. */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5780 
5781 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5782    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5783    KNOWN_BINFO carries the binfo describing the true type of
5784    OBJ_TYPE_REF_OBJECT(REF).
5785    Set CAN_REFER if non-NULL to false if method
5786    is not referable or if the virtual table is ill-formed (such as rewriten
5787    by non-C++ produced symbol). Otherwise just return NULL in that calse.  */
5788 
5789 tree
gimple_get_virt_method_for_binfo(HOST_WIDE_INT token,tree known_binfo,bool * can_refer)5790 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5791 				  bool *can_refer)
5792 {
5793   unsigned HOST_WIDE_INT offset;
5794   tree v;
5795 
5796   v = BINFO_VTABLE (known_binfo);
5797   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
5798   if (!v)
5799     return NULL_TREE;
5800 
5801   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5802     {
5803       if (can_refer)
5804 	*can_refer = false;
5805       return NULL_TREE;
5806     }
5807   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5808 }
5809 
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold when the access stays within the vector.  */
          if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must select exactly the imaginary part.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
5929 
5930 /* Return true if CODE is an operation that when operating on signed
5931    integer types involves undefined behavior on overflow and the
5932    operation can be expressed with unsigned arithmetic.  */
5933 
5934 bool
arith_code_with_undefined_signed_overflow(tree_code code)5935 arith_code_with_undefined_signed_overflow (tree_code code)
5936 {
5937   switch (code)
5938     {
5939     case PLUS_EXPR:
5940     case MINUS_EXPR:
5941     case MULT_EXPR:
5942     case NEGATE_EXPR:
5943     case POINTER_PLUS_EXPR:
5944       return true;
5945     default:
5946       return false;
5947     }
5948 }
5949 
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The operation is carried out in the unsigned variant of the type.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand to the unsigned type, emitting the conversion
     statements into STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
5986 
5987 
5988 /* The valueization hook we use for the gimple_build API simplification.
5989    This makes us match fold_buildN behavior by only combining with
5990    statements in the sequence(s) we are currently building.  */
5991 
5992 static tree
gimple_build_valueize(tree op)5993 gimple_build_valueize (tree op)
5994 {
5995   if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5996     return op;
5997   return NULL_TREE;
5998 }
5999 
6000 /* Build the expression CODE OP0 of type TYPE with location LOC,
6001    simplifying it first if possible.  Returns the built
6002    expression value and appends statements possibly defining it
6003    to SEQ.  */
6004 
6005 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0)6006 gimple_build (gimple_seq *seq, location_t loc,
6007 	      enum tree_code code, tree type, tree op0)
6008 {
6009   tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6010   if (!res)
6011     {
6012       if (gimple_in_ssa_p (cfun))
6013 	res = make_ssa_name (type);
6014       else
6015 	res = create_tmp_reg (type);
6016       gimple *stmt;
6017       if (code == REALPART_EXPR
6018 	  || code == IMAGPART_EXPR
6019 	  || code == VIEW_CONVERT_EXPR)
6020 	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6021       else
6022 	stmt = gimple_build_assign (res, code, op0);
6023       gimple_set_location (stmt, loc);
6024       gimple_seq_add_stmt_without_update (seq, stmt);
6025     }
6026   return res;
6027 }
6028 
6029 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6030    simplifying it first if possible.  Returns the built
6031    expression value and appends statements possibly defining it
6032    to SEQ.  */
6033 
6034 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1)6035 gimple_build (gimple_seq *seq, location_t loc,
6036 	      enum tree_code code, tree type, tree op0, tree op1)
6037 {
6038   tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6039   if (!res)
6040     {
6041       if (gimple_in_ssa_p (cfun))
6042 	res = make_ssa_name (type);
6043       else
6044 	res = create_tmp_reg (type);
6045       gimple *stmt = gimple_build_assign (res, code, op0, op1);
6046       gimple_set_location (stmt, loc);
6047       gimple_seq_add_stmt_without_update (seq, stmt);
6048     }
6049   return res;
6050 }
6051 
6052 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6053    simplifying it first if possible.  Returns the built
6054    expression value and appends statements possibly defining it
6055    to SEQ.  */
6056 
6057 tree
gimple_build(gimple_seq * seq,location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)6058 gimple_build (gimple_seq *seq, location_t loc,
6059 	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
6060 {
6061   tree res = gimple_simplify (code, type, op0, op1, op2,
6062 			      seq, gimple_build_valueize);
6063   if (!res)
6064     {
6065       if (gimple_in_ssa_p (cfun))
6066 	res = make_ssa_name (type);
6067       else
6068 	res = create_tmp_reg (type);
6069       gimple *stmt;
6070       if (code == BIT_FIELD_REF)
6071 	stmt = gimple_build_assign (res, code,
6072 				    build3 (code, type, op0, op1, op2));
6073       else
6074 	stmt = gimple_build_assign (res, code, op0, op1, op2);
6075       gimple_set_location (stmt, loc);
6076       gimple_seq_add_stmt_without_update (seq, stmt);
6077     }
6078   return res;
6079 }
6080 
6081 /* Build the call FN (ARG0) with a result of type TYPE
6082    (or no result if TYPE is void) with location LOC,
6083    simplifying it first if possible.  Returns the built
6084    expression value (or NULL_TREE if TYPE is void) and appends
6085    statements possibly defining it to SEQ.  */
6086 
6087 tree
gimple_build(gimple_seq * seq,location_t loc,enum built_in_function fn,tree type,tree arg0)6088 gimple_build (gimple_seq *seq, location_t loc,
6089 	      enum built_in_function fn, tree type, tree arg0)
6090 {
6091   tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6092   if (!res)
6093     {
6094       tree decl = builtin_decl_implicit (fn);
6095       gimple *stmt = gimple_build_call (decl, 1, arg0);
6096       if (!VOID_TYPE_P (type))
6097 	{
6098 	  if (gimple_in_ssa_p (cfun))
6099 	    res = make_ssa_name (type);
6100 	  else
6101 	    res = create_tmp_reg (type);
6102 	  gimple_call_set_lhs (stmt, res);
6103 	}
6104       gimple_set_location (stmt, loc);
6105       gimple_seq_add_stmt_without_update (seq, stmt);
6106     }
6107   return res;
6108 }
6109 
6110 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6111    (or no result if TYPE is void) with location LOC,
6112    simplifying it first if possible.  Returns the built
6113    expression value (or NULL_TREE if TYPE is void) and appends
6114    statements possibly defining it to SEQ.  */
6115 
6116 tree
gimple_build(gimple_seq * seq,location_t loc,enum built_in_function fn,tree type,tree arg0,tree arg1)6117 gimple_build (gimple_seq *seq, location_t loc,
6118 	      enum built_in_function fn, tree type, tree arg0, tree arg1)
6119 {
6120   tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6121   if (!res)
6122     {
6123       tree decl = builtin_decl_implicit (fn);
6124       gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6125       if (!VOID_TYPE_P (type))
6126 	{
6127 	  if (gimple_in_ssa_p (cfun))
6128 	    res = make_ssa_name (type);
6129 	  else
6130 	    res = create_tmp_reg (type);
6131 	  gimple_call_set_lhs (stmt, res);
6132 	}
6133       gimple_set_location (stmt, loc);
6134       gimple_seq_add_stmt_without_update (seq, stmt);
6135     }
6136   return res;
6137 }
6138 
6139 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6140    (or no result if TYPE is void) with location LOC,
6141    simplifying it first if possible.  Returns the built
6142    expression value (or NULL_TREE if TYPE is void) and appends
6143    statements possibly defining it to SEQ.  */
6144 
6145 tree
gimple_build(gimple_seq * seq,location_t loc,enum built_in_function fn,tree type,tree arg0,tree arg1,tree arg2)6146 gimple_build (gimple_seq *seq, location_t loc,
6147 	      enum built_in_function fn, tree type,
6148 	      tree arg0, tree arg1, tree arg2)
6149 {
6150   tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6151 			      seq, gimple_build_valueize);
6152   if (!res)
6153     {
6154       tree decl = builtin_decl_implicit (fn);
6155       gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6156       if (!VOID_TYPE_P (type))
6157 	{
6158 	  if (gimple_in_ssa_p (cfun))
6159 	    res = make_ssa_name (type);
6160 	  else
6161 	    res = create_tmp_reg (type);
6162 	  gimple_call_set_lhs (stmt, res);
6163 	}
6164       gimple_set_location (stmt, loc);
6165       gimple_seq_add_stmt_without_update (seq, stmt);
6166     }
6167   return res;
6168 }
6169 
6170 /* Build the conversion (TYPE) OP with a result of type TYPE
6171    with location LOC if such conversion is neccesary in GIMPLE,
6172    simplifying it first.
6173    Returns the built expression value and appends
6174    statements possibly defining it to SEQ.  */
6175 
6176 tree
gimple_convert(gimple_seq * seq,location_t loc,tree type,tree op)6177 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6178 {
6179   if (useless_type_conversion_p (type, TREE_TYPE (op)))
6180     return op;
6181   return gimple_build (seq, loc, NOP_EXPR, type, op);
6182 }
6183 
6184 /* Build the conversion (ptrofftype) OP with a result of a type
6185    compatible with ptrofftype with location LOC if such conversion
6186    is neccesary in GIMPLE, simplifying it first.
6187    Returns the built expression value and appends
6188    statements possibly defining it to SEQ.  */
6189 
6190 tree
gimple_convert_to_ptrofftype(gimple_seq * seq,location_t loc,tree op)6191 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6192 {
6193   if (ptrofftype_p (TREE_TYPE (op)))
6194     return op;
6195   return gimple_convert (seq, loc, sizetype, op);
6196 }
6197 
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  /* Dispatch on the shape of the RHS to the matching fold-const
     predicate; each handles the corresponding tree expression form.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary predicate available; conservatively answer "unknown".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  /* An assignment always has a valid RHS class.  */
  gcc_unreachable ();
}
6231 
6232 /* Return true if return value of call STMT is known to be non-negative.
6233    If the return value is based on the assumption that signed overflow is
6234    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6235    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
6236 
6237 static bool
gimple_call_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)6238 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6239 				 int depth)
6240 {
6241   tree arg0 = gimple_call_num_args (stmt) > 0 ?
6242     gimple_call_arg (stmt, 0) : NULL_TREE;
6243   tree arg1 = gimple_call_num_args (stmt) > 1 ?
6244     gimple_call_arg (stmt, 1) : NULL_TREE;
6245 
6246   return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6247 					gimple_call_combined_fn (stmt),
6248 					arg0,
6249 					arg1,
6250 					strict_overflow_p, depth);
6251 }
6252 
6253 /* Return true if return value of call STMT is known to be non-negative.
6254    If the return value is based on the assumption that signed overflow is
6255    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6256    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
6257 
6258 static bool
gimple_phi_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)6259 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6260 				int depth)
6261 {
6262   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6263     {
6264       tree arg = gimple_phi_arg_def (stmt, i);
6265       if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6266 	return false;
6267     }
6268   return true;
6269 }
6270 
6271 /* Return true if STMT is known to compute a non-negative value.
6272    If the return value is based on the assumption that signed overflow is
6273    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6274    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
6275 
6276 bool
gimple_stmt_nonnegative_warnv_p(gimple * stmt,bool * strict_overflow_p,int depth)6277 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6278 				 int depth)
6279 {
6280   switch (gimple_code (stmt))
6281     {
6282     case GIMPLE_ASSIGN:
6283       return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6284 						depth);
6285     case GIMPLE_CALL:
6286       return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6287 					      depth);
6288     case GIMPLE_PHI:
6289       return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6290 					     depth);
6291     default:
6292       return false;
6293     }
6294 }
6295 
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values. Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  /* Dispatch on the shape of the RHS to the matching fold-const
     predicate; each handles the corresponding tree expression form.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary predicate available; conservatively answer "unknown".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  /* An assignment always has a valid RHS class.  */
  gcc_unreachable ();
}
6324 
6325 /* Return true if the floating-point value computed by call STMT is known
6326    to have an integer value.  We also allow +Inf, -Inf and NaN to be
6327    considered integer values. Return false for signaling NaN.
6328 
6329    DEPTH is the current nesting depth of the query.  */
6330 
6331 static bool
gimple_call_integer_valued_real_p(gimple * stmt,int depth)6332 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6333 {
6334   tree arg0 = (gimple_call_num_args (stmt) > 0
6335 	       ? gimple_call_arg (stmt, 0)
6336 	       : NULL_TREE);
6337   tree arg1 = (gimple_call_num_args (stmt) > 1
6338 	       ? gimple_call_arg (stmt, 1)
6339 	       : NULL_TREE);
6340   return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6341 				     arg0, arg1, depth);
6342 }
6343 
6344 /* Return true if the floating-point result of phi STMT is known to have
6345    an integer value.  We also allow +Inf, -Inf and NaN to be considered
6346    integer values. Return false for signaling NaN.
6347 
6348    DEPTH is the current nesting depth of the query.  */
6349 
6350 static bool
gimple_phi_integer_valued_real_p(gimple * stmt,int depth)6351 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6352 {
6353   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6354     {
6355       tree arg = gimple_phi_arg_def (stmt, i);
6356       if (!integer_valued_real_single_p (arg, depth + 1))
6357 	return false;
6358     }
6359   return true;
6360 }
6361 
6362 /* Return true if the floating-point value computed by STMT is known
6363    to have an integer value.  We also allow +Inf, -Inf and NaN to be
6364    considered integer values. Return false for signaling NaN.
6365 
6366    DEPTH is the current nesting depth of the query.  */
6367 
6368 bool
gimple_stmt_integer_valued_real_p(gimple * stmt,int depth)6369 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6370 {
6371   switch (gimple_code (stmt))
6372     {
6373     case GIMPLE_ASSIGN:
6374       return gimple_assign_integer_valued_real_p (stmt, depth);
6375     case GIMPLE_CALL:
6376       return gimple_call_integer_valued_real_p (stmt, depth);
6377     case GIMPLE_PHI:
6378       return gimple_phi_integer_valued_real_p (stmt, depth);
6379     default:
6380       return false;
6381     }
6382 }
6383