1 /* Statement simplification on GIMPLE.
2    Copyright (C) 2010-2014 Free Software Foundation, Inc.
3    Split out from tree-ssa-ccp.c.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
55 
56 /* Return true when DECL can be referenced from current unit.
57    FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
58    We can get declarations that are not possible to reference for various
59    reasons:
60 
61      1) When analyzing C++ virtual tables.
62 	C++ virtual tables do have known constructors even
63 	when they are keyed to other compilation unit.
64 	Those tables can contain pointers to methods and vars
65 	in other units.  Those methods have both STATIC and EXTERNAL
66 	set.
67      2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
69 	In this case we have static VAR_DECL or FUNCTION_DECL
70 	that has no corresponding callgraph/varpool node
71 	declaring the body.
72      3) COMDAT functions referred by external vtables that
73         we devirtualize only during final compilation stage.
74         At this time we already decided that we will not output
75         the function body and thus we can't reference the symbol
76         directly.  */
77 
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract declarations never have a body we could reference.  */
  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
	return false;
      node = dyn_cast <cgraph_node> (snode);
      /* A function whose body was inlined away cannot be referenced.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized. */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK we are seeing either COMDAT or static variable.  In this case we must
     check that the definition is still around so we can refer it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't took
         the decision to eliminate offline copy of the function yet.
         The second is important when devirtualization happens during final
         compilation stage when making a new reference no longer makes callee
         to be compiled.  */
      if (!node || !node->definition
	  || DECL_EXTERNAL (decl) || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
167 
168 /* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
169    acceptable form for is_gimple_min_invariant.
170    FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */
171 
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is a plain
     ADDR_EXPR that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the referenced decl is not usable from this unit.  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants;
     return an overflow-free copy.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
228 
229 /* If SYM is a constant variable with known value, return the value.
230    NULL_TREE is returned otherwise.  */
231 
232 tree
get_symbol_constant_value(tree sym)233 get_symbol_constant_value (tree sym)
234 {
235   tree val = ctor_for_folding (sym);
236   if (val != error_mark_node)
237     {
238       if (val)
239 	{
240 	  val = canonicalize_constructor_val (unshare_expr (val), sym);
241 	  if (val && is_gimple_min_invariant (val))
242 	    return val;
243 	  else
244 	    return NULL_TREE;
245 	}
246       /* Variables declared 'const' without an initializer
247 	 have zero as the initializer if they may not be
248 	 overridden at link or run time.  */
249       if (!val
250           && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
251 	       || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
252 	return build_zero_cst (TREE_TYPE (sym));
253     }
254 
255   return NULL_TREE;
256 }
257 
258 
259 
260 /* Subroutine of fold_stmt.  We perform several simplifications of the
261    memory reference tree EXPR and make sure to re-gimplify them properly
262    after propagation of constant addresses.  IS_LHS is true if the
263    reference is supposed to be an lvalue.  */
264 
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  /* T tracks the innermost handled component, so folding can be applied
     at the base of the reference chain.  */
  tree *t = &expr;
  tree result;

  /* Fold wrappers around constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      /* fold_binary does not preserve volatileness; save and restore it.  */
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  /* Retry folding on the canonicalized expression.  */
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* For rvalues, try reading a constant value out of an aggregate.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					(TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* Nothing folded.  */
  return NULL_TREE;
}
355 
356 
357 /* Attempt to fold an assignment statement pointed-to by SI.  Returns a
358    replacement rhs for the statement or NULL_TREE if no simplification
359    could be made.  It is assumed that the operands have been previously
360    folded.  */
361 
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the RHS: single operand, unary, binary or
     ternary operation.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (val))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (val, &final);
		/* With a complete target list of at most one entry we can
		   devirtualize; an empty list means the call is
		   unreachable.  */
		if (final && targets.length () <= 1)
		  {
		    tree fndecl;
		    if (targets.length () == 1)
		      fndecl = targets[0]->decl;
		    else
		      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		    val = fold_convert (TREE_TYPE (val), fndecl);
		    STRIP_USELESS_TYPE_CONVERSION (val);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Try to fold the referenced object; a MEM_REF with zero
	       offset lets us strip the address-of entirely.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    /* All elements must be literal constants.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
	         X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
	          || (cmp_code == NE_EXPR && integer_onep (op2)))
	        is_logical_not = true;

	      if (is_logical_not == false)
	        result = op1;
	      /* Only for one-bit precision typed X the transformation
	         !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
	        result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
        result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the folded form is
		 actually usable as a GIMPLE condition.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  /* No simplification found.  */
  return NULL_TREE;
}
599 
600 /* Attempt to fold a conditional statement. Return true if any changes were
601    made. We only attempt to fold the condition expression, and do not perform
602    any transformation that would require alteration of the cfg.  It is
603    assumed that the operands have been previously folded.  */
604 
605 static bool
fold_gimple_cond(gimple stmt)606 fold_gimple_cond (gimple stmt)
607 {
608   tree result = fold_binary_loc (gimple_location (stmt),
609 			     gimple_cond_code (stmt),
610                              boolean_type_node,
611                              gimple_cond_lhs (stmt),
612                              gimple_cond_rhs (stmt));
613 
614   if (result)
615     {
616       STRIP_USELESS_TYPE_CONVERSION (result);
617       if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
618         {
619           gimple_cond_set_condition_from_tree (stmt, result);
620           return true;
621         }
622     }
623 
624   return false;
625 }
626 
627 /* Convert EXPR into a GIMPLE value suitable for substitution on the
628    RHS of an assignment.  Insert the necessary statements before
629    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
630    is replaced.  If the call is expected to produces a result, then it
631    is replaced by an assignment of the new RHS to the result variable.
632    If the result is to be ignored, then the call is replaced by a
633    GIMPLE_NOP.  A proper VDEF chain is retained by making the first
634    VUSE and the last VDEF of the whole sequence be the same as the replaced
635    statement and using new SSA names for stores in between.  */
636 
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is ignored: gimplify EXPR for its side effects only.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  /* Nothing to insert: the call degenerates to a no-op.  */
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's LHS.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      /* Statements that may store need a VDEF: non-register assignments
	 and calls that are not NOVOPS/pure/const/noreturn.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store of the sequence (first seen walking backward)
	     reuses the replaced statement's VDEF; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
741 
742 /* Return the string length, maximum string length or maximum value of
743    ARG in LENGTH.
744    If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
745    is not NULL and, for TYPE == 0, its value is not equal to the length
746    we determine or if we are unable to determine the length or value,
747    return false.  VISITED is a bitmap of visited variables.
748    TYPE is 0 if string length should be returned, 1 for maximum string
749    length and 2 for maximum value ARG can have.  */
750 
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    /* Recurse on the pointer the array dereferences.  */
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2 asks for the maximum value: accept only
	     non-negative integer constants.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the two lengths.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length (TYPE == 0) all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Both arms of the conditional must yield a usable length.  */
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
        }
        return true;

      default:
        return false;
    }
}
865 
866 
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  /* The call result is unused when the statement has no LHS; several
     foldings below behave differently (or are only profitable) then.  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	/* Make sure the folded result has the type callers expect.  */
	result = fold_convert (gimple_call_return_type (stmt), result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX is the index of the argument whose string length / value
     bound we need; TYPE is forwarded to get_maxval_strlen and selects
     which kind of bound is computed (see that function for the exact
     meaning of the codes 0/1/2).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Erroneous programs may call a builtin with fewer arguments than its
     prototype requires; do not read past the end of the argument list.  */
  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  /* Dispatch again on the function code, now that VAL[ARG_IDX] holds the
     computed bound (or NULL_TREE when unknown).  Each case re-checks
     NARGS so that malformed calls are left alone.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      /* The first switch already rejected all other function codes.  */
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
1074 
1075 
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true the statement itself must not be replaced, only
   modified in place (folded arguments, devirtualized callee).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     diagnose the case where that target is not among the
	     possible polymorphic call targets before stripping the
	     OBJ_TYPE_REF wrapper.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
                                                 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, &final);
	  /* With a complete target list of at most one entry we can
	     devirtualize: one target means a direct call, zero targets
	     means the call is unreachable.  */
	  if (final && targets.length () <= 1)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* No possible target: the call can never execute, so
		     replace it with a trap via __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  /* Everything below may replace the statement, which in-place folding
     must not do.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
          if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
	/* Machine-dependent builtins are folded by a target hook.  */
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect lowered to
	 IFN_BUILTIN_EXPECT and the UBSAN overflow checks.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  /* These identities can never overflow, so the UBSAN check can
	     be folded to a plain value.  */
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
1250 
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE is true the statement must not
   be replaced or grown, only simplified in place.  Returns true iff any
   change was made to the statement.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* For in-place folding only accept a new RHS that needs no more
	   operand slots than the existing statement provides.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	/* Outputs first; their constraints are collected because input
	   constraints may refer back to them (matching constraints).  */
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* A memory-only input must keep its address form; tell
	       maybe_fold_reference whether it folds as an lvalue.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address is taken of, then re-wrap
		 it in an ADDR_EXPR of the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* gimple_fold_call may have replaced the statement; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1399 
1400 /* Fold the statement pointed to by GSI.  In some cases, this function may
1401    replace the whole statement with a new one.  Returns true iff folding
1402    makes any changes.
1403    The statement pointed to by GSI should be in valid gimple form but may
1404    be in unfolded state as resulting from for example constant propagation
1405    which can produce *&x = 0.  */
1406 
1407 bool
fold_stmt(gimple_stmt_iterator * gsi)1408 fold_stmt (gimple_stmt_iterator *gsi)
1409 {
1410   return fold_stmt_1 (gsi, false);
1411 }
1412 
1413 /* Perform the minimal folding on statement *GSI.  Only operations like
1414    *&x created by constant propagation are handled.  The statement cannot
1415    be replaced with a new one.  Return true if the statement was
1416    changed, false otherwise.
1417    The statement *GSI should be in valid gimple form but may
1418    be in unfolded state as resulting from for example constant propagation
1419    which can produce *&x = 0.  */
1420 
1421 bool
fold_stmt_inplace(gimple_stmt_iterator * gsi)1422 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1423 {
1424   gimple stmt = gsi_stmt (*gsi);
1425   bool changed = fold_stmt_1 (gsi, true);
1426   gcc_assert (gsi_stmt (*gsi) == stmt);
1427   return changed;
1428 }
1429 
1430 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1431    if EXPR is null or we don't know how.
1432    If non-null, the result always has boolean type.  */
1433 
1434 static tree
canonicalize_bool(tree expr,bool invert)1435 canonicalize_bool (tree expr, bool invert)
1436 {
1437   if (!expr)
1438     return NULL_TREE;
1439   else if (invert)
1440     {
1441       if (integer_nonzerop (expr))
1442 	return boolean_false_node;
1443       else if (integer_zerop (expr))
1444 	return boolean_true_node;
1445       else if (TREE_CODE (expr) == SSA_NAME)
1446 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1447 			    build_int_cst (TREE_TYPE (expr), 0));
1448       else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1449 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1450 			    boolean_type_node,
1451 			    TREE_OPERAND (expr, 0),
1452 			    TREE_OPERAND (expr, 1));
1453       else
1454 	return NULL_TREE;
1455     }
1456   else
1457     {
1458       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1459 	return expr;
1460       if (integer_nonzerop (expr))
1461 	return boolean_true_node;
1462       else if (integer_zerop (expr))
1463 	return boolean_false_node;
1464       else if (TREE_CODE (expr) == SSA_NAME)
1465 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
1466 			    build_int_cst (TREE_TYPE (expr), 0));
1467       else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1468 	return fold_build2 (TREE_CODE (expr),
1469 			    boolean_type_node,
1470 			    TREE_OPERAND (expr, 0),
1471 			    TREE_OPERAND (expr, 1));
1472       else
1473 	return NULL_TREE;
1474     }
1475 }
1476 
1477 /* Check to see if a boolean expression EXPR is logically equivalent to the
1478    comparison (OP1 CODE OP2).  Check for various identities involving
1479    SSA_NAMEs.  */
1480 
1481 static bool
same_bool_comparison_p(const_tree expr,enum tree_code code,const_tree op1,const_tree op2)1482 same_bool_comparison_p (const_tree expr, enum tree_code code,
1483 			const_tree op1, const_tree op2)
1484 {
1485   gimple s;
1486 
1487   /* The obvious case.  */
1488   if (TREE_CODE (expr) == code
1489       && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1490       && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1491     return true;
1492 
1493   /* Check for comparing (name, name != 0) and the case where expr
1494      is an SSA_NAME with a definition matching the comparison.  */
1495   if (TREE_CODE (expr) == SSA_NAME
1496       && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1497     {
1498       if (operand_equal_p (expr, op1, 0))
1499 	return ((code == NE_EXPR && integer_zerop (op2))
1500 		|| (code == EQ_EXPR && integer_nonzerop (op2)));
1501       s = SSA_NAME_DEF_STMT (expr);
1502       if (is_gimple_assign (s)
1503 	  && gimple_assign_rhs_code (s) == code
1504 	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1505 	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1506 	return true;
1507     }
1508 
1509   /* If op1 is of the form (name != 0) or (name == 0), and the definition
1510      of name is a comparison, recurse.  */
1511   if (TREE_CODE (op1) == SSA_NAME
1512       && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1513     {
1514       s = SSA_NAME_DEF_STMT (op1);
1515       if (is_gimple_assign (s)
1516 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1517 	{
1518 	  enum tree_code c = gimple_assign_rhs_code (s);
1519 	  if ((c == NE_EXPR && integer_zerop (op2))
1520 	      || (c == EQ_EXPR && integer_nonzerop (op2)))
1521 	    return same_bool_comparison_p (expr, c,
1522 					   gimple_assign_rhs1 (s),
1523 					   gimple_assign_rhs2 (s));
1524 	  if ((c == EQ_EXPR && integer_zerop (op2))
1525 	      || (c == NE_EXPR && integer_nonzerop (op2)))
1526 	    return same_bool_comparison_p (expr,
1527 					   invert_tree_comparison (c, false),
1528 					   gimple_assign_rhs1 (s),
1529 					   gimple_assign_rhs2 (s));
1530 	}
1531     }
1532   return false;
1533 }
1534 
1535 /* Check to see if two boolean expressions OP1 and OP2 are logically
1536    equivalent.  */
1537 
1538 static bool
same_bool_result_p(const_tree op1,const_tree op2)1539 same_bool_result_p (const_tree op1, const_tree op2)
1540 {
1541   /* Simple cases first.  */
1542   if (operand_equal_p (op1, op2, 0))
1543     return true;
1544 
1545   /* Check the cases where at least one of the operands is a comparison.
1546      These are a bit smarter than operand_equal_p in that they apply some
1547      identifies on SSA_NAMEs.  */
1548   if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1549       && same_bool_comparison_p (op1, TREE_CODE (op2),
1550 				 TREE_OPERAND (op2, 0),
1551 				 TREE_OPERAND (op2, 1)))
1552     return true;
1553   if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1554       && same_bool_comparison_p (op2, TREE_CODE (op1),
1555 				 TREE_OPERAND (op1, 0),
1556 				 TREE_OPERAND (op1, 1)))
1557     return true;
1558 
1559   /* Default case.  */
1560   return false;
1561 }
1562 
1563 /* Forward declarations for some mutually recursive functions.  */
1564 
1565 static tree
1566 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1567 		   enum tree_code code2, tree op2a, tree op2b);
1568 static tree
1569 and_var_with_comparison (tree var, bool invert,
1570 			 enum tree_code code2, tree op2a, tree op2b);
1571 static tree
1572 and_var_with_comparison_1 (gimple stmt,
1573 			   enum tree_code code2, tree op2a, tree op2b);
1574 static tree
1575 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1576 		  enum tree_code code2, tree op2a, tree op2b);
1577 static tree
1578 or_var_with_comparison (tree var, bool invert,
1579 			enum tree_code code2, tree op2a, tree op2b);
1580 static tree
1581 or_var_with_comparison_1 (gimple stmt,
1582 			  enum tree_code code2, tree op2a, tree op2b);
1583 
1584 /* Helper function for and_comparisons_1:  try to simplify the AND of the
1585    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1586    If INVERT is true, invert the value of the VAR before doing the AND.
1587    Return NULL_EXPR if we can't simplify this to a single expression.  */
1588 
1589 static tree
and_var_with_comparison(tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)1590 and_var_with_comparison (tree var, bool invert,
1591 			 enum tree_code code2, tree op2a, tree op2b)
1592 {
1593   tree t;
1594   gimple stmt = SSA_NAME_DEF_STMT (var);
1595 
1596   /* We can only deal with variables whose definitions are assignments.  */
1597   if (!is_gimple_assign (stmt))
1598     return NULL_TREE;
1599 
1600   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1601      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1602      Then we only have to consider the simpler non-inverted cases.  */
1603   if (invert)
1604     t = or_var_with_comparison_1 (stmt,
1605 				  invert_tree_comparison (code2, false),
1606 				  op2a, op2b);
1607   else
1608     t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1609   return canonicalize_bool (t, invert);
1610 }
1611 
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set when (op2a code2 op2b) is equivalent to VAR / !VAR.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests OP2A for being true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    /* var AND (var != 0) => var.  */
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests OP2A for being false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* var AND (var == 0) => false.  */
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
1774 
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is negative, zero, or positive as op1b is less than, equal
	 to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  /* val is true iff the second comparison makes the != test
	     redundant, in which case the second alone is the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  /* Mirror image of the case above: true iff the first
	     comparison makes the second (!=) redundant.  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* invert is true when the first comparison tests that op1a is
	 false (NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      /* Common simplified result across all PHI arguments; any
		 disagreement aborts with NULL_TREE.  */
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2028 
2029 /* Try to simplify the AND of two comparisons, specified by
2030    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2031    If this can be simplified to a single expression (without requiring
2032    introducing more SSA variables to hold intermediate values),
2033    return the resulting tree.  Otherwise return NULL_TREE.
2034    If the result expression is non-null, it has boolean type.  */
2035 
2036 tree
maybe_fold_and_comparisons(enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)2037 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2038 			    enum tree_code code2, tree op2a, tree op2b)
2039 {
2040   tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2041   if (t)
2042     return t;
2043   else
2044     return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2045 }
2046 
2047 /* Helper function for or_comparisons_1:  try to simplify the OR of the
2048    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2049    If INVERT is true, invert the value of VAR before doing the OR.
2050    Return NULL_EXPR if we can't simplify this to a single expression.  */
2051 
2052 static tree
or_var_with_comparison(tree var,bool invert,enum tree_code code2,tree op2a,tree op2b)2053 or_var_with_comparison (tree var, bool invert,
2054 			enum tree_code code2, tree op2a, tree op2b)
2055 {
2056   tree t;
2057   gimple stmt = SSA_NAME_DEF_STMT (var);
2058 
2059   /* We can only deal with variables whose definitions are assignments.  */
2060   if (!is_gimple_assign (stmt))
2061     return NULL_TREE;
2062 
2063   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2064      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2065      Then we only have to consider the simpler non-inverted cases.  */
2066   if (invert)
2067     t = and_var_with_comparison_1 (stmt,
2068 				   invert_tree_comparison (code2, false),
2069 				   op2a, op2b);
2070   else
2071     t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2072   return canonicalize_bool (t, invert);
2073 }
2074 
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set to OP2A when the second comparison tests that OP2A is true
     (resp. false); consumed by the identity checks below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Simplification of (inner1 OR (op2a code2 op2b)), if any, saved
	 from the first redistribution attempt below.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code2 op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2238 
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is negative, zero, or positive as op1b is less than, equal
	 to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  /* val is true iff the second comparison subsumes the == test,
	     in which case the second alone is the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  /* Mirror image of the case above: true iff the first
	     comparison subsumes the second (==) test.  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* invert is true when the first comparison tests that op1a is
	 false (NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      /* Common simplified result across all PHI arguments; any
		 disagreement aborts with NULL_TREE.  */
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2492 
2493 /* Try to simplify the OR of two comparisons, specified by
2494    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2495    If this can be simplified to a single expression (without requiring
2496    introducing more SSA variables to hold intermediate values),
2497    return the resulting tree.  Otherwise return NULL_TREE.
2498    If the result expression is non-null, it has boolean type.  */
2499 
2500 tree
maybe_fold_or_comparisons(enum tree_code code1,tree op1a,tree op1b,enum tree_code code2,tree op2a,tree op2b)2501 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2502 			   enum tree_code code2, tree op2a, tree op2b)
2503 {
2504   tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2505   if (t)
2506     return t;
2507   else
2508     return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2509 }
2510 
2511 
2512 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2513 
2514    Either NULL_TREE, a simplified but non-constant or a constant
2515    is returned.
2516 
2517    ???  This should go into a gimple-fold-inline.h file to be eventually
2518    privatized with the single valueize function used in the various TUs
2519    to avoid the indirect function call overhead.  */
2520 
2521 tree
gimple_fold_stmt_to_constant_1(gimple stmt,tree (* valueize)(tree))2522 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2523 {
2524   location_t loc = gimple_location (stmt);
2525   switch (gimple_code (stmt))
2526     {
2527     case GIMPLE_ASSIGN:
2528       {
2529         enum tree_code subcode = gimple_assign_rhs_code (stmt);
2530 
2531         switch (get_gimple_rhs_class (subcode))
2532           {
2533           case GIMPLE_SINGLE_RHS:
2534             {
2535               tree rhs = gimple_assign_rhs1 (stmt);
2536               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2537 
2538               if (TREE_CODE (rhs) == SSA_NAME)
2539                 {
2540                   /* If the RHS is an SSA_NAME, return its known constant value,
2541                      if any.  */
2542                   return (*valueize) (rhs);
2543                 }
2544 	      /* Handle propagating invariant addresses into address
2545 		 operations.  */
2546 	      else if (TREE_CODE (rhs) == ADDR_EXPR
2547 		       && !is_gimple_min_invariant (rhs))
2548 		{
2549 		  HOST_WIDE_INT offset = 0;
2550 		  tree base;
2551 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2552 							  &offset,
2553 							  valueize);
2554 		  if (base
2555 		      && (CONSTANT_CLASS_P (base)
2556 			  || decl_address_invariant_p (base)))
2557 		    return build_invariant_address (TREE_TYPE (rhs),
2558 						    base, offset);
2559 		}
2560 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
2561 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2562 		       && (CONSTRUCTOR_NELTS (rhs)
2563 			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2564 		{
2565 		  unsigned i;
2566 		  tree val, *vec;
2567 
2568 		  vec = XALLOCAVEC (tree,
2569 				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2570 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2571 		    {
2572 		      val = (*valueize) (val);
2573 		      if (TREE_CODE (val) == INTEGER_CST
2574 			  || TREE_CODE (val) == REAL_CST
2575 			  || TREE_CODE (val) == FIXED_CST)
2576 			vec[i] = val;
2577 		      else
2578 			return NULL_TREE;
2579 		    }
2580 
2581 		  return build_vector (TREE_TYPE (rhs), vec);
2582 		}
2583 	      if (subcode == OBJ_TYPE_REF)
2584 		{
2585 		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2586 		  /* If callee is constant, we can fold away the wrapper.  */
2587 		  if (is_gimple_min_invariant (val))
2588 		    return val;
2589 		}
2590 
2591               if (kind == tcc_reference)
2592 		{
2593 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2594 		       || TREE_CODE (rhs) == REALPART_EXPR
2595 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
2596 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2597 		    {
2598 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2599 		      return fold_unary_loc (EXPR_LOCATION (rhs),
2600 					     TREE_CODE (rhs),
2601 					     TREE_TYPE (rhs), val);
2602 		    }
2603 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
2604 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2605 		    {
2606 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2607 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
2608 					       TREE_CODE (rhs),
2609 					       TREE_TYPE (rhs), val,
2610 					       TREE_OPERAND (rhs, 1),
2611 					       TREE_OPERAND (rhs, 2));
2612 		    }
2613 		  else if (TREE_CODE (rhs) == MEM_REF
2614 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2615 		    {
2616 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2617 		      if (TREE_CODE (val) == ADDR_EXPR
2618 			  && is_gimple_min_invariant (val))
2619 			{
2620 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2621 						  unshare_expr (val),
2622 						  TREE_OPERAND (rhs, 1));
2623 			  if (tem)
2624 			    rhs = tem;
2625 			}
2626 		    }
2627 		  return fold_const_aggregate_ref_1 (rhs, valueize);
2628 		}
2629               else if (kind == tcc_declaration)
2630                 return get_symbol_constant_value (rhs);
2631               return rhs;
2632             }
2633 
2634           case GIMPLE_UNARY_RHS:
2635             {
2636               /* Handle unary operators that can appear in GIMPLE form.
2637                  Note that we know the single operand must be a constant,
2638                  so this should almost always return a simplified RHS.  */
2639 	      tree lhs = gimple_assign_lhs (stmt);
2640               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2641 
2642 	      /* Conversions are useless for CCP purposes if they are
2643 		 value-preserving.  Thus the restrictions that
2644 		 useless_type_conversion_p places for restrict qualification
2645 		 of pointer types should not apply here.
2646 		 Substitution later will only substitute to allowed places.  */
2647 	      if (CONVERT_EXPR_CODE_P (subcode)
2648 		  && POINTER_TYPE_P (TREE_TYPE (lhs))
2649 		  && POINTER_TYPE_P (TREE_TYPE (op0))
2650 		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2651 		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2652 		  && TYPE_MODE (TREE_TYPE (lhs))
2653 		     == TYPE_MODE (TREE_TYPE (op0)))
2654 		return op0;
2655 
2656               return
2657 		fold_unary_ignore_overflow_loc (loc, subcode,
2658 						gimple_expr_type (stmt), op0);
2659             }
2660 
2661           case GIMPLE_BINARY_RHS:
2662             {
2663               /* Handle binary operators that can appear in GIMPLE form.  */
2664               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2665               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2666 
2667 	      /* Translate &x + CST into an invariant form suitable for
2668 	         further propagation.  */
2669 	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2670 		  && TREE_CODE (op0) == ADDR_EXPR
2671 		  && TREE_CODE (op1) == INTEGER_CST)
2672 		{
2673 		  tree off = fold_convert (ptr_type_node, op1);
2674 		  return build_fold_addr_expr_loc
2675 			   (loc,
2676 			    fold_build2 (MEM_REF,
2677 					 TREE_TYPE (TREE_TYPE (op0)),
2678 					 unshare_expr (op0), off));
2679 		}
2680 
2681               return fold_binary_loc (loc, subcode,
2682 				      gimple_expr_type (stmt), op0, op1);
2683             }
2684 
2685           case GIMPLE_TERNARY_RHS:
2686             {
2687               /* Handle ternary operators that can appear in GIMPLE form.  */
2688               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2689               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2690               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2691 
2692 	      /* Fold embedded expressions in ternary codes.  */
2693 	      if ((subcode == COND_EXPR
2694 		   || subcode == VEC_COND_EXPR)
2695 		  && COMPARISON_CLASS_P (op0))
2696 		{
2697 		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2698 		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2699 		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2700 					      TREE_TYPE (op0), op00, op01);
2701 		  if (tem)
2702 		    op0 = tem;
2703 		}
2704 
2705               return fold_ternary_loc (loc, subcode,
2706 				       gimple_expr_type (stmt), op0, op1, op2);
2707             }
2708 
2709           default:
2710             gcc_unreachable ();
2711           }
2712       }
2713 
2714     case GIMPLE_CALL:
2715       {
2716 	tree fn;
2717 
2718 	if (gimple_call_internal_p (stmt))
2719 	  {
2720 	    enum tree_code subcode = ERROR_MARK;
2721 	    switch (gimple_call_internal_fn (stmt))
2722 	      {
2723 	      case IFN_UBSAN_CHECK_ADD:
2724 		subcode = PLUS_EXPR;
2725 		break;
2726 	      case IFN_UBSAN_CHECK_SUB:
2727 		subcode = MINUS_EXPR;
2728 		break;
2729 	      case IFN_UBSAN_CHECK_MUL:
2730 		subcode = MULT_EXPR;
2731 		break;
2732 	      default:
2733 		return NULL_TREE;
2734 	      }
2735 	    tree arg0 = gimple_call_arg (stmt, 0);
2736 	    tree arg1 = gimple_call_arg (stmt, 1);
2737 	    tree op0 = (*valueize) (arg0);
2738 	    tree op1 = (*valueize) (arg1);
2739 
2740 	    if (TREE_CODE (op0) != INTEGER_CST
2741 		|| TREE_CODE (op1) != INTEGER_CST)
2742 	      {
2743 		switch (subcode)
2744 		  {
2745 		  case MULT_EXPR:
2746 		    /* x * 0 = 0 * x = 0 without overflow.  */
2747 		    if (integer_zerop (op0) || integer_zerop (op1))
2748 		      return build_zero_cst (TREE_TYPE (arg0));
2749 		    break;
2750 		  case MINUS_EXPR:
2751 		    /* y - y = 0 without overflow.  */
2752 		    if (operand_equal_p (op0, op1, 0))
2753 		      return build_zero_cst (TREE_TYPE (arg0));
2754 		    break;
2755 		  default:
2756 		    break;
2757 		  }
2758 	      }
2759 	    tree res
2760 	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
2761 	    if (res
2762 		&& TREE_CODE (res) == INTEGER_CST
2763 		&& !TREE_OVERFLOW (res))
2764 	      return res;
2765 	    return NULL_TREE;
2766 	  }
2767 
2768 	fn = (*valueize) (gimple_call_fn (stmt));
2769 	if (TREE_CODE (fn) == ADDR_EXPR
2770 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2771 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2772 	    && gimple_builtin_call_types_compatible_p (stmt,
2773 						       TREE_OPERAND (fn, 0)))
2774 	  {
2775 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2776 	    tree call, retval;
2777 	    unsigned i;
2778 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
2779 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
2780 	    call = build_call_array_loc (loc,
2781 					 gimple_call_return_type (stmt),
2782 					 fn, gimple_call_num_args (stmt), args);
2783 	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2784 	    if (retval)
2785 	      {
2786 		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
2787 		STRIP_NOPS (retval);
2788 		retval = fold_convert (gimple_call_return_type (stmt), retval);
2789 	      }
2790 	    return retval;
2791 	  }
2792 	return NULL_TREE;
2793       }
2794 
2795     default:
2796       return NULL_TREE;
2797     }
2798 }
2799 
2800 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2801    Returns NULL_TREE if folding to a constant is not possible, otherwise
2802    returns a constant according to is_gimple_min_invariant.  */
2803 
2804 tree
gimple_fold_stmt_to_constant(gimple stmt,tree (* valueize)(tree))2805 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2806 {
2807   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2808   if (res && is_gimple_min_invariant (res))
2809     return res;
2810   return NULL_TREE;
2811 }
2812 
2813 
2814 /* The following set of functions are supposed to fold references using
2815    their constant initializers.  */
2816 
2817 static tree fold_ctor_reference (tree type, tree ctor,
2818 				 unsigned HOST_WIDE_INT offset,
2819 				 unsigned HOST_WIDE_INT size, tree);
2820 
2821 /* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
2823    base is array[40] and we do know constructor of array), then
2824    BIT_OFFSET is adjusted accordingly.
2825 
2826    As a special case, return error_mark_node when constructor
2827    is not explicitly available, but it is known to be zero
2828    such as 'static const int a;'.  */
2829 static tree
get_base_constructor(tree base,HOST_WIDE_INT * bit_offset,tree (* valueize)(tree))2830 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2831 		      tree (*valueize)(tree))
2832 {
2833   HOST_WIDE_INT bit_offset2, size, max_size;
2834   if (TREE_CODE (base) == MEM_REF)
2835     {
2836       if (!integer_zerop (TREE_OPERAND (base, 1)))
2837 	{
2838 	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2839 	    return NULL_TREE;
2840 	  *bit_offset += (mem_ref_offset (base).low
2841 			  * BITS_PER_UNIT);
2842 	}
2843 
2844       if (valueize
2845 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2846 	base = valueize (TREE_OPERAND (base, 0));
2847       if (!base || TREE_CODE (base) != ADDR_EXPR)
2848         return NULL_TREE;
2849       base = TREE_OPERAND (base, 0);
2850     }
2851 
2852   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
2853      DECL_INITIAL.  If BASE is a nested reference into another
2854      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2855      the inner reference.  */
2856   switch (TREE_CODE (base))
2857     {
2858     case VAR_DECL:
2859     case CONST_DECL:
2860       {
2861 	tree init = ctor_for_folding (base);
2862 
2863 	/* Our semantic is exact opposite of ctor_for_folding;
2864 	   NULL means unknown, while error_mark_node is 0.  */
2865 	if (init == error_mark_node)
2866 	  return NULL_TREE;
2867 	if (!init)
2868 	  return error_mark_node;
2869 	return init;
2870       }
2871 
2872     case ARRAY_REF:
2873     case COMPONENT_REF:
2874       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2875       if (max_size == -1 || size != max_size)
2876 	return NULL_TREE;
2877       *bit_offset +=  bit_offset2;
2878       return get_base_constructor (base, bit_offset, valueize);
2879 
2880     case STRING_CST:
2881     case CONSTRUCTOR:
2882       return base;
2883 
2884     default:
2885       return NULL_TREE;
2886     }
2887 }
2888 
2889 /* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
2890    to the memory at bit OFFSET.
2891 
2892    We do only simple job of folding byte accesses.  */
2893 
2894 static tree
fold_string_cst_ctor_reference(tree type,tree ctor,unsigned HOST_WIDE_INT offset,unsigned HOST_WIDE_INT size)2895 fold_string_cst_ctor_reference (tree type, tree ctor,
2896 				unsigned HOST_WIDE_INT offset,
2897 				unsigned HOST_WIDE_INT size)
2898 {
2899   if (INTEGRAL_TYPE_P (type)
2900       && (TYPE_MODE (type)
2901 	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2902       && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2903 	  == MODE_INT)
2904       && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2905       && size == BITS_PER_UNIT
2906       && !(offset % BITS_PER_UNIT))
2907     {
2908       offset /= BITS_PER_UNIT;
2909       if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2910 	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2911 				   [offset]));
2912       /* Folding
2913 	 const char a[20]="hello";
2914 	 return a[10];
2915 
2916 	 might lead to offset greater than string length.  In this case we
2917 	 know value is either initialized to 0 or out of bounds.  Return 0
2918 	 in both cases.  */
2919       return build_zero_cst (type);
2920     }
2921   return NULL_TREE;
2922 }
2923 
2924 /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
2925    SIZE to the memory at bit OFFSET.  */
2926 
static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  VECTOR_TYPE ctors have no domain,
     so they fall back to a zero low bound and no index type.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));


  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
      || elt_size.is_zero ())
    return NULL_TREE;

  /* Compute the array index we look for: byte offset divided by the
     element size, then shifted into the domain by the low bound.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  /* Truncate/extend to the precision and signedness of the domain so
     that the comparisons against constructor indexes below are valid.  */
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so the first element with a NULL
     index below is numbered LOW_BOUND.  */
  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  (The '1' argument makes the comparisons
	 unsigned.)  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
3023 
3024 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3025    Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */
3026 
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk all fields recorded in the constructor looking for one that
     overlaps the requested bit range [OFFSET, OFFSET+SIZE).  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) gets a dummy zero end; the overlap
	 test below ignores it in that case.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  /* Access starting before the field also spans more than this
	     field; give up rather than fold a partial read.  */
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  /* Fully contained: fold the access relative to the field's
	     own value.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
3089 
3090 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3091    to the memory at bit OFFSET.  */
3092 
3093 static tree
fold_ctor_reference(tree type,tree ctor,unsigned HOST_WIDE_INT offset,unsigned HOST_WIDE_INT size,tree from_decl)3094 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3095 		     unsigned HOST_WIDE_INT size, tree from_decl)
3096 {
3097   tree ret;
3098 
3099   /* We found the field with exact match.  */
3100   if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3101       && !offset)
3102     return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3103 
3104   /* We are at the end of walk, see if we can view convert the
3105      result.  */
3106   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3107       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
3108       && !compare_tree_int (TYPE_SIZE (type), size)
3109       && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
3110     {
3111       ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3112       ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3113       if (ret)
3114 	STRIP_NOPS (ret);
3115       return ret;
3116     }
3117   if (TREE_CODE (ctor) == STRING_CST)
3118     return fold_string_cst_ctor_reference (type, ctor, offset, size);
3119   if (TREE_CODE (ctor) == CONSTRUCTOR)
3120     {
3121 
3122       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3123 	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3124 	return fold_array_ctor_reference (type, ctor, offset, size,
3125 					  from_decl);
3126       else
3127 	return fold_nonarray_ctor_reference (type, ctor, offset, size,
3128 					     from_decl);
3129     }
3130 
3131   return NULL_TREE;
3132 }
3133 
3134 /* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
3136    names using VALUEIZE.  Return NULL_TREE otherwise.  */
3137 
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare decl: use its constant initializer if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Cheap special case: reading a character out of a string literal.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  The
	     comma expressions bind each sub-result before testing it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      /* Convert the element index to a bit offset.  */
	      offset = doffset.to_shwi ();
	      offset *= tree_to_uhwi (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  tree_to_uhwi (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      /* Resolve the access to a base object plus constant bit range.  */
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex-valued operand first, then pick the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
			      TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3245 
/* Like fold_const_aggregate_ref_1, but fold reference T without
   valueizing SSA names (uses constant initializers only).  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
3251 
3252 /* Lookup virtual method with index TOKEN in a virtual table V
3253    at OFFSET.
3254    Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */
3257 
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Only LTO type merging can produce such mismatches.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Combine the byte offset into the vtable with the slot index to get
     the bit position of the wanted entry.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array. */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3363 
3364 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3365    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3366    KNOWN_BINFO carries the binfo describing the true type of
3367    OBJ_TYPE_REF_OBJECT(REF).
3368    Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */
3371 
3372 tree
gimple_get_virt_method_for_binfo(HOST_WIDE_INT token,tree known_binfo,bool * can_refer)3373 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
3374 				  bool *can_refer)
3375 {
3376   unsigned HOST_WIDE_INT offset;
3377   tree v;
3378 
3379   v = BINFO_VTABLE (known_binfo);
3380   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
3381   if (!v)
3382     return NULL_TREE;
3383 
3384   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
3385     {
3386       if (can_refer)
3387 	*can_refer = false;
3388       return NULL_TREE;
3389     }
3390   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
3391 }
3392 
3393 /* Return true iff VAL is a gimple expression that is known to be
3394    non-negative.  Restricted to floating-point inputs.  */
3395 
bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: distinct operands tell us nothing, so this
	     behaves like the default case.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins whose result range is inherently nonnegative.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* Verify N is an exact representation of the
		     exponent; otherwise evenness proves nothing.  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
3523 
3524 /* Given a pointer value OP0, return a simplified version of an
3525    indirection through OP0, or NULL_TREE if no simplification is
3526    possible.  Note that the resulting type may be different from
3527    the type pointed to in the sense that it is still compatible
3528    from the langhooks point of view. */
3529 
tree
gimple_fold_indirect_ref (tree t)
{
  /* T is a pointer value; PTYPE its pointer type, TYPE the pointed-to
     type, i.e. the type of the resulting indirection.  */
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions so the patterns below can see the
     underlying ADDR_EXPR / POINTER_PLUS_EXPR.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         /* Index with the array's lower bound, which need not be zero
	    (e.g. Fortran or Ada arrays).  */
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 /* Only fold when the lower bound is a constant; a variable
	    bound would require emitting extra statements.  */
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
	  /* Extract element 0: the bit-field width is the element size
	     and the bit offset is zero.  */
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  /* Convert the byte offset OFF into a bit offset for the
	     BIT_FIELD_REF, and the element size into bytes for the
	     bounds check below.  */
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
          if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
	  /* The imaginary part lives exactly one element past the real
	     part, so the offset must equal the element size.  */
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    build_int_cst_wide (ptype,
						TREE_INT_CST_LOW (off),
						TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection; fall back to an
	 explicit INDIRECT_REF when no simplification is possible.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  /* No simplification applied.  */
  return NULL_TREE;
}
3645 
3646 /* Return true if CODE is an operation that when operating on signed
3647    integer types involves undefined behavior on overflow and the
3648    operation can be expressed with unsigned arithmetic.  */
3649 
3650 bool
arith_code_with_undefined_signed_overflow(tree_code code)3651 arith_code_with_undefined_signed_overflow (tree_code code)
3652 {
3653   switch (code)
3654     {
3655     case PLUS_EXPR:
3656     case MINUS_EXPR:
3657     case MULT_EXPR:
3658     case NEGATE_EXPR:
3659     case POINTER_PLUS_EXPR:
3660       return true;
3661     default:
3662       return false;
3663     }
3664 }
3665 
3666 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3667    operation that can be transformed to unsigned arithmetic by converting
3668    its operand, carrying out the operation in the corresponding unsigned
3669    type and converting the result back to the original type.
3670 
3671    Returns a sequence of statements that replace STMT and also contain
3672    a modified form of STMT itself.  */
3673 
gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Compute the unsigned counterpart of the statement's type; the
     arithmetic will be carried out in this type, where overflow wraps
     and is therefore well-defined.  */
  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1..n-1; op 0 is the LHS) to the
     unsigned type, collecting any conversion statements this emits.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is reassigned by
     the conversion statement added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on unsigned integers; it becomes
     a plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed or
     pointer) type and store it into the original LHS.  */
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
3706