/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"

/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this point we have already decided not to output
        the function body, and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* The external flag is set, so we deal with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while still gimplifying, all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily have to give up, because the copy
     will be output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't yet
         decided to eliminate the offline copy of the function.
         The second check is important when devirtualization happens during
         the final compilation stage and making a new reference no longer
         causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
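
/* Illustrative scenario for case 3 above (not from the original sources):
   a COMDAT inline virtual method reached through an external vtable may
   be devirtualized late, after this unit has already decided not to emit
   its body; the predicate above then refuses the direct reference.  */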

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_USELESS_TYPE_CONVERSION (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (base && TREE_CODE (base) == VAR_DECL)
	{
	  TREE_ADDRESSABLE (base) = 1;
	  if (cfun && gimple_referenced_vars (cfun))
	    add_referenced_var (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
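
/* For example (illustrative, using GIMPLE dump syntax): an initializer
   that constant-folds to the POINTER_PLUS_EXPR form

     &a p+ 4

   is rewritten by the function above into the ADDR_EXPR form

     &MEM[(void *)&a + 4B]

   which is_gimple_min_invariant accepts.  */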

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	       || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
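
/* A minimal example (illustrative only): given

     static const int answer = 42;

   a use of 'answer' folds to the constant 42.  A 'static const int'
   without an initializer folds to 0, since nothing may override it at
   link or run time.  */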


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to be careful here not to drop a required conversion
	 from the rhs to the lhs if is_lhs is true, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					(TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
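
/* For example (illustrative only): constant address propagation can
   produce a load such as

     tmp_1 = MEM[(int *)&x + 0B];	/* the GIMPLE form of *&x */

   which the MEM_REF fold-back above turns into the direct reference

     tmp_1 = x;

   provided the types are strictly compatible.  */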


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
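      /* For example (illustrative only): with boolean-typed X,
	 X == 1 and X != 0 canonicalize to plain X, while X == 0 and
	 X != 1 become X ^ 1 (or ~X when X has one-bit precision).  */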
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X;
	         X == 1 and X != 0 is X itself.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
	          || (cmp_code == NE_EXPR && integer_onep (op2)))
	        is_logical_not = true;

	      if (is_logical_not == false)
	        result = op1;
	      /* Only for X of one-bit precision type is the transformation
	         !X -> ~X valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
	        result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
        result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement; however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the CFG.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
			     gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
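
/* For instance (illustrative only): if earlier folding shows the
   condition of  if (i_1 > 2)  to be always true, the statement becomes
   the canonical  if (1 != 0); removing the dead edge itself is left to
   CFG cleanup.  */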

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second, iterate over the statements forward, assigning virtual
     operands to their uses.  */
  last = NULL;
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Do not insert the last stmt in this loop but remember it
         for replacing the original statement.  */
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
	find_new_referenced_vars (new_stmt);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
      last = new_stmt;
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally, replace the original statement with the last one.  */
  gsi_replace (si_p, last, false);
}
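
/* Sketch of the virtual operand bookkeeping above (illustrative SSA
   names): replacing a call with VUSE .MEM_3 and VDEF .MEM_4 by a
   sequence containing two stores yields

     store1  [VUSE .MEM_3, VDEF .MEM_5]	  <- fresh intermediate name
     store2  [VUSE .MEM_5, VDEF .MEM_4]	  <- reuses the original VDEF

   so the first VUSE and the last VDEF match the replaced statement.  */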

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
        }
        return true;

      default:
        return false;
    }
}
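
/* Example walk (illustrative SSA names): for

     s_2 = PHI <"ab"(3), "cd"(4)>
     n_3 = strlen (s_2);

   both PHI arguments have string length 2, so with TYPE == 0 the
   function above stores 2 in *LENGTH and returns true; had the
   argument lengths differed, it would return false.  */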


/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
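
/* A typical fold performed above (illustrative only): when the source
   length is known,

     strcpy (dst, "abc");

   becomes

     memcpy (dst, "abc", 4);	/* length plus the terminating NUL */

   which later passes can expand more cheaply.  */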

/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
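
/* In effect (illustrative SSA names), a call  foo (this_1, ...)  becomes

     tmp_2 = this_1 p+ delta;
     foo (tmp_2, ...);

   as used, e.g., when redirecting a call so that it enters a method
   expecting an adjusted 'this' pointer.  */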

/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors; we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
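
/* Illustrative C++ source for the case handled above (not from the
   original sources):

     struct A { virtual int f () { return 0; } };	// no bases
     static A a;
     int g () { return a.f (); }	// OBJ_TYPE_REF on &a

   Because 'a' is a declaration whose type has no base binfos, the
   binfo of A is returned and the call can be folded to call A::f
   directly.  */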

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
          if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
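
/* For instance (illustrative only): with INVERT false, an SSA name x_1
   of non-boolean type canonicalizes to  x_1 != 0;  with INVERT true,
   a comparison  a_2 < b_3  becomes  a_2 >= b_3.  */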

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
1515 
1516 /* Try to simplify the AND of the ssa variable defined by the assignment
1517    STMT with the comparison specified by (OP2A CODE2 OP2B).
1518    Return NULL_EXPR if we can't simplify this to a single expression.  */
1519 
1520 static tree
1521 and_var_with_comparison_1 (gimple stmt,
1522 			   enum tree_code code2, tree op2a, tree op2b)
1523 {
1524   tree var = gimple_assign_lhs (stmt);
1525   tree true_test_var = NULL_TREE;
1526   tree false_test_var = NULL_TREE;
1527   enum tree_code innercode = gimple_assign_rhs_code (stmt);
1528 
1529   /* Check for identities like (var AND (var == 0)) => false.  */
1530   if (TREE_CODE (op2a) == SSA_NAME
1531       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1532     {
1533       if ((code2 == NE_EXPR && integer_zerop (op2b))
1534 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1535 	{
1536 	  true_test_var = op2a;
1537 	  if (var == true_test_var)
1538 	    return var;
1539 	}
1540       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1541 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1542 	{
1543 	  false_test_var = op2a;
1544 	  if (var == false_test_var)
1545 	    return boolean_false_node;
1546 	}
1547     }
1548 
1549   /* If the definition is a comparison, recurse on it.  */
1550   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1551     {
1552       tree t = and_comparisons_1 (innercode,
1553 				  gimple_assign_rhs1 (stmt),
1554 				  gimple_assign_rhs2 (stmt),
1555 				  code2,
1556 				  op2a,
1557 				  op2b);
1558       if (t)
1559 	return t;
1560     }
1561 
1562   /* If the definition is an AND or OR expression, we may be able to
1563      simplify by reassociating.  */
1564   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1565       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1566     {
1567       tree inner1 = gimple_assign_rhs1 (stmt);
1568       tree inner2 = gimple_assign_rhs2 (stmt);
1569       gimple s;
1570       tree t;
1571       tree partial = NULL_TREE;
1572       bool is_and = (innercode == BIT_AND_EXPR);
1573 
1574       /* Check for boolean identities that don't require recursive examination
1575 	 of inner1/inner2:
1576 	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1577 	 inner1 AND (inner1 OR inner2) => inner1
1578 	 !inner1 AND (inner1 AND inner2) => false
1579 	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1580          Likewise for similar cases involving inner2.  */
1581       if (inner1 == true_test_var)
1582 	return (is_and ? var : inner1);
1583       else if (inner2 == true_test_var)
1584 	return (is_and ? var : inner2);
1585       else if (inner1 == false_test_var)
1586 	return (is_and
1587 		? boolean_false_node
1588 		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
1589       else if (inner2 == false_test_var)
1590 	return (is_and
1591 		? boolean_false_node
1592 		: and_var_with_comparison (inner1, false, code2, op2a, op2b));
1593 
1594       /* Next, redistribute/reassociate the AND across the inner tests.
1595 	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
1596       if (TREE_CODE (inner1) == SSA_NAME
1597 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1598 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1599 	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1600 					      gimple_assign_rhs1 (s),
1601 					      gimple_assign_rhs2 (s),
1602 					      code2, op2a, op2b)))
1603 	{
1604 	  /* Handle the AND case, where we are reassociating:
1605 	     (inner1 AND inner2) AND (op2a code2 op2b)
1606 	     => (t AND inner2)
1607 	     If the partial result t is a constant, we win.  Otherwise
1608 	     continue on to try reassociating with the other inner test.  */
1609 	  if (is_and)
1610 	    {
1611 	      if (integer_onep (t))
1612 		return inner2;
1613 	      else if (integer_zerop (t))
1614 		return boolean_false_node;
1615 	    }
1616 
1617 	  /* Handle the OR case, where we are redistributing:
1618 	     (inner1 OR inner2) AND (op2a code2 op2b)
1619 	     => (t OR (inner2 AND (op2a code2 op2b)))  */
1620 	  else if (integer_onep (t))
1621 	    return boolean_true_node;
1622 
1623 	  /* Save partial result for later.  */
1624 	  partial = t;
1625 	}
1626 
1627       /* Compute the second partial result, (inner2 AND (op2a code2 op2b)).  */
1628       if (TREE_CODE (inner2) == SSA_NAME
1629 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1630 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1631 	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1632 					      gimple_assign_rhs1 (s),
1633 					      gimple_assign_rhs2 (s),
1634 					      code2, op2a, op2b)))
1635 	{
1636 	  /* Handle the AND case, where we are reassociating:
1637 	     (inner1 AND inner2) AND (op2a code2 op2b)
1638 	     => (inner1 AND t)  */
1639 	  if (is_and)
1640 	    {
1641 	      if (integer_onep (t))
1642 		return inner1;
1643 	      else if (integer_zerop (t))
1644 		return boolean_false_node;
1645 	      /* If both are the same, we can apply the identity
1646 		 (x AND x) == x.  */
1647 	      else if (partial && same_bool_result_p (t, partial))
1648 		return t;
1649 	    }
1650 
1651 	  /* Handle the OR case, where we are redistributing:
1652 	     (inner1 OR inner2) AND (op2a code2 op2b)
1653 	     => (t OR (inner1 AND (op2a code2 op2b)))
1654 	     => (t OR partial)  */
1655 	  else
1656 	    {
1657 	      if (integer_onep (t))
1658 		return boolean_true_node;
1659 	      else if (partial)
1660 		{
1661 		  /* We already got a simplification for the other
1662 		     operand to the redistributed OR expression.  The
1663 		     interesting case is when at least one is false.
1664 		     Or, if both are the same, we can apply the identity
1665 		     (x OR x) == x.  */
1666 		  if (integer_zerop (partial))
1667 		    return t;
1668 		  else if (integer_zerop (t))
1669 		    return partial;
1670 		  else if (same_bool_result_p (t, partial))
1671 		    return t;
1672 		}
1673 	    }
1674 	}
1675     }
1676   return NULL_TREE;
1677 }
1678 
1679 /* Try to simplify the AND of two comparisons defined by
1680    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1681    If this can be done without constructing an intermediate value,
1682    return the resulting tree; otherwise NULL_TREE is returned.
1683    This function is deliberately asymmetric as it recurses on SSA_DEFs
1684    in the first comparison but not the second.  */
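
/* For example, for integer operands (x <= y) AND (x != y) combines into
   the single test (x < y), and (x < y) AND (x > y) combines into
   boolean_false_node.  */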
1685 
1686 static tree
1687 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1688 		   enum tree_code code2, tree op2a, tree op2b)
1689 {
1690   /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
1691   if (operand_equal_p (op1a, op2a, 0)
1692       && operand_equal_p (op1b, op2b, 0))
1693     {
1694       /* Result will be either NULL_TREE, or a combined comparison.  */
1695       tree t = combine_comparisons (UNKNOWN_LOCATION,
1696 				    TRUTH_ANDIF_EXPR, code1, code2,
1697 				    boolean_type_node, op1a, op1b);
1698       if (t)
1699 	return t;
1700     }
1701 
1702   /* Likewise the swapped case of the above.  */
1703   if (operand_equal_p (op1a, op2b, 0)
1704       && operand_equal_p (op1b, op2a, 0))
1705     {
1706       /* Result will be either NULL_TREE, or a combined comparison.  */
1707       tree t = combine_comparisons (UNKNOWN_LOCATION,
1708 				    TRUTH_ANDIF_EXPR, code1,
1709 				    swap_tree_comparison (code2),
1710 				    boolean_type_node, op1a, op1b);
1711       if (t)
1712 	return t;
1713     }
1714 
1715   /* If both comparisons are of the same value against constants, we might
1716      be able to merge them.  */
1717   if (operand_equal_p (op1a, op2a, 0)
1718       && TREE_CODE (op1b) == INTEGER_CST
1719       && TREE_CODE (op2b) == INTEGER_CST)
1720     {
1721       int cmp = tree_int_cst_compare (op1b, op2b);
1722 
1723       /* If we have (op1a == op1b), we should either be able to
1724 	 return that or FALSE, depending on whether the constant op1b
1725 	 also satisfies the other comparison against op2b.  */
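      /* For instance, (x == 3) AND (x < 5) simplifies to (x == 3),
	 while (x == 7) AND (x < 5) simplifies to boolean_false_node.  */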
1726       if (code1 == EQ_EXPR)
1727 	{
1728 	  bool done = true;
1729 	  bool val;
1730 	  switch (code2)
1731 	    {
1732 	    case EQ_EXPR: val = (cmp == 0); break;
1733 	    case NE_EXPR: val = (cmp != 0); break;
1734 	    case LT_EXPR: val = (cmp < 0); break;
1735 	    case GT_EXPR: val = (cmp > 0); break;
1736 	    case LE_EXPR: val = (cmp <= 0); break;
1737 	    case GE_EXPR: val = (cmp >= 0); break;
1738 	    default: done = false;
1739 	    }
1740 	  if (done)
1741 	    {
1742 	      if (val)
1743 		return fold_build2 (code1, boolean_type_node, op1a, op1b);
1744 	      else
1745 		return boolean_false_node;
1746 	    }
1747 	}
1748       /* Likewise if the second comparison is an == comparison.  */
1749       else if (code2 == EQ_EXPR)
1750 	{
1751 	  bool done = true;
1752 	  bool val;
1753 	  switch (code1)
1754 	    {
1755 	    case EQ_EXPR: val = (cmp == 0); break;
1756 	    case NE_EXPR: val = (cmp != 0); break;
1757 	    case LT_EXPR: val = (cmp > 0); break;
1758 	    case GT_EXPR: val = (cmp < 0); break;
1759 	    case LE_EXPR: val = (cmp >= 0); break;
1760 	    case GE_EXPR: val = (cmp <= 0); break;
1761 	    default: done = false;
1762 	    }
1763 	  if (done)
1764 	    {
1765 	      if (val)
1766 		return fold_build2 (code2, boolean_type_node, op2a, op2b);
1767 	      else
1768 		return boolean_false_node;
1769 	    }
1770 	}
1771 
1772       /* Same business with inequality tests.  */
1773       else if (code1 == NE_EXPR)
1774 	{
1775 	  bool val;
1776 	  switch (code2)
1777 	    {
1778 	    case EQ_EXPR: val = (cmp != 0); break;
1779 	    case NE_EXPR: val = (cmp == 0); break;
1780 	    case LT_EXPR: val = (cmp >= 0); break;
1781 	    case GT_EXPR: val = (cmp <= 0); break;
1782 	    case LE_EXPR: val = (cmp > 0); break;
1783 	    case GE_EXPR: val = (cmp < 0); break;
1784 	    default:
1785 	      val = false;
1786 	    }
1787 	  if (val)
1788 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
1789 	}
1790       else if (code2 == NE_EXPR)
1791 	{
1792 	  bool val;
1793 	  switch (code1)
1794 	    {
1795 	    case EQ_EXPR: val = (cmp == 0); break;
1796 	    case NE_EXPR: val = (cmp != 0); break;
1797 	    case LT_EXPR: val = (cmp <= 0); break;
1798 	    case GT_EXPR: val = (cmp >= 0); break;
1799 	    case LE_EXPR: val = (cmp < 0); break;
1800 	    case GE_EXPR: val = (cmp > 0); break;
1801 	    default:
1802 	      val = false;
1803 	    }
1804 	  if (val)
1805 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
1806 	}
1807 
1808       /* Choose the more restrictive of two < or <= comparisons.  */
1809       else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1810 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
1811 	{
1812 	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1813 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
1814 	  else
1815 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
1816 	}
1817 
1818       /* Likewise choose the more restrictive of two > or >= comparisons.  */
1819       else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1820 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
1821 	{
1822 	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1823 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
1824 	  else
1825 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
1826 	}
1827 
1828       /* Check for singleton ranges.  */
1829       else if (cmp == 0
1830 	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
1831 		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
1832 	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1833 
1834       /* Check for disjoint ranges. */
1835       else if (cmp <= 0
1836 	       && (code1 == LT_EXPR || code1 == LE_EXPR)
1837 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
1838 	return boolean_false_node;
1839       else if (cmp >= 0
1840 	       && (code1 == GT_EXPR || code1 == GE_EXPR)
1841 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
1842 	return boolean_false_node;
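      /* For instance, (x <= 3) AND (x >= 3) folds to (x == 3) above, while
	 (x < 3) AND (x > 5) folds to boolean_false_node here.  */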
1843     }
1844 
1845   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1846      NAME's definition is a truth value.  See if there are any simplifications
1847      that can be done against the NAME's definition.  */
1848   if (TREE_CODE (op1a) == SSA_NAME
1849       && (code1 == NE_EXPR || code1 == EQ_EXPR)
1850       && (integer_zerop (op1b) || integer_onep (op1b)))
1851     {
1852       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1853 		     || (code1 == NE_EXPR && integer_onep (op1b)));
1854       gimple stmt = SSA_NAME_DEF_STMT (op1a);
1855       switch (gimple_code (stmt))
1856 	{
1857 	case GIMPLE_ASSIGN:
1858 	  /* Try to simplify by copy-propagating the definition.  */
1859 	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1860 
1861 	case GIMPLE_PHI:
1862 	  /* If every argument to the PHI produces the same result when
1863 	     ANDed with the second comparison, we win.
1864 	     Do not do this unless the type is bool since we need a bool
1865 	     result here anyway.  */
1866 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1867 	    {
1868 	      tree result = NULL_TREE;
1869 	      unsigned i;
1870 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
1871 		{
1872 		  tree arg = gimple_phi_arg_def (stmt, i);
1873 
1874 		  /* If this PHI has itself as an argument, ignore it.
1875 		     If all the other args produce the same result,
1876 		     we're still OK.  */
1877 		  if (arg == gimple_phi_result (stmt))
1878 		    continue;
1879 		  else if (TREE_CODE (arg) == INTEGER_CST)
1880 		    {
1881 		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1882 			{
1883 			  if (!result)
1884 			    result = boolean_false_node;
1885 			  else if (!integer_zerop (result))
1886 			    return NULL_TREE;
1887 			}
1888 		      else if (!result)
1889 			result = fold_build2 (code2, boolean_type_node,
1890 					      op2a, op2b);
1891 		      else if (!same_bool_comparison_p (result,
1892 							code2, op2a, op2b))
1893 			return NULL_TREE;
1894 		    }
1895 		  else if (TREE_CODE (arg) == SSA_NAME
1896 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
1897 		    {
1898 		      tree temp;
1899 		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1900 		      /* In simple cases we can look through PHI nodes,
1901 			 but we have to be careful with loops.
1902 			 See PR49073.  */
1903 		      if (! dom_info_available_p (CDI_DOMINATORS)
1904 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
1905 			  || dominated_by_p (CDI_DOMINATORS,
1906 					     gimple_bb (def_stmt),
1907 					     gimple_bb (stmt)))
1908 			return NULL_TREE;
1909 		      temp = and_var_with_comparison (arg, invert, code2,
1910 						      op2a, op2b);
1911 		      if (!temp)
1912 			return NULL_TREE;
1913 		      else if (!result)
1914 			result = temp;
1915 		      else if (!same_bool_result_p (result, temp))
1916 			return NULL_TREE;
1917 		    }
1918 		  else
1919 		    return NULL_TREE;
1920 		}
1921 	      return result;
1922 	    }
1923 
1924 	default:
1925 	  break;
1926 	}
1927     }
1928   return NULL_TREE;
1929 }
1930 
1931 /* Try to simplify the AND of two comparisons, specified by
1932    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1933    If this can be simplified to a single expression (without introducing
1934    more SSA variables to hold intermediate values),
1935    return the resulting tree.  Otherwise return NULL_TREE.
1936    If the result expression is non-null, it has boolean type.  */
1937 
1938 tree
1939 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1940 			    enum tree_code code2, tree op2a, tree op2b)
1941 {
1942   tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1943   if (t)
1944     return t;
1945   else
1946     return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1947 }
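
/* For example, given an integer SSA name X and INTEGER_CST nodes for 3
   and 5, maybe_fold_and_comparisons (LT_EXPR, x, five, LT_EXPR, x, three)
   returns the tree for x < 3, while maybe_fold_and_comparisons (LT_EXPR,
   x, three, GT_EXPR, x, five) returns boolean_false_node.  */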
1948 
1949 /* Helper function for or_comparisons_1:  try to simplify the OR of the
1950    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1951    If INVERT is true, invert the value of VAR before doing the OR.
1952    Return NULL_TREE if we can't simplify this to a single expression.  */
1953 
1954 static tree
1955 or_var_with_comparison (tree var, bool invert,
1956 			enum tree_code code2, tree op2a, tree op2b)
1957 {
1958   tree t;
1959   gimple stmt = SSA_NAME_DEF_STMT (var);
1960 
1961   /* We can only deal with variables whose definitions are assignments.  */
1962   if (!is_gimple_assign (stmt))
1963     return NULL_TREE;
1964 
1965   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1966      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1967      Then we only have to consider the simpler non-inverted cases.  */
1968   if (invert)
1969     t = and_var_with_comparison_1 (stmt,
1970 				   invert_tree_comparison (code2, false),
1971 				   op2a, op2b);
1972   else
1973     t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1974   return canonicalize_bool (t, invert);
1975 }
1976 
1977 /* Try to simplify the OR of the ssa variable defined by the assignment
1978    STMT with the comparison specified by (OP2A CODE2 OP2B).
1979    Return NULL_TREE if we can't simplify this to a single expression.  */
1980 
1981 static tree
1982 or_var_with_comparison_1 (gimple stmt,
1983 			  enum tree_code code2, tree op2a, tree op2b)
1984 {
1985   tree var = gimple_assign_lhs (stmt);
1986   tree true_test_var = NULL_TREE;
1987   tree false_test_var = NULL_TREE;
1988   enum tree_code innercode = gimple_assign_rhs_code (stmt);
1989 
1990   /* Check for identities like (var OR (var != 0)) => true.  */
1991   if (TREE_CODE (op2a) == SSA_NAME
1992       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1993     {
1994       if ((code2 == NE_EXPR && integer_zerop (op2b))
1995 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1996 	{
1997 	  true_test_var = op2a;
1998 	  if (var == true_test_var)
1999 	    return var;
2000 	}
2001       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2002 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2003 	{
2004 	  false_test_var = op2a;
2005 	  if (var == false_test_var)
2006 	    return boolean_true_node;
2007 	}
2008     }
2009 
2010   /* If the definition is a comparison, recurse on it.  */
2011   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2012     {
2013       tree t = or_comparisons_1 (innercode,
2014 				 gimple_assign_rhs1 (stmt),
2015 				 gimple_assign_rhs2 (stmt),
2016 				 code2,
2017 				 op2a,
2018 				 op2b);
2019       if (t)
2020 	return t;
2021     }
2022 
2023   /* If the definition is an AND or OR expression, we may be able to
2024      simplify by reassociating.  */
2025   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2026       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2027     {
2028       tree inner1 = gimple_assign_rhs1 (stmt);
2029       tree inner2 = gimple_assign_rhs2 (stmt);
2030       gimple s;
2031       tree t;
2032       tree partial = NULL_TREE;
2033       bool is_or = (innercode == BIT_IOR_EXPR);
2034 
2035       /* Check for boolean identities that don't require recursive examination
2036 	 of inner1/inner2:
2037 	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2038 	 inner1 OR (inner1 AND inner2) => inner1
2039 	 !inner1 OR (inner1 OR inner2) => true
2040 	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2041       */
2042       if (inner1 == true_test_var)
2043 	return (is_or ? var : inner1);
2044       else if (inner2 == true_test_var)
2045 	return (is_or ? var : inner2);
2046       else if (inner1 == false_test_var)
2047 	return (is_or
2048 		? boolean_true_node
2049 		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
2050       else if (inner2 == false_test_var)
2051 	return (is_or
2052 		? boolean_true_node
2053 		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
2054 
2055       /* Next, redistribute/reassociate the OR across the inner tests.
2056 	 Compute the first partial result, (inner1 OR (op2a code2 op2b)).  */
2057       if (TREE_CODE (inner1) == SSA_NAME
2058 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2059 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2060 	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2061 					     gimple_assign_rhs1 (s),
2062 					     gimple_assign_rhs2 (s),
2063 					     code2, op2a, op2b)))
2064 	{
2065 	  /* Handle the OR case, where we are reassociating:
2066 	     (inner1 OR inner2) OR (op2a code2 op2b)
2067 	     => (t OR inner2)
2068 	     If the partial result t is a constant, we win.  Otherwise
2069 	     continue on to try reassociating with the other inner test.  */
2070 	  if (is_or)
2071 	    {
2072 	      if (integer_onep (t))
2073 		return boolean_true_node;
2074 	      else if (integer_zerop (t))
2075 		return inner2;
2076 	    }
2077 
2078 	  /* Handle the AND case, where we are redistributing:
2079 	     (inner1 AND inner2) OR (op2a code2 op2b)
2080 	     => (t AND (inner2 OR (op2a code2 op2b)))  */
2081 	  else if (integer_zerop (t))
2082 	    return boolean_false_node;
2083 
2084 	  /* Save partial result for later.  */
2085 	  partial = t;
2086 	}
2087 
2088       /* Compute the second partial result, (inner2 OR (op2a code2 op2b)).  */
2089       if (TREE_CODE (inner2) == SSA_NAME
2090 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2091 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2092 	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2093 					     gimple_assign_rhs1 (s),
2094 					     gimple_assign_rhs2 (s),
2095 					     code2, op2a, op2b)))
2096 	{
2097 	  /* Handle the OR case, where we are reassociating:
2098 	     (inner1 OR inner2) OR (op2a code2 op2b)
2099 	     => (inner1 OR t)
2100 	     => (t OR partial)  */
2101 	  if (is_or)
2102 	    {
2103 	      if (integer_zerop (t))
2104 		return inner1;
2105 	      else if (integer_onep (t))
2106 		return boolean_true_node;
2107 	      /* If both are the same, we can apply the identity
2108 		 (x OR x) == x.  */
2109 	      else if (partial && same_bool_result_p (t, partial))
2110 		return t;
2111 	    }
2112 
2113 	  /* Handle the AND case, where we are redistributing:
2114 	     (inner1 AND inner2) OR (op2a code2 op2b)
2115 	     => (t AND (inner1 OR (op2a code2 op2b)))
2116 	     => (t AND partial)  */
2117 	  else
2118 	    {
2119 	      if (integer_zerop (t))
2120 		return boolean_false_node;
2121 	      else if (partial)
2122 		{
2123 		  /* We already got a simplification for the other
2124 		     operand to the redistributed AND expression.  The
2125 		     interesting case is when at least one is true.
2126 		     Or, if both are the same, we can apply the identity
2127 		     (x AND x) == x.  */
2128 		  if (integer_onep (partial))
2129 		    return t;
2130 		  else if (integer_onep (t))
2131 		    return partial;
2132 		  else if (same_bool_result_p (t, partial))
2133 		    return t;
2134 		}
2135 	    }
2136 	}
2137     }
2138   return NULL_TREE;
2139 }
2140 
2141 /* Try to simplify the OR of two comparisons defined by
2142    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2143    If this can be done without constructing an intermediate value,
2144    return the resulting tree; otherwise NULL_TREE is returned.
2145    This function is deliberately asymmetric as it recurses on SSA_DEFs
2146    in the first comparison but not the second.  */
2147 
2148 static tree
2149 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2150 		  enum tree_code code2, tree op2a, tree op2b)
2151 {
2152   /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
2153   if (operand_equal_p (op1a, op2a, 0)
2154       && operand_equal_p (op1b, op2b, 0))
2155     {
2156       /* Result will be either NULL_TREE, or a combined comparison.  */
2157       tree t = combine_comparisons (UNKNOWN_LOCATION,
2158 				    TRUTH_ORIF_EXPR, code1, code2,
2159 				    boolean_type_node, op1a, op1b);
2160       if (t)
2161 	return t;
2162     }
2163 
2164   /* Likewise the swapped case of the above.  */
2165   if (operand_equal_p (op1a, op2b, 0)
2166       && operand_equal_p (op1b, op2a, 0))
2167     {
2168       /* Result will be either NULL_TREE, or a combined comparison.  */
2169       tree t = combine_comparisons (UNKNOWN_LOCATION,
2170 				    TRUTH_ORIF_EXPR, code1,
2171 				    swap_tree_comparison (code2),
2172 				    boolean_type_node, op1a, op1b);
2173       if (t)
2174 	return t;
2175     }
2176 
2177   /* If both comparisons are of the same value against constants, we might
2178      be able to merge them.  */
2179   if (operand_equal_p (op1a, op2a, 0)
2180       && TREE_CODE (op1b) == INTEGER_CST
2181       && TREE_CODE (op2b) == INTEGER_CST)
2182     {
2183       int cmp = tree_int_cst_compare (op1b, op2b);
2184 
2185       /* If we have (op1a != op1b), we should either be able to
2186 	 return that or TRUE, depending on whether the constant op1b
2187 	 also satisfies the other comparison against op2b.  */
2188       if (code1 == NE_EXPR)
2189 	{
2190 	  bool done = true;
2191 	  bool val;
2192 	  switch (code2)
2193 	    {
2194 	    case EQ_EXPR: val = (cmp == 0); break;
2195 	    case NE_EXPR: val = (cmp != 0); break;
2196 	    case LT_EXPR: val = (cmp < 0); break;
2197 	    case GT_EXPR: val = (cmp > 0); break;
2198 	    case LE_EXPR: val = (cmp <= 0); break;
2199 	    case GE_EXPR: val = (cmp >= 0); break;
2200 	    default: done = false;
2201 	    }
2202 	  if (done)
2203 	    {
2204 	      if (val)
2205 		return boolean_true_node;
2206 	      else
2207 		return fold_build2 (code1, boolean_type_node, op1a, op1b);
2208 	    }
2209 	}
2210       /* Likewise if the second comparison is a != comparison.  */
2211       else if (code2 == NE_EXPR)
2212 	{
2213 	  bool done = true;
2214 	  bool val;
2215 	  switch (code1)
2216 	    {
2217 	    case EQ_EXPR: val = (cmp == 0); break;
2218 	    case NE_EXPR: val = (cmp != 0); break;
2219 	    case LT_EXPR: val = (cmp > 0); break;
2220 	    case GT_EXPR: val = (cmp < 0); break;
2221 	    case LE_EXPR: val = (cmp >= 0); break;
2222 	    case GE_EXPR: val = (cmp <= 0); break;
2223 	    default: done = false;
2224 	    }
2225 	  if (done)
2226 	    {
2227 	      if (val)
2228 		return boolean_true_node;
2229 	      else
2230 		return fold_build2 (code2, boolean_type_node, op2a, op2b);
2231 	    }
2232 	}
2233 
2234       /* See if an equality test is redundant with the other comparison.  */
2235       else if (code1 == EQ_EXPR)
2236 	{
2237 	  bool val;
2238 	  switch (code2)
2239 	    {
2240 	    case EQ_EXPR: val = (cmp == 0); break;
2241 	    case NE_EXPR: val = (cmp != 0); break;
2242 	    case LT_EXPR: val = (cmp < 0); break;
2243 	    case GT_EXPR: val = (cmp > 0); break;
2244 	    case LE_EXPR: val = (cmp <= 0); break;
2245 	    case GE_EXPR: val = (cmp >= 0); break;
2246 	    default:
2247 	      val = false;
2248 	    }
2249 	  if (val)
2250 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
2251 	}
2252       else if (code2 == EQ_EXPR)
2253 	{
2254 	  bool val;
2255 	  switch (code1)
2256 	    {
2257 	    case EQ_EXPR: val = (cmp == 0); break;
2258 	    case NE_EXPR: val = (cmp != 0); break;
2259 	    case LT_EXPR: val = (cmp > 0); break;
2260 	    case GT_EXPR: val = (cmp < 0); break;
2261 	    case LE_EXPR: val = (cmp >= 0); break;
2262 	    case GE_EXPR: val = (cmp <= 0); break;
2263 	    default:
2264 	      val = false;
2265 	    }
2266 	  if (val)
2267 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
2268 	}
2269 
2270       /* Choose the less restrictive of two < or <= comparisons.  */
2271       else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2272 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
2273 	{
2274 	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2275 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
2276 	  else
2277 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
2278 	}
2279 
2280       /* Likewise choose the less restrictive of two > or >= comparisons.  */
2281       else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2282 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
2283 	{
2284 	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2285 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
2286 	  else
2287 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
2288 	}
2289 
2290       /* Check for singleton ranges.  */
2291       else if (cmp == 0
2292 	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
2293 		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
2294 	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2295 
2296       /* Check for less/greater pairs that don't restrict the range at all.  */
2297       else if (cmp >= 0
2298 	       && (code1 == LT_EXPR || code1 == LE_EXPR)
2299 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
2300 	return boolean_true_node;
2301       else if (cmp <= 0
2302 	       && (code1 == GT_EXPR || code1 == GE_EXPR)
2303 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
2304 	return boolean_true_node;
2305     }
2306 
2307   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2308      NAME's definition is a truth value.  See if there are any simplifications
2309      that can be done against the NAME's definition.  */
2310   if (TREE_CODE (op1a) == SSA_NAME
2311       && (code1 == NE_EXPR || code1 == EQ_EXPR)
2312       && (integer_zerop (op1b) || integer_onep (op1b)))
2313     {
2314       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2315 		     || (code1 == NE_EXPR && integer_onep (op1b)));
2316       gimple stmt = SSA_NAME_DEF_STMT (op1a);
2317       switch (gimple_code (stmt))
2318 	{
2319 	case GIMPLE_ASSIGN:
2320 	  /* Try to simplify by copy-propagating the definition.  */
2321 	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2322 
2323 	case GIMPLE_PHI:
2324 	  /* If every argument to the PHI produces the same result when
2325 	     ORed with the second comparison, we win.
2326 	     Do not do this unless the type is bool since we need a bool
2327 	     result here anyway.  */
2328 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2329 	    {
2330 	      tree result = NULL_TREE;
2331 	      unsigned i;
2332 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
2333 		{
2334 		  tree arg = gimple_phi_arg_def (stmt, i);
2335 
2336 		  /* If this PHI has itself as an argument, ignore it.
2337 		     If all the other args produce the same result,
2338 		     we're still OK.  */
2339 		  if (arg == gimple_phi_result (stmt))
2340 		    continue;
2341 		  else if (TREE_CODE (arg) == INTEGER_CST)
2342 		    {
2343 		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2344 			{
2345 			  if (!result)
2346 			    result = boolean_true_node;
2347 			  else if (!integer_onep (result))
2348 			    return NULL_TREE;
2349 			}
2350 		      else if (!result)
2351 			result = fold_build2 (code2, boolean_type_node,
2352 					      op2a, op2b);
2353 		      else if (!same_bool_comparison_p (result,
2354 							code2, op2a, op2b))
2355 			return NULL_TREE;
2356 		    }
2357 		  else if (TREE_CODE (arg) == SSA_NAME
2358 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
2359 		    {
2360 		      tree temp;
2361 		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2362 		      /* In simple cases we can look through PHI nodes,
2363 			 but we have to be careful with loops.
2364 			 See PR49073.  */
2365 		      if (! dom_info_available_p (CDI_DOMINATORS)
2366 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
2367 			  || dominated_by_p (CDI_DOMINATORS,
2368 					     gimple_bb (def_stmt),
2369 					     gimple_bb (stmt)))
2370 			return NULL_TREE;
2371 		      temp = or_var_with_comparison (arg, invert, code2,
2372 						     op2a, op2b);
2373 		      if (!temp)
2374 			return NULL_TREE;
2375 		      else if (!result)
2376 			result = temp;
2377 		      else if (!same_bool_result_p (result, temp))
2378 			return NULL_TREE;
2379 		    }
2380 		  else
2381 		    return NULL_TREE;
2382 		}
2383 	      return result;
2384 	    }
2385 
2386 	default:
2387 	  break;
2388 	}
2389     }
2390   return NULL_TREE;
2391 }
2392 
2393 /* Try to simplify the OR of two comparisons, specified by
2394    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2395    If this can be simplified to a single expression (without introducing
2396    more SSA variables to hold intermediate values),
2397    return the resulting tree.  Otherwise return NULL_TREE.
2398    If the result expression is non-null, it has boolean type.  */
2399 
2400 tree
2401 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2402 			   enum tree_code code2, tree op2a, tree op2b)
2403 {
2404   tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2405   if (t)
2406     return t;
2407   else
2408     return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2409 }
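
/* For example, (x < 3) OR (x < 5) simplifies to the less restrictive
   test (x < 5), and (x <= 3) OR (x >= 3) simplifies to
   boolean_true_node, since the two ranges cover every value.  */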
2410 
2411 
2412 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2413 
2414    Returns either NULL_TREE, a simplified but non-constant expression,
2415    or a constant.
2416 
2417    ???  This should go into a gimple-fold-inline.h file to be eventually
2418    privatized with the single valueize function used in the various TUs
2419    to avoid the indirect function call overhead.  */
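
/* A minimal sketch of a VALUEIZE callback (illustrative only; the helper
   name is hypothetical): a caller with no constant lattice can map every
   SSA name to itself, in which case only statements whose operands are
   already constant will fold:

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

     tree val = gimple_fold_stmt_to_constant_1 (stmt, identity_valueize);  */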
2420 
2421 tree
2422 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2423 {
2424   location_t loc = gimple_location (stmt);
2425   switch (gimple_code (stmt))
2426     {
2427     case GIMPLE_ASSIGN:
2428       {
2429         enum tree_code subcode = gimple_assign_rhs_code (stmt);
2430 
2431         switch (get_gimple_rhs_class (subcode))
2432           {
2433           case GIMPLE_SINGLE_RHS:
2434             {
2435               tree rhs = gimple_assign_rhs1 (stmt);
2436               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2437 
2438               if (TREE_CODE (rhs) == SSA_NAME)
2439                 {
2440                   /* If the RHS is an SSA_NAME, return its known constant value,
2441                      if any.  */
2442                   return (*valueize) (rhs);
2443                 }
2444 	      /* Handle propagating invariant addresses into address
2445 		 operations.  */
2446 	      else if (TREE_CODE (rhs) == ADDR_EXPR
2447 		       && !is_gimple_min_invariant (rhs))
2448 		{
2449 		  HOST_WIDE_INT offset;
2450 		  tree base;
2451 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2452 							  &offset,
2453 							  valueize);
2454 		  if (base
2455 		      && (CONSTANT_CLASS_P (base)
2456 			  || decl_address_invariant_p (base)))
2457 		    return build_invariant_address (TREE_TYPE (rhs),
2458 						    base, offset);
2459 		}
2460 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
2461 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2462 		       && (CONSTRUCTOR_NELTS (rhs)
2463 			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2464 		{
2465 		  unsigned i;
2466 		  tree val, list;
2467 
2468 		  list = NULL_TREE;
2469 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2470 		    {
2471 		      val = (*valueize) (val);
2472 		      if (TREE_CODE (val) == INTEGER_CST
2473 			  || TREE_CODE (val) == REAL_CST
2474 			  || TREE_CODE (val) == FIXED_CST)
2475 			list = tree_cons (NULL_TREE, val, list);
2476 		      else
2477 			return NULL_TREE;
2478 		    }
2479 
2480 		  return build_vector (TREE_TYPE (rhs), nreverse (list));
2481 		}
2482 
2483               if (kind == tcc_reference)
2484 		{
2485 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2486 		       || TREE_CODE (rhs) == REALPART_EXPR
2487 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
2488 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2489 		    {
2490 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2491 		      return fold_unary_loc (EXPR_LOCATION (rhs),
2492 					     TREE_CODE (rhs),
2493 					     TREE_TYPE (rhs), val);
2494 		    }
2495 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
2496 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2497 		    {
2498 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2499 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
2500 					       TREE_CODE (rhs),
2501 					       TREE_TYPE (rhs), val,
2502 					       TREE_OPERAND (rhs, 1),
2503 					       TREE_OPERAND (rhs, 2));
2504 		    }
2505 		  else if (TREE_CODE (rhs) == MEM_REF
2506 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2507 		    {
2508 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2509 		      if (TREE_CODE (val) == ADDR_EXPR
2510 			  && is_gimple_min_invariant (val))
2511 			{
2512 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2513 						  unshare_expr (val),
2514 						  TREE_OPERAND (rhs, 1));
2515 			  if (tem)
2516 			    rhs = tem;
2517 			}
2518 		    }
2519 		  return fold_const_aggregate_ref_1 (rhs, valueize);
2520 		}
2521               else if (kind == tcc_declaration)
2522                 return get_symbol_constant_value (rhs);
2523               return rhs;
2524             }
2525 
2526           case GIMPLE_UNARY_RHS:
2527             {
2528               /* Handle unary operators that can appear in GIMPLE form.
2529                  Note that we know the single operand must be a constant,
2530                  so this should almost always return a simplified RHS.  */
2531 	      tree lhs = gimple_assign_lhs (stmt);
2532               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2533 
2534 	      /* Conversions are useless for CCP purposes if they are
2535 		 value-preserving.  Thus the restrictions that
2536 		 useless_type_conversion_p places for restrict qualification
2537 		 of pointer types should not apply here.
2538 		 Substitution later will only substitute to allowed places.  */
2539 	      if (CONVERT_EXPR_CODE_P (subcode)
2540 		  && POINTER_TYPE_P (TREE_TYPE (lhs))
2541 		  && POINTER_TYPE_P (TREE_TYPE (op0))
2542 		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2543 		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2544 		  && TYPE_MODE (TREE_TYPE (lhs))
2545 		     == TYPE_MODE (TREE_TYPE (op0)))
2546 		return op0;
2547 
2548               return
2549 		fold_unary_ignore_overflow_loc (loc, subcode,
2550 						gimple_expr_type (stmt), op0);
2551             }
2552 
2553           case GIMPLE_BINARY_RHS:
2554             {
2555               /* Handle binary operators that can appear in GIMPLE form.  */
2556               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2557               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2558 
2559 	      /* Translate &x + CST into an invariant form suitable for
2560 	         further propagation.  */
2561 	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2562 		  && TREE_CODE (op0) == ADDR_EXPR
2563 		  && TREE_CODE (op1) == INTEGER_CST)
2564 		{
2565 		  tree off = fold_convert (ptr_type_node, op1);
2566 		  return build_fold_addr_expr_loc
2567 			   (loc,
2568 			    fold_build2 (MEM_REF,
2569 					 TREE_TYPE (TREE_TYPE (op0)),
2570 					 unshare_expr (op0), off));
2571 		}
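	      /* For example, &a + 4 becomes &MEM_REF (&a, 4), which is
	         invariant and suitable for further propagation.  */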
2572 
2573               return fold_binary_loc (loc, subcode,
2574 				      gimple_expr_type (stmt), op0, op1);
2575             }
2576 
2577           case GIMPLE_TERNARY_RHS:
2578             {
2579               /* Handle ternary operators that can appear in GIMPLE form.  */
2580               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2581               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2582               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2583 
2584 	      /* Fold embedded expressions in ternary codes.  */
2585 	      if ((subcode == COND_EXPR
2586 		   || subcode == VEC_COND_EXPR)
2587 		  && COMPARISON_CLASS_P (op0))
2588 		{
2589 		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2590 		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2591 		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2592 					      TREE_TYPE (op0), op00, op01);
2593 		  if (tem)
2594 		    op0 = tem;
2595 		}
2596 
2597               return fold_ternary_loc (loc, subcode,
2598 				       gimple_expr_type (stmt), op0, op1, op2);
2599             }
2600 
2601           default:
2602             gcc_unreachable ();
2603           }
2604       }
2605 
2606     case GIMPLE_CALL:
2607       {
2608 	tree fn;
2609 
2610 	if (gimple_call_internal_p (stmt))
2611 	  /* No folding yet for these functions.  */
2612 	  return NULL_TREE;
2613 
2614 	fn = (*valueize) (gimple_call_fn (stmt));
2615 	if (TREE_CODE (fn) == ADDR_EXPR
2616 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2617 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2618 	  {
2619 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2620 	    tree call, retval;
2621 	    unsigned i;
2622 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
2623 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
2624 	    call = build_call_array_loc (loc,
2625 					 gimple_call_return_type (stmt),
2626 					 fn, gimple_call_num_args (stmt), args);
2627 	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2628 	    if (retval)
2629 	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
2630 	      STRIP_NOPS (retval);
2631 	    return retval;
2632 	  }
2633 	return NULL_TREE;
2634       }
2635 
2636     default:
2637       return NULL_TREE;
2638     }
2639 }
2640 
2641 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2642    Returns NULL_TREE if folding to a constant is not possible, otherwise
2643    returns a constant according to is_gimple_min_invariant.  */
2644 
2645 tree
2646 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2647 {
2648   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2649   if (res && is_gimple_min_invariant (res))
2650     return res;
2651   return NULL_TREE;
2652 }
2653 
2654 
2655 /* The following set of functions is supposed to fold references using
2656    their constant initializers.  */
2657 
2658 static tree fold_ctor_reference (tree type, tree ctor,
2659 				 unsigned HOST_WIDE_INT offset,
2660 				 unsigned HOST_WIDE_INT size);
2661 
2662 /* See if we can find the constructor defining the value of BASE.
2663    When we know the constructor at a constant offset (such as when
2664    BASE is array[40] and we know the constructor of the array), then
2665    BIT_OFFSET is adjusted accordingly.
2666 
2667    As a special case, return error_mark_node when constructor
2668    is not explicitly available, but it is known to be zero
2669    such as 'static const int a;'.  */
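
/* For example, for 'static const int a[4] = {1, 2, 3, 4};' and the
   reference MEM_REF (&a, 8), BIT_OFFSET is increased by 8 * BITS_PER_UNIT
   and the CONSTRUCTOR {1, 2, 3, 4} is returned from DECL_INITIAL.  */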
2670 static tree
2671 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2672 		      tree (*valueize)(tree))
2673 {
2674   HOST_WIDE_INT bit_offset2, size, max_size;
2675   if (TREE_CODE (base) == MEM_REF)
2676     {
2677       if (!integer_zerop (TREE_OPERAND (base, 1)))
2678 	{
2679 	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
2680 	    return NULL_TREE;
2681 	  *bit_offset += (mem_ref_offset (base).low
2682 			  * BITS_PER_UNIT);
2683 	}
2684 
2685       if (valueize
2686 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2687 	base = valueize (TREE_OPERAND (base, 0));
2688       if (!base || TREE_CODE (base) != ADDR_EXPR)
2689         return NULL_TREE;
2690       base = TREE_OPERAND (base, 0);
2691     }
2692 
2693   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
2694      DECL_INITIAL.  If BASE is a nested reference into another
2695      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2696      the inner reference.  */
2697   switch (TREE_CODE (base))
2698     {
2699     case VAR_DECL:
2700       if (!const_value_known_p (base))
2701 	return NULL_TREE;
2702 
2703       /* Fallthru.  */
2704     case CONST_DECL:
2705       if (!DECL_INITIAL (base)
2706 	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2707         return error_mark_node;
2708       /* Do not return an error_mark_node DECL_INITIAL.  LTO uses this
2709          as a special marker (_not_ zero ...) for its own purposes.  */
2710       if (DECL_INITIAL (base) == error_mark_node)
2711 	return NULL_TREE;
2712       return DECL_INITIAL (base);
2713 
2714     case ARRAY_REF:
2715     case COMPONENT_REF:
2716       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2717       if (max_size == -1 || size != max_size)
2718 	return NULL_TREE;
2719       *bit_offset += bit_offset2;
2720       return get_base_constructor (base, bit_offset, valueize);
2721 
2722     case STRING_CST:
2723     case CONSTRUCTOR:
2724       return base;
2725 
2726     default:
2727       return NULL_TREE;
2728     }
2729 }
2730 
2731 /* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
2732    to the memory at bit OFFSET.
2733 
2734    We only do the simple job of folding byte accesses.  */
2735 
2736 static tree
2737 fold_string_cst_ctor_reference (tree type, tree ctor,
2738 				unsigned HOST_WIDE_INT offset,
2739 				unsigned HOST_WIDE_INT size)
2740 {
2741   if (INTEGRAL_TYPE_P (type)
2742       && (TYPE_MODE (type)
2743 	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2744       && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2745 	  == MODE_INT)
2746       && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2747       && size == BITS_PER_UNIT
2748       && !(offset % BITS_PER_UNIT))
2749     {
2750       offset /= BITS_PER_UNIT;
2751       if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2752 	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2753 				   [offset]));
2754       /* Folding
2755 	 const char a[20]="hello";
2756 	 return a[10];
2757 
2758 	 might lead to an offset greater than the string length.  In this case
2759 	 we know the value is either initialized to 0 or out of bounds.  Return 0
2760 	 in both cases.  */
2761       return build_zero_cst (type);
2762     }
2763   return NULL_TREE;
2764 }
2765 /* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE and size
2766 /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
2767    SIZE to the memory at bit OFFSET.  */
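
/* For example, for 'static const int a[3] = {10, 20, 30};' with 32-bit
   int, OFFSET 32 and SIZE 32 compute access_index 1 and fold to the
   constant 20.  */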
2768 
2769 static tree
2770 fold_array_ctor_reference (tree type, tree ctor,
2771 			   unsigned HOST_WIDE_INT offset,
2772 			   unsigned HOST_WIDE_INT size)
2773 {
2774   unsigned HOST_WIDE_INT cnt;
2775   tree cfield, cval;
2776   double_int low_bound, elt_size;
2777   double_int index, max_index;
2778   double_int access_index;
2779   tree domain_type = NULL_TREE;
2780   HOST_WIDE_INT inner_offset;
2781 
2782   /* Compute low bound and elt size.  */
2783   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2784     domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2785   if (domain_type && TYPE_MIN_VALUE (domain_type))
2786     {
2787       /* Static constructors for variably sized objects make no sense.  */
2788       gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2789       low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2790     }
2791   else
2792     low_bound = double_int_zero;
2793   /* Static constructors for variably sized objects make no sense.  */
2794   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2795 	      == INTEGER_CST);
2796   elt_size =
2797     tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2798 
2799 
2800   /* We can handle only constantly sized accesses that are known to not
2801      be larger than the size of the array element.  */
2802   if (!TYPE_SIZE_UNIT (type)
2803       || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2804       || double_int_cmp (elt_size,
2805 			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2806     return NULL_TREE;
2807 
2808   /* Compute the array index we look for.  */
2809   access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2810 				  elt_size, TRUNC_DIV_EXPR);
2811   access_index = double_int_add (access_index, low_bound);
2812 
2813   /* And the offset within the access.  */
2814   inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2815 
2816   /* See if the array field is large enough to span the whole access.  We do not
2817      care to fold accesses spanning multiple array indexes.  */
2818   if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2819     return NULL_TREE;
2820 
2821   index = double_int_sub (low_bound, double_int_one);
2822   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2823     {
2824       /* An array constructor might explicitly set the index, specify a range,
2825 	 or leave the index NULL, meaning that it is the next index after the
2826 	 previous one.  */
2827       if (cfield)
2828 	{
2829 	  if (TREE_CODE (cfield) == INTEGER_CST)
2830 	    max_index = index = tree_to_double_int (cfield);
2831 	  else
2832 	    {
2833 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2834 	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2835 	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2836 	    }
2837 	}
2838       else
2839 	max_index = index = double_int_add (index, double_int_one);
2840 
2841       /* Do we have a match?  */
2842       if (double_int_cmp (access_index, index, 1) >= 0
2843 	  && double_int_cmp (access_index, max_index, 1) <= 0)
2844 	return fold_ctor_reference (type, cval, inner_offset, size);
2845     }
2846   /* When memory is not explicitly mentioned in the constructor,
2847      it is 0 (or out of range).  */
2848   return build_zero_cst (type);
2849 }
2850 
2851 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2852    Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */
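
/* For example, for a CONSTRUCTOR initializing 'struct { int a; int b; }'
   with {1, 2} and 32-bit int, OFFSET 32 and SIZE 32 overlap field 'b'
   exactly, so the reference folds to the constant 2.  */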
2853 
2854 static tree
2855 fold_nonarray_ctor_reference (tree type, tree ctor,
2856 			      unsigned HOST_WIDE_INT offset,
2857 			      unsigned HOST_WIDE_INT size)
2858 {
2859   unsigned HOST_WIDE_INT cnt;
2860   tree cfield, cval;
2861 
2862   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2863 			    cval)
2864     {
2865       tree byte_offset = DECL_FIELD_OFFSET (cfield);
2866       tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2867       tree field_size = DECL_SIZE (cfield);
2868       double_int bitoffset;
2869       double_int byte_offset_cst = tree_to_double_int (byte_offset);
2870       double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2871       double_int bitoffset_end, access_end;
2872 
2873       /* Variable sized objects in static constructors make no sense,
2874 	 but field_size can be NULL for flexible array members.  */
2875       gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2876 		  && TREE_CODE (byte_offset) == INTEGER_CST
2877 		  && (field_size != NULL_TREE
2878 		      ? TREE_CODE (field_size) == INTEGER_CST
2879 		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2880 
2881       /* Compute bit offset of the field.  */
2882       bitoffset = double_int_add (tree_to_double_int (field_offset),
2883 				  double_int_mul (byte_offset_cst,
2884 						  bits_per_unit_cst));
2885       /* Compute bit offset where the field ends.  */
2886       if (field_size != NULL_TREE)
2887 	bitoffset_end = double_int_add (bitoffset,
2888 					tree_to_double_int (field_size));
2889       else
2890 	bitoffset_end = double_int_zero;
2891 
2892       access_end = double_int_add (uhwi_to_double_int (offset),
2893 				   uhwi_to_double_int (size));
2894 
2895       /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2896 	 [BITOFFSET, BITOFFSET_END)?  */
2897       if (double_int_cmp (access_end, bitoffset, 0) > 0
2898 	  && (field_size == NULL_TREE
2899 	      || double_int_cmp (uhwi_to_double_int (offset),
2900 				 bitoffset_end, 0) < 0))
2901 	{
2902 	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2903 						    bitoffset);
2904 	  /* We do have overlap.  Now see if the field is large enough to
2905 	     cover the access.  Give up for accesses spanning multiple
2906 	     fields.  */
2907 	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2908 	    return NULL_TREE;
2909 	  if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2910 	    return NULL_TREE;
2911 	  return fold_ctor_reference (type, cval,
2912 				      double_int_to_uhwi (inner_offset), size);
2913 	}
2914     }
2915   /* When memory is not explicitly mentioned in the constructor, it is 0.  */
2916   return build_zero_cst (type);
2917 }
2918 
2919 /* CTOR is the value initializing memory.  Fold a reference of type TYPE and
2920    size SIZE to the memory at bit OFFSET.  */
2921 
2922 static tree
2923 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2924 		     unsigned HOST_WIDE_INT size)
2925 {
2926   tree ret;
2927 
2928   /* We found the field with an exact match.  */
2929   if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2930       && !offset)
2931     return canonicalize_constructor_val (ctor);
2932 
2933   /* We are at the end of the walk; see if we can view-convert the
2934      result.  */
2935   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2936       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
2937       && operand_equal_p (TYPE_SIZE (type),
2938 			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
2939     {
2940       ret = canonicalize_constructor_val (ctor);
2941       ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2942       if (ret)
2943 	STRIP_NOPS (ret);
2944       return ret;
2945     }
2946   if (TREE_CODE (ctor) == STRING_CST)
2947     return fold_string_cst_ctor_reference (type, ctor, offset, size);
2948   if (TREE_CODE (ctor) == CONSTRUCTOR)
2949     {
2950 
2951       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2952 	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2953 	return fold_array_ctor_reference (type, ctor, offset, size);
2954       else
2955 	return fold_nonarray_ctor_reference (type, ctor, offset, size);
2956     }
2957 
2958   return NULL_TREE;
2959 }
2960 
2961 /* Return the tree representing the element referenced by T if T is an
2962    ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2963    ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2964 
2965 tree
2966 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2967 {
2968   tree ctor, idx, base;
2969   HOST_WIDE_INT offset, size, max_size;
2970   tree tem;
2971 
2972   if (TREE_THIS_VOLATILE (t))
2973     return NULL_TREE;
2974 
2975   if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2976     return get_symbol_constant_value (t);
2977 
2978   tem = fold_read_from_constant_string (t);
2979   if (tem)
2980     return tem;
2981 
2982   switch (TREE_CODE (t))
2983     {
2984     case ARRAY_REF:
2985     case ARRAY_RANGE_REF:
2986       /* Constant indexes are handled well by get_base_constructor.
2987 	 We only special-case variable offsets here.
2988 	 FIXME: This code can't handle nested references with variable indexes
2989 	 (they will be handled only by iteration of ccp).  Perhaps we can bring
2990 	 get_ref_base_and_extent here and make it use a valueize callback.  */
2991       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2992 	  && valueize
2993 	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2994 	  && host_integerp (idx, 0))
2995 	{
2996 	  tree low_bound, unit_size;
2997 
2998 	  /* If the resulting bit-offset is constant, track it.  */
2999 	  if ((low_bound = array_ref_low_bound (t),
3000 	       host_integerp (low_bound, 0))
3001 	      && (unit_size = array_ref_element_size (t),
3002 		  host_integerp (unit_size, 1)))
3003 	    {
3004 	      offset = TREE_INT_CST_LOW (idx);
3005 	      offset -= TREE_INT_CST_LOW (low_bound);
3006 	      offset *= TREE_INT_CST_LOW (unit_size);
3007 	      offset *= BITS_PER_UNIT;
3008 
3009 	      base = TREE_OPERAND (t, 0);
3010 	      ctor = get_base_constructor (base, &offset, valueize);
3011 	      /* Empty constructor.  Always fold to 0.  */
3012 	      if (ctor == error_mark_node)
3013 		return build_zero_cst (TREE_TYPE (t));
3014 	      /* Out-of-bounds array access.  Value is undefined,
3015 		 but don't fold.  */
3016 	      if (offset < 0)
3017 		return NULL_TREE;
3018 	      /* We cannot determine the ctor.  */
3019 	      if (!ctor)
3020 		return NULL_TREE;
3021 	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3022 					  TREE_INT_CST_LOW (unit_size)
3023 					  * BITS_PER_UNIT);
3024 	    }
3025 	}
3026       /* Fallthru.  */
3027 
3028     case COMPONENT_REF:
3029     case BIT_FIELD_REF:
3030     case TARGET_MEM_REF:
3031     case MEM_REF:
3032       base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3033       ctor = get_base_constructor (base, &offset, valueize);
3034 
3035       /* Empty constructor.  Always fold to 0.  */
3036       if (ctor == error_mark_node)
3037 	return build_zero_cst (TREE_TYPE (t));
3038       /* We do not know the precise address.  */
3039       if (max_size == -1 || max_size != size)
3040 	return NULL_TREE;
3041       /* We cannot determine the ctor.  */
3042       if (!ctor)
3043 	return NULL_TREE;
3044 
3045       /* Out-of-bounds array access.  Value is undefined, but don't fold.  */
3046       if (offset < 0)
3047 	return NULL_TREE;
3048 
3049       return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3050 
3051     case REALPART_EXPR:
3052     case IMAGPART_EXPR:
3053       {
3054 	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3055 	if (c && TREE_CODE (c) == COMPLEX_CST)
3056 	  return fold_build1_loc (EXPR_LOCATION (t),
3057 			      TREE_CODE (t), TREE_TYPE (t), c);
3058 	break;
3059       }
3060 
3061     default:
3062       break;
3063     }
3064 
3065   return NULL_TREE;
3066 }
3067 
3068 tree
3069 fold_const_aggregate_ref (tree t)
3070 {
3071   return fold_const_aggregate_ref_1 (t, NULL);
3072 }
3073 
3074 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3075    is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3076    KNOWN_BINFO carries the binfo describing the true type of
3077    OBJ_TYPE_REF_OBJECT(REF).  */
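
/* For example, if each vtable entry occupies SIZE bits, TOKEN 2 selects
   the entry at bit offset OFFSET + 2 * SIZE within the DECL_INITIAL of
   the vtable VAR_DECL.  */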
3078 
3079 tree
3080 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3081 {
3082   unsigned HOST_WIDE_INT offset, size;
3083   tree v, fn;
3084 
3085   v = BINFO_VTABLE (known_binfo);
3086   /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
3087   if (!v)
3088     return NULL_TREE;
3089 
3090   if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3091     {
3092       offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3093       v = TREE_OPERAND (v, 0);
3094     }
3095   else
3096     offset = 0;
3097 
3098   if (TREE_CODE (v) != ADDR_EXPR)
3099     return NULL_TREE;
3100   v = TREE_OPERAND (v, 0);
3101 
3102   if (TREE_CODE (v) != VAR_DECL
3103       || !DECL_VIRTUAL_P (v)
3104       || !DECL_INITIAL (v)
3105       || DECL_INITIAL (v) == error_mark_node)
3106     return NULL_TREE;
3107   gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3108   size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3109   offset += token * size;
3110   fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3111 			    offset, size);
3112   if (!fn || integer_zerop (fn))
3113     return NULL_TREE;
3114   gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3115 	      || TREE_CODE (fn) == FDESC_EXPR);
3116   fn = TREE_OPERAND (fn, 0);
3117   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3118 
3119   /* When the cgraph node is missing and the function is not public, we
3120      cannot devirtualize.  This can happen in WHOPR when the actual method
3121      ends up in another partition, because we found the devirtualization
3122      possibility too late.  */
3123   if (!can_refer_decl_in_current_unit_p (fn))
3124     return NULL_TREE;
3125 
3126   return fn;
3127 }
3128 
3129 /* Return true iff VAL is a gimple expression that is known to be
3130    non-negative.  Restricted to floating-point inputs.  */
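
/* For example, an SSA name defined by 'y = x * x' is known non-negative,
   as is one defined by 'y = pow (x, 4.0)', because 4.0 is an even
   integer-valued real constant.  */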
3131 
3132 bool
3133 gimple_val_nonnegative_real_p (tree val)
3134 {
3135   gimple def_stmt;
3136 
3137   gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3138 
3139   /* Use existing logic for non-gimple trees.  */
3140   if (tree_expr_nonnegative_p (val))
3141     return true;
3142 
3143   if (TREE_CODE (val) != SSA_NAME)
3144     return false;
3145 
3146   /* Currently we look only at the immediately defining statement
3147      to make this determination, since recursion on defining
3148      statements of operands can lead to quadratic behavior in the
3149      worst case.  This is expected to catch almost all occurrences
3150      in practice.  It would be possible to implement limited-depth
3151      recursion if important cases are lost.  Alternatively, passes
3152      that need this information (such as the pow/powi lowering code
3153      in the cse_sincos pass) could be revised to provide it through
3154      dataflow propagation.  */
3155 
3156   def_stmt = SSA_NAME_DEF_STMT (val);
3157 
3158   if (is_gimple_assign (def_stmt))
3159     {
3160       tree op0, op1;
3161 
3162       /* See fold-const.c:tree_expr_nonnegative_p for additional
3163 	 cases that could be handled with recursion.  */
3164 
3165       switch (gimple_assign_rhs_code (def_stmt))
3166 	{
3167 	case ABS_EXPR:
3168 	  /* Always true for floating-point operands.  */
3169 	  return true;
3170 
3171 	case MULT_EXPR:
3172 	  /* True if the two operands are identical (since we are
3173 	     restricted to floating-point inputs).  */
3174 	  op0 = gimple_assign_rhs1 (def_stmt);
3175 	  op1 = gimple_assign_rhs2 (def_stmt);
3176 
3177 	  if (op0 == op1
3178 	      || operand_equal_p (op0, op1, 0))
3179 	    return true;
3180 
3181 	default:
3182 	  return false;
3183 	}
3184     }
3185   else if (is_gimple_call (def_stmt))
3186     {
3187       tree fndecl = gimple_call_fndecl (def_stmt);
3188       if (fndecl
3189 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3190 	{
3191 	  tree arg1;
3192 
3193 	  switch (DECL_FUNCTION_CODE (fndecl))
3194 	    {
3195 	    CASE_FLT_FN (BUILT_IN_ACOS):
3196 	    CASE_FLT_FN (BUILT_IN_ACOSH):
3197 	    CASE_FLT_FN (BUILT_IN_CABS):
3198 	    CASE_FLT_FN (BUILT_IN_COSH):
3199 	    CASE_FLT_FN (BUILT_IN_ERFC):
3200 	    CASE_FLT_FN (BUILT_IN_EXP):
3201 	    CASE_FLT_FN (BUILT_IN_EXP10):
3202 	    CASE_FLT_FN (BUILT_IN_EXP2):
3203 	    CASE_FLT_FN (BUILT_IN_FABS):
3204 	    CASE_FLT_FN (BUILT_IN_FDIM):
3205 	    CASE_FLT_FN (BUILT_IN_HYPOT):
3206 	    CASE_FLT_FN (BUILT_IN_POW10):
3207 	      return true;
3208 
3209 	    CASE_FLT_FN (BUILT_IN_SQRT):
3210 	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3211 		 negative inputs.  */
3212 	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3213 		return true;
3214 
3215 	      break;
3216 
3217 	    CASE_FLT_FN (BUILT_IN_POWI):
3218 	      /* True if the second argument is an even integer.  */
3219 	      arg1 = gimple_call_arg (def_stmt, 1);
3220 
3221 	      if (TREE_CODE (arg1) == INTEGER_CST
3222 		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3223 		return true;
3224 
3225 	      break;
3226 
3227 	    CASE_FLT_FN (BUILT_IN_POW):
3228 	      /* True if the second argument is an even integer-valued
3229 		 real.  */
3230 	      arg1 = gimple_call_arg (def_stmt, 1);
3231 
3232 	      if (TREE_CODE (arg1) == REAL_CST)
3233 		{
3234 		  REAL_VALUE_TYPE c;
3235 		  HOST_WIDE_INT n;
3236 
3237 		  c = TREE_REAL_CST (arg1);
3238 		  n = real_to_integer (&c);
3239 
3240 		  if ((n & 1) == 0)
3241 		    {
3242 		      REAL_VALUE_TYPE cint;
3243 		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3244 		      if (real_identical (&c, &cint))
3245 			return true;
3246 		    }
3247 		}
3248 
3249 	      break;
3250 
3251 	    default:
3252 	      return false;
3253 	    }
3254 	}
3255     }
3256 
3257   return false;
3258 }
3259