/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
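
/* For illustration (editorial note, not in the original source): the
   four low bits encode LT, EQ, GT and UNORD, so the compound codes are
   bitwise ORs of those primitives, e.g.
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD); ANDing
   or ORing two codes therefore combines the outcome sets for which the
   comparisons hold.  */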

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in
   place; if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
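
/* For instance (editorial illustration): given INTEGER_CSTs of value
   12 and 4, div_if_zero_remainder returns an INTEGER_CST of value 3;
   given 12 and 5 the remainder is nonzero, so NULL_TREE is returned
   and no division is performed.  */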

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
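
/* A typical deferral pattern looks like the following sketch (editorial
   illustration, not code from this file; `stmt' stands for whatever
   statement the caller is working on):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (folded != NULL_TREE, stmt, 0);

   so a warning that signed overflow was assumed undefined is emitted
   only if the speculative fold result is kept; callers that discard
   the result use fold_undefer_and_ignore_overflow_warnings below.  */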

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
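
/* For example (editorial note): sin is odd (-sin(x) == sin(-x)), so
   negate_mathfn_p returns true for CFN_SIN, while cos is even and not
   listed above, so CFN_COS yields false.  The rint family satisfies
   rint(-x) == -rint(x) only when the result cannot depend on the
   rounding direction, hence the !flag_rounding_math guard.  */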

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
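
/* Illustration (editorial): in a 32-bit signed type, only INT_MIN (the
   lone sign-bit pattern) fails this check, since -INT_MIN is not
   representable; all unsigned INTEGER_CSTs return false up front.  */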

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but it does after negating one
	 operand if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
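
/* Illustrative examples (editorial): a signed INTEGER_CST such as 5
   satisfies negate_expr_p (it negates without overflow) but INT_MIN
   does not; A - B satisfies it only for non-integral types or types
   where overflow wraps, in which case -(A - B) can become B - A.  */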

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
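
/* Usage note (editorial): unlike fold_negate_expr, negate_expr never
   returns NULL_TREE for a non-null T; when no simplification applies
   it simply wraps T in an explicit NEGATE_EXPR, so negating a plain
   variable x yields the tree -x.  */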

/* Split a tree IN into constant, literal, and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
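
/* Worked illustration (editorial, not from the original source):
   splitting IN = x + 7 with CODE == PLUS_EXPR stores the INTEGER_CST 7
   in *LITP and returns x; splitting IN = x - 7 records the subtracted
   literal in *MINUS_LITP instead, again returning x as the variable
   part.  */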

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs.  */

static tree
int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    return int_const_binop_2 (code, arg1, arg2, overflowable);

  gcc_assert (NUM_POLY_INT_COEFFS != 1);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      poly_wide_int res;
      bool overflow;
      tree type = TREE_TYPE (arg1);
      signop sign = TYPE_SIGN (type);
      switch (code)
	{
	case PLUS_EXPR:
	  res = wi::add (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MINUS_EXPR:
	  res = wi::sub (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MULT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg1),
			   wi::to_wide (arg2), sign, &overflow);
	  else if (TREE_CODE (arg1) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg2),
			   wi::to_wide (arg1), sign, &overflow);
	  else
	    return NULL_TREE;
	  break;

	case LSHIFT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
	  else
	    return NULL_TREE;
	  break;

	case BIT_IOR_EXPR:
	  if (TREE_CODE (arg2) != INTEGER_CST
	      || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			     &res))
	    return NULL_TREE;
	  break;

	default:
	  return NULL_TREE;
	}
      return force_fit_type (type, res, overflowable,
			     (((sign == SIGNED || overflowable == -1)
			       && overflow)
			      | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
    }

  return NULL_TREE;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
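
/* For instance (editorial illustration): int_const_binop (PLUS_EXPR,
   five, three), with FIVE and THREE INTEGER_CSTs of the same type,
   yields an INTEGER_CST of value eight, while any of the division or
   modulus codes with a zero second operand yields NULL_TREE rather
   than folding.  */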

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
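
/* Illustration (editorial): (a + b) << c == (a << c) + (b << c) in
   modular arithmetic, so LSHIFT_EXPR distributes over addition in
   operand 1, but c << (a + b) is not (c << a) + (c << b), so for
   opno == 2 the function returns false.  */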

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d1.signalling = 0;
	t = build_real (type, d1);
	return t;
      }
      else if (REAL_VALUE_ISNAN (d2))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d2.signalling = 0;
	t = build_real (type, d2);
	return t;
      }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru. */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
					const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
	{
	  offset_int res = wi::sub (wi::to_offset (arg1),
				    wi::to_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
1713 
1714 /* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
1715    Return NULL_TREE if computing the constant is not possible.  */
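/* A minimal usage sketch (the operand here is purely illustrative):

     tree five = build_int_cst (integer_type_node, 5);
     tree neg = const_unop (NEGATE_EXPR, integer_type_node, five);

   leaves NEG holding the INTEGER_CST -5, while a NULL_TREE result
   simply means the operation could not be folded to a constant.  */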
1716 
1717 tree
1718 const_unop (enum tree_code code, tree type, tree arg0)
1719 {
1720   /* Don't perform the operation, other than NEGATE and ABS, if
1721      flag_signaling_nans is on and the operand is a signaling NaN.  */
1722   if (TREE_CODE (arg0) == REAL_CST
1723       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1724       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1725       && code != NEGATE_EXPR
1726       && code != ABS_EXPR)
1727     return NULL_TREE;
1728 
1729   switch (code)
1730     {
1731     CASE_CONVERT:
1732     case FLOAT_EXPR:
1733     case FIX_TRUNC_EXPR:
1734     case FIXED_CONVERT_EXPR:
1735       return fold_convert_const (code, type, arg0);
1736 
1737     case ADDR_SPACE_CONVERT_EXPR:
1738       /* If the source address is 0, and the source address space
1739 	 cannot have a valid object at 0, fold to a null of the dest type.  */
1740       if (integer_zerop (arg0)
1741 	  && !(targetm.addr_space.zero_address_valid
1742 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1743 	return fold_convert_const (code, type, arg0);
1744       break;
1745 
1746     case VIEW_CONVERT_EXPR:
1747       return fold_view_convert_expr (type, arg0);
1748 
1749     case NEGATE_EXPR:
1750       {
1751 	/* Can't call fold_negate_const directly here as that doesn't
1752 	   handle all cases and we might not be able to negate some
1753 	   constants.  */
1754 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1755 	if (tem && CONSTANT_CLASS_P (tem))
1756 	  return tem;
1757 	break;
1758       }
1759 
1760     case ABS_EXPR:
1761       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1762 	return fold_abs_const (arg0, type);
1763       break;
1764 
1765     case CONJ_EXPR:
1766       if (TREE_CODE (arg0) == COMPLEX_CST)
1767 	{
1768 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1769 					  TREE_TYPE (type));
1770 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1771 	}
1772       break;
1773 
1774     case BIT_NOT_EXPR:
1775       if (TREE_CODE (arg0) == INTEGER_CST)
1776 	return fold_not_const (arg0, type);
1777       else if (POLY_INT_CST_P (arg0))
1778 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1779       /* Perform BIT_NOT_EXPR on each element individually.  */
1780       else if (TREE_CODE (arg0) == VECTOR_CST)
1781 	{
1782 	  tree elem;
1783 
1784 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1785 	  tree_vector_builder elements;
1786 	  elements.new_unary_operation (type, arg0, true);
1787 	  unsigned int i, count = elements.encoded_nelts ();
1788 	  for (i = 0; i < count; ++i)
1789 	    {
1790 	      elem = VECTOR_CST_ELT (arg0, i);
1791 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1792 	      if (elem == NULL_TREE)
1793 		break;
1794 	      elements.quick_push (elem);
1795 	    }
1796 	  if (i == count)
1797 	    return elements.build ();
1798 	}
1799       break;
1800 
1801     case TRUTH_NOT_EXPR:
1802       if (TREE_CODE (arg0) == INTEGER_CST)
1803 	return constant_boolean_node (integer_zerop (arg0), type);
1804       break;
1805 
1806     case REALPART_EXPR:
1807       if (TREE_CODE (arg0) == COMPLEX_CST)
1808 	return fold_convert (type, TREE_REALPART (arg0));
1809       break;
1810 
1811     case IMAGPART_EXPR:
1812       if (TREE_CODE (arg0) == COMPLEX_CST)
1813 	return fold_convert (type, TREE_IMAGPART (arg0));
1814       break;
1815 
1816     case VEC_UNPACK_LO_EXPR:
1817     case VEC_UNPACK_HI_EXPR:
1818     case VEC_UNPACK_FLOAT_LO_EXPR:
1819     case VEC_UNPACK_FLOAT_HI_EXPR:
1820       {
1821 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1822 	enum tree_code subcode;
1823 
1824 	if (TREE_CODE (arg0) != VECTOR_CST)
1825 	  return NULL_TREE;
1826 
1827 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1828 	  return NULL_TREE;
1829 	out_nelts = in_nelts / 2;
1830 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1831 
1832 	unsigned int offset = 0;
1833 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1834 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
1835 	  offset = out_nelts;
1836 
1837 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1838 	  subcode = NOP_EXPR;
1839 	else
1840 	  subcode = FLOAT_EXPR;
1841 
1842 	tree_vector_builder elts (type, out_nelts, 1);
1843 	for (i = 0; i < out_nelts; i++)
1844 	  {
1845 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1846 					   VECTOR_CST_ELT (arg0, i + offset));
1847 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1848 	      return NULL_TREE;
1849 	    elts.quick_push (elt);
1850 	  }
1851 
1852 	return elts.build ();
1853       }
1854 
1855     case VEC_DUPLICATE_EXPR:
1856       if (CONSTANT_CLASS_P (arg0))
1857 	return build_vector_from_val (type, arg0);
1858       return NULL_TREE;
1859 
1860     default:
1861       break;
1862     }
1863 
1864   return NULL_TREE;
1865 }
1866 
1867 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1868    indicates which particular sizetype to create.  */
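/* For instance, the familiar size_int (16) wrapper is just
   size_int_kind (16, stk_sizetype) and yields a sizetype INTEGER_CST
   of value 16; bitsize_int selects stk_bitsizetype instead.  */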
1869 
1870 tree
1871 size_int_kind (poly_int64 number, enum size_type_kind kind)
1872 {
1873   return build_int_cst (sizetype_tab[(int) kind], number);
1874 }
1875 
1876 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1877    is a tree code.  The type of the result is taken from the operands.
1878    Both must be equivalent integer types, as per int_binop_types_match_p.
1879    If the operands are constant, so is the result.  */
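/* A small worked example: size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) folds immediately to the sizetype constant 12, and
   adding size_int (0) to another constant takes the even faster
   identity paths below.  */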
1880 
1881 tree
1882 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1883 {
1884   tree type = TREE_TYPE (arg0);
1885 
1886   if (arg0 == error_mark_node || arg1 == error_mark_node)
1887     return error_mark_node;
1888 
1889   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1890                                        TREE_TYPE (arg1)));
1891 
1892   /* Handle the special case of two poly_int constants faster.  */
1893   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1894     {
1895       /* And some specific cases even faster than that.  */
1896       if (code == PLUS_EXPR)
1897 	{
1898 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1899 	    return arg1;
1900 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1901 	    return arg0;
1902 	}
1903       else if (code == MINUS_EXPR)
1904 	{
1905 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1906 	    return arg0;
1907 	}
1908       else if (code == MULT_EXPR)
1909 	{
1910 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1911 	    return arg1;
1912 	}
1913 
1914       /* Handle the general case of two integer constants.  For sizetype
1915          constant calculations we always want to know about overflow,
1916 	 even in the unsigned case.  */
1917       tree res = int_const_binop_1 (code, arg0, arg1, -1);
1918       if (res != NULL_TREE)
1919 	return res;
1920     }
1921 
1922   return fold_build2_loc (loc, code, type, arg0, arg1);
1923 }
1924 
1925 /* Given two values, either both of sizetype or both of bitsizetype,
1926    compute the difference between the two values.  Return the value
1927    in the signed type corresponding to the type of the operands.  */
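/* E.g. for two sizetype values A and B, size_diffop (a, b) produces
   an ssizetype result, so that B > A yields a negative constant
   rather than a huge wrapped-around unsigned value.  */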
1928 
1929 tree
1930 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1931 {
1932   tree type = TREE_TYPE (arg0);
1933   tree ctype;
1934 
1935   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1936 				       TREE_TYPE (arg1)));
1937 
1938   /* If the type is already signed, just do the simple thing.  */
1939   if (!TYPE_UNSIGNED (type))
1940     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1941 
1942   if (type == sizetype)
1943     ctype = ssizetype;
1944   else if (type == bitsizetype)
1945     ctype = sbitsizetype;
1946   else
1947     ctype = signed_type_for (type);
1948 
1949   /* If either operand is not a constant, do the conversions to the signed
1950      type and subtract.  The hardware will do the right thing with any
1951      overflow in the subtraction.  */
1952   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1953     return size_binop_loc (loc, MINUS_EXPR,
1954 			   fold_convert_loc (loc, ctype, arg0),
1955 			   fold_convert_loc (loc, ctype, arg1));
1956 
1957   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1958      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1959      overflow) and negate (which can't either).  Special-case a result
1960      of zero while we're here.  */
1961   if (tree_int_cst_equal (arg0, arg1))
1962     return build_int_cst (ctype, 0);
1963   else if (tree_int_cst_lt (arg1, arg0))
1964     return fold_convert_loc (loc, ctype,
1965 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1966   else
1967     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1968 			   fold_convert_loc (loc, ctype,
1969 					     size_binop_loc (loc,
1970 							     MINUS_EXPR,
1971 							     arg1, arg0)));
1972 }
1973 
1974 /* A subroutine of fold_convert_const handling conversions of an
1975    INTEGER_CST to another integer type.  */
1976 
1977 static tree
1978 fold_convert_const_int_from_int (tree type, const_tree arg1)
1979 {
1980   /* Given an integer constant, make a new constant with the new type,
1981      appropriately sign-extended or truncated.  Use widest_int
1982      so that any extension is done according to ARG1's type.  */
1983   return force_fit_type (type, wi::to_widest (arg1),
1984 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1985 			 TREE_OVERFLOW (arg1));
1986 }
1987 
1988 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1989    to an integer type.  */
1990 
1991 static tree
1992 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1993 {
1994   bool overflow = false;
1995   tree t;
1996 
1997   /* The following code implements the floating point to integer
1998      conversion rules required by the Java Language Specification:
1999      IEEE NaNs are mapped to zero, and values that overflow
2000      the target precision saturate, i.e. values greater than
2001      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2002      are mapped to INT_MIN.  These semantics are allowed by the
2003      C and C++ standards that simply state that the behavior of
2004      FP-to-integer conversion is unspecified upon overflow.  */
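  /* Concretely, with a 32-bit signed TYPE: 1.0e30 folds to INT_MAX,
     -1.0e30 folds to INT_MIN, and any NaN folds to 0, each with
     TREE_OVERFLOW set on the result.  */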
2005 
2006   wide_int val;
2007   REAL_VALUE_TYPE r;
2008   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2009 
2010   switch (code)
2011     {
2012     case FIX_TRUNC_EXPR:
2013       real_trunc (&r, VOIDmode, &x);
2014       break;
2015 
2016     default:
2017       gcc_unreachable ();
2018     }
2019 
2020   /* If R is NaN, return zero and show we have an overflow.  */
2021   if (REAL_VALUE_ISNAN (r))
2022     {
2023       overflow = true;
2024       val = wi::zero (TYPE_PRECISION (type));
2025     }
2026 
2027   /* See if R is less than the lower bound or greater than the
2028      upper bound.  */
2029 
2030   if (! overflow)
2031     {
2032       tree lt = TYPE_MIN_VALUE (type);
2033       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2034       if (real_less (&r, &l))
2035 	{
2036 	  overflow = true;
2037 	  val = wi::to_wide (lt);
2038 	}
2039     }
2040 
2041   if (! overflow)
2042     {
2043       tree ut = TYPE_MAX_VALUE (type);
2044       if (ut)
2045 	{
2046 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 	  if (real_less (&u, &r))
2048 	    {
2049 	      overflow = true;
2050 	      val = wi::to_wide (ut);
2051 	    }
2052 	}
2053     }
2054 
2055   if (! overflow)
2056     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2057 
2058   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2059   return t;
2060 }
2061 
2062 /* A subroutine of fold_convert_const handling conversions of a
2063    FIXED_CST to an integer type.  */
2064 
2065 static tree
2066 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2067 {
2068   tree t;
2069   double_int temp, temp_trunc;
2070   scalar_mode mode;
2071 
2072   /* Right-shift FIXED_CST by fbit bits into temp.  */
2073   temp = TREE_FIXED_CST (arg1).data;
2074   mode = TREE_FIXED_CST (arg1).mode;
2075   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2076     {
2077       temp = temp.rshift (GET_MODE_FBIT (mode),
2078 			  HOST_BITS_PER_DOUBLE_INT,
2079 			  SIGNED_FIXED_POINT_MODE_P (mode));
2080 
2081       /* Left-shift temp by fbit bits into temp_trunc.  */
2082       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2083 				HOST_BITS_PER_DOUBLE_INT,
2084 				SIGNED_FIXED_POINT_MODE_P (mode));
2085     }
2086   else
2087     {
2088       temp = double_int_zero;
2089       temp_trunc = double_int_zero;
2090     }
2091 
2092   /* If FIXED_CST is negative, we need to round the value toward 0:
2093      if the fractional bits are nonzero, add 1 to temp.  */
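  /* For example, a signed fixed-point -3.5: the arithmetic shift
     truncates toward negative infinity, giving -4, and the nonzero
     fractional bits then bump the result to -3, i.e. -3.5 rounded
     toward zero.  */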
2094   if (SIGNED_FIXED_POINT_MODE_P (mode)
2095       && temp_trunc.is_negative ()
2096       && TREE_FIXED_CST (arg1).data != temp_trunc)
2097     temp += double_int_one;
2098 
2099   /* Given a fixed-point constant, make a new constant with the new type,
2100      appropriately sign-extended or truncated.  */
2101   t = force_fit_type (type, temp, -1,
2102 		      (temp.is_negative ()
2103 		       && (TYPE_UNSIGNED (type)
2104 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2105 		      | TREE_OVERFLOW (arg1));
2106 
2107   return t;
2108 }
2109 
2110 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2111    to another floating point type.  */
2112 
2113 static tree
2114 fold_convert_const_real_from_real (tree type, const_tree arg1)
2115 {
2116   REAL_VALUE_TYPE value;
2117   tree t;
2118 
2119   /* Don't perform the operation if flag_signaling_nans is on
2120      and the operand is a signaling NaN.  */
2121   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2122       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2123     return NULL_TREE;
2124 
2125   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2126   t = build_real (type, value);
2127 
2128   /* If converting an infinity or NAN to a representation that doesn't
2129      have one, set the overflow bit so that we can produce some kind of
2130      error message at the appropriate point if necessary.  It's not the
2131      most user-friendly message, but it's better than nothing.  */
2132   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2133       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2134     TREE_OVERFLOW (t) = 1;
2135   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2136 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2137     TREE_OVERFLOW (t) = 1;
2138   /* Regular overflow: the conversion produced an infinity in a mode
2139      that can't represent one.  */
2140   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2141 	   && REAL_VALUE_ISINF (value)
2142 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2143     TREE_OVERFLOW (t) = 1;
2144   else
2145     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2146   return t;
2147 }
2148 
2149 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2150    to a floating point type.  */
2151 
2152 static tree
2153 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2154 {
2155   REAL_VALUE_TYPE value;
2156   tree t;
2157 
2158   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2159 			   &TREE_FIXED_CST (arg1));
2160   t = build_real (type, value);
2161 
2162   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2163   return t;
2164 }
2165 
2166 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2167    to another fixed-point type.  */
2168 
2169 static tree
2170 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2171 {
2172   FIXED_VALUE_TYPE value;
2173   tree t;
2174   bool overflow_p;
2175 
2176   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2177 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2178   t = build_fixed (type, value);
2179 
2180   /* Propagate overflow flags.  */
2181   if (overflow_p | TREE_OVERFLOW (arg1))
2182     TREE_OVERFLOW (t) = 1;
2183   return t;
2184 }
2185 
2186 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2187    to a fixed-point type.  */
2188 
2189 static tree
2190 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2191 {
2192   FIXED_VALUE_TYPE value;
2193   tree t;
2194   bool overflow_p;
2195   double_int di;
2196 
2197   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2198 
2199   di.low = TREE_INT_CST_ELT (arg1, 0);
2200   if (TREE_INT_CST_NUNITS (arg1) == 1)
2201     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2202   else
2203     di.high = TREE_INT_CST_ELT (arg1, 1);
2204 
2205   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2206 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2207 				       TYPE_SATURATING (type));
2208   t = build_fixed (type, value);
2209 
2210   /* Propagate overflow flags.  */
2211   if (overflow_p | TREE_OVERFLOW (arg1))
2212     TREE_OVERFLOW (t) = 1;
2213   return t;
2214 }
2215 
2216 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2217    to a fixed-point type.  */
2218 
2219 static tree
2220 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2221 {
2222   FIXED_VALUE_TYPE value;
2223   tree t;
2224   bool overflow_p;
2225 
2226   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2227 					&TREE_REAL_CST (arg1),
2228 					TYPE_SATURATING (type));
2229   t = build_fixed (type, value);
2230 
2231   /* Propagate overflow flags.  */
2232   if (overflow_p | TREE_OVERFLOW (arg1))
2233     TREE_OVERFLOW (t) = 1;
2234   return t;
2235 }
2236 
2237 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2238    type TYPE.  If no simplification can be done return NULL_TREE.  */
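/* A usage sketch (the types are chosen purely for illustration):

     fold_convert_const (NOP_EXPR, long_integer_type_node,
			 build_int_cst (integer_type_node, 7))

   yields the long constant 7, while an unsupported combination of
   CODE, TYPE and ARG1 simply yields NULL_TREE.  */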
2239 
2240 static tree
2241 fold_convert_const (enum tree_code code, tree type, tree arg1)
2242 {
2243   tree arg_type = TREE_TYPE (arg1);
2244   if (arg_type == type)
2245     return arg1;
2246 
2247   /* We can't widen types, since the runtime value could overflow the
2248      original type before being extended to the new type.  */
2249   if (POLY_INT_CST_P (arg1)
2250       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2251       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2252     return build_poly_int_cst (type,
2253 			       poly_wide_int::from (poly_int_cst_value (arg1),
2254 						    TYPE_PRECISION (type),
2255 						    TYPE_SIGN (arg_type)));
2256 
2257   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2258       || TREE_CODE (type) == OFFSET_TYPE)
2259     {
2260       if (TREE_CODE (arg1) == INTEGER_CST)
2261 	return fold_convert_const_int_from_int (type, arg1);
2262       else if (TREE_CODE (arg1) == REAL_CST)
2263 	return fold_convert_const_int_from_real (code, type, arg1);
2264       else if (TREE_CODE (arg1) == FIXED_CST)
2265 	return fold_convert_const_int_from_fixed (type, arg1);
2266     }
2267   else if (TREE_CODE (type) == REAL_TYPE)
2268     {
2269       if (TREE_CODE (arg1) == INTEGER_CST)
2270 	return build_real_from_int_cst (type, arg1);
2271       else if (TREE_CODE (arg1) == REAL_CST)
2272 	return fold_convert_const_real_from_real (type, arg1);
2273       else if (TREE_CODE (arg1) == FIXED_CST)
2274 	return fold_convert_const_real_from_fixed (type, arg1);
2275     }
2276   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2277     {
2278       if (TREE_CODE (arg1) == FIXED_CST)
2279 	return fold_convert_const_fixed_from_fixed (type, arg1);
2280       else if (TREE_CODE (arg1) == INTEGER_CST)
2281 	return fold_convert_const_fixed_from_int (type, arg1);
2282       else if (TREE_CODE (arg1) == REAL_CST)
2283 	return fold_convert_const_fixed_from_real (type, arg1);
2284     }
2285   else if (TREE_CODE (type) == VECTOR_TYPE)
2286     {
2287       if (TREE_CODE (arg1) == VECTOR_CST
2288 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2289 	{
2290 	  tree elttype = TREE_TYPE (type);
2291 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2292 	  /* We can't handle steps directly when extending, since the
2293 	     values need to wrap at the original precision first.  */
2294 	  bool step_ok_p
2295 	    = (INTEGRAL_TYPE_P (elttype)
2296 	       && INTEGRAL_TYPE_P (arg1_elttype)
2297 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2298 	  tree_vector_builder v;
2299 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2300 	    return NULL_TREE;
2301 	  unsigned int len = v.encoded_nelts ();
2302 	  for (unsigned int i = 0; i < len; ++i)
2303 	    {
2304 	      tree elt = VECTOR_CST_ELT (arg1, i);
2305 	      tree cvt = fold_convert_const (code, elttype, elt);
2306 	      if (cvt == NULL_TREE)
2307 		return NULL_TREE;
2308 	      v.quick_push (cvt);
2309 	    }
2310 	  return v.build ();
2311 	}
2312     }
2313   return NULL_TREE;
2314 }
2315 
2316 /* Construct a vector of zero elements of vector type TYPE.  */
2317 
2318 static tree
2319 build_zero_vector (tree type)
2320 {
2321   tree t;
2322 
2323   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2324   return build_vector_from_val (type, t);
2325 }
2326 
2327 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2328 
2329 bool
2330 fold_convertible_p (const_tree type, const_tree arg)
2331 {
2332   tree orig = TREE_TYPE (arg);
2333 
2334   if (type == orig)
2335     return true;
2336 
2337   if (TREE_CODE (arg) == ERROR_MARK
2338       || TREE_CODE (type) == ERROR_MARK
2339       || TREE_CODE (orig) == ERROR_MARK)
2340     return false;
2341 
2342   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2343     return true;
2344 
2345   switch (TREE_CODE (type))
2346     {
2347     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2348     case POINTER_TYPE: case REFERENCE_TYPE:
2349     case OFFSET_TYPE:
2350       return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2351 	      || TREE_CODE (orig) == OFFSET_TYPE);
2352 
2353     case REAL_TYPE:
2354     case FIXED_POINT_TYPE:
2355     case VECTOR_TYPE:
2356     case VOID_TYPE:
2357       return TREE_CODE (type) == TREE_CODE (orig);
2358 
2359     default:
2360       return false;
2361     }
2362 }
2363 
2364 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2365    simple conversions in preference to calling the front-end's convert.  */
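/* E.g. fold_convert (double_type_node, integer_one_node) folds at
   once to the REAL_CST 1.0 rather than leaving a FLOAT_EXPR around
   for later folding.  */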
2366 
2367 tree
2368 fold_convert_loc (location_t loc, tree type, tree arg)
2369 {
2370   tree orig = TREE_TYPE (arg);
2371   tree tem;
2372 
2373   if (type == orig)
2374     return arg;
2375 
2376   if (TREE_CODE (arg) == ERROR_MARK
2377       || TREE_CODE (type) == ERROR_MARK
2378       || TREE_CODE (orig) == ERROR_MARK)
2379     return error_mark_node;
2380 
2381   switch (TREE_CODE (type))
2382     {
2383     case POINTER_TYPE:
2384     case REFERENCE_TYPE:
2385       /* Handle conversions between pointers to different address spaces.  */
2386       if (POINTER_TYPE_P (orig)
2387 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2388 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2389 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2390       /* fall through */
2391 
2392     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2393     case OFFSET_TYPE:
2394       if (TREE_CODE (arg) == INTEGER_CST)
2395 	{
2396 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2397 	  if (tem != NULL_TREE)
2398 	    return tem;
2399 	}
2400       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2401 	  || TREE_CODE (orig) == OFFSET_TYPE)
2402 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2403       if (TREE_CODE (orig) == COMPLEX_TYPE)
2404 	return fold_convert_loc (loc, type,
2405 				 fold_build1_loc (loc, REALPART_EXPR,
2406 						  TREE_TYPE (orig), arg));
2407       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2408 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2409       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2410 
2411     case REAL_TYPE:
2412       if (TREE_CODE (arg) == INTEGER_CST)
2413 	{
2414 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2415 	  if (tem != NULL_TREE)
2416 	    return tem;
2417 	}
2418       else if (TREE_CODE (arg) == REAL_CST)
2419 	{
2420 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2421 	  if (tem != NULL_TREE)
2422 	    return tem;
2423 	}
2424       else if (TREE_CODE (arg) == FIXED_CST)
2425 	{
2426 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2427 	  if (tem != NULL_TREE)
2428 	    return tem;
2429 	}
2430 
2431       switch (TREE_CODE (orig))
2432 	{
2433 	case INTEGER_TYPE:
2434 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2435 	case POINTER_TYPE: case REFERENCE_TYPE:
2436 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2437 
2438 	case REAL_TYPE:
2439 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2440 
2441 	case FIXED_POINT_TYPE:
2442 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2443 
2444 	case COMPLEX_TYPE:
2445 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2446 	  return fold_convert_loc (loc, type, tem);
2447 
2448 	default:
2449 	  gcc_unreachable ();
2450 	}
2451 
2452     case FIXED_POINT_TYPE:
2453       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2454 	  || TREE_CODE (arg) == REAL_CST)
2455 	{
2456 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2457 	  if (tem != NULL_TREE)
2458 	    goto fold_convert_exit;
2459 	}
2460 
2461       switch (TREE_CODE (orig))
2462 	{
2463 	case FIXED_POINT_TYPE:
2464 	case INTEGER_TYPE:
2465 	case ENUMERAL_TYPE:
2466 	case BOOLEAN_TYPE:
2467 	case REAL_TYPE:
2468 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2469 
2470 	case COMPLEX_TYPE:
2471 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2472 	  return fold_convert_loc (loc, type, tem);
2473 
2474 	default:
2475 	  gcc_unreachable ();
2476 	}
2477 
2478     case COMPLEX_TYPE:
2479       switch (TREE_CODE (orig))
2480 	{
2481 	case INTEGER_TYPE:
2482 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2483 	case POINTER_TYPE: case REFERENCE_TYPE:
2484 	case REAL_TYPE:
2485 	case FIXED_POINT_TYPE:
2486 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2487 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2488 			      fold_convert_loc (loc, TREE_TYPE (type),
2489 					    integer_zero_node));
2490 	case COMPLEX_TYPE:
2491 	  {
2492 	    tree rpart, ipart;
2493 
2494 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2495 	      {
2496 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2497 				      TREE_OPERAND (arg, 0));
2498 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2499 				      TREE_OPERAND (arg, 1));
2500 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2501 	      }
2502 
2503 	    arg = save_expr (arg);
2504 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2505 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2506 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2507 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2508 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2509 	  }
2510 
2511 	default:
2512 	  gcc_unreachable ();
2513 	}
2514 
2515     case VECTOR_TYPE:
2516       if (integer_zerop (arg))
2517 	return build_zero_vector (type);
2518       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2519       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 		  || TREE_CODE (orig) == VECTOR_TYPE);
2521       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2522 
2523     case VOID_TYPE:
2524       tem = fold_ignored_result (arg);
2525       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2526 
2527     default:
2528       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2529 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2530       gcc_unreachable ();
2531     }
2532  fold_convert_exit:
2533   protected_set_expr_location_unshare (tem, loc);
2534   return tem;
2535 }
2536 
2537 /* Return false if expr can be assumed not to be an lvalue, true
2538    otherwise.  */
2539 
2540 static bool
2541 maybe_lvalue_p (const_tree x)
2542 {
2543   /* We only need to wrap lvalue tree codes.  */
2544   switch (TREE_CODE (x))
2545   {
2546   case VAR_DECL:
2547   case PARM_DECL:
2548   case RESULT_DECL:
2549   case LABEL_DECL:
2550   case FUNCTION_DECL:
2551   case SSA_NAME:
2552 
2553   case COMPONENT_REF:
2554   case MEM_REF:
2555   case INDIRECT_REF:
2556   case ARRAY_REF:
2557   case ARRAY_RANGE_REF:
2558   case BIT_FIELD_REF:
2559   case OBJ_TYPE_REF:
2560 
2561   case REALPART_EXPR:
2562   case IMAGPART_EXPR:
2563   case PREINCREMENT_EXPR:
2564   case PREDECREMENT_EXPR:
2565   case SAVE_EXPR:
2566   case TRY_CATCH_EXPR:
2567   case WITH_CLEANUP_EXPR:
2568   case COMPOUND_EXPR:
2569   case MODIFY_EXPR:
2570   case TARGET_EXPR:
2571   case COND_EXPR:
2572   case BIND_EXPR:
2573     break;
2574 
2575   default:
2576     /* Assume the worst for front-end tree codes.  */
2577     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2578       break;
2579     return false;
2580   }
2581 
2582   return true;
2583 }
2584 
2585 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2586 
2587 tree
2588 non_lvalue_loc (location_t loc, tree x)
2589 {
2590   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2591      us.  */
2592   if (in_gimple_form)
2593     return x;
2594 
2595   if (! maybe_lvalue_p (x))
2596     return x;
2597   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2598 }
2599 
2600 /* When pedantic, return an expr equal to X but certainly not valid as a
2601    pedantic lvalue.  Otherwise, return X.  */
2602 
2603 static tree
2604 pedantic_non_lvalue_loc (location_t loc, tree x)
2605 {
2606   return protected_set_expr_location_unshare (x, loc);
2607 }
2608 
2609 /* Given a tree comparison code, return the code that is the logical inverse.
2610    It is generally not safe to do this for floating-point comparisons, except
2611    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2612    ERROR_MARK in this case.  */
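/* For example, when NaNs are not honored the inverse of LT_EXPR is
   GE_EXPR, but when they are it must be UNGE_EXPR, since !(x < y)
   also holds when x and y are unordered.  */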
2613 
2614 enum tree_code
2615 invert_tree_comparison (enum tree_code code, bool honor_nans)
2616 {
2617   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2618       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2619     return ERROR_MARK;
2620 
2621   switch (code)
2622     {
2623     case EQ_EXPR:
2624       return NE_EXPR;
2625     case NE_EXPR:
2626       return EQ_EXPR;
2627     case GT_EXPR:
2628       return honor_nans ? UNLE_EXPR : LE_EXPR;
2629     case GE_EXPR:
2630       return honor_nans ? UNLT_EXPR : LT_EXPR;
2631     case LT_EXPR:
2632       return honor_nans ? UNGE_EXPR : GE_EXPR;
2633     case LE_EXPR:
2634       return honor_nans ? UNGT_EXPR : GT_EXPR;
2635     case LTGT_EXPR:
2636       return UNEQ_EXPR;
2637     case UNEQ_EXPR:
2638       return LTGT_EXPR;
2639     case UNGT_EXPR:
2640       return LE_EXPR;
2641     case UNGE_EXPR:
2642       return LT_EXPR;
2643     case UNLT_EXPR:
2644       return GE_EXPR;
2645     case UNLE_EXPR:
2646       return GT_EXPR;
2647     case ORDERED_EXPR:
2648       return UNORDERED_EXPR;
2649     case UNORDERED_EXPR:
2650       return ORDERED_EXPR;
2651     default:
2652       gcc_unreachable ();
2653     }
2654 }
2655 
2656 /* Similar, but return the comparison that results if the operands are
2657    swapped.  This is safe for floating-point.  */
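/* E.g. rewriting x < y as y > x: LT_EXPR maps to GT_EXPR, while
   symmetric codes such as EQ_EXPR and UNORDERED_EXPR map to
   themselves.  */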
2658 
2659 enum tree_code
2660 swap_tree_comparison (enum tree_code code)
2661 {
2662   switch (code)
2663     {
2664     case EQ_EXPR:
2665     case NE_EXPR:
2666     case ORDERED_EXPR:
2667     case UNORDERED_EXPR:
2668     case LTGT_EXPR:
2669     case UNEQ_EXPR:
2670       return code;
2671     case GT_EXPR:
2672       return LT_EXPR;
2673     case GE_EXPR:
2674       return LE_EXPR;
2675     case LT_EXPR:
2676       return GT_EXPR;
2677     case LE_EXPR:
2678       return GE_EXPR;
2679     case UNGT_EXPR:
2680       return UNLT_EXPR;
2681     case UNGE_EXPR:
2682       return UNLE_EXPR;
2683     case UNLT_EXPR:
2684       return UNGT_EXPR;
2685     case UNLE_EXPR:
2686       return UNGE_EXPR;
2687     default:
2688       gcc_unreachable ();
2689     }
2690 }
2691 
2692 
2693 /* Convert a comparison tree code from an enum tree_code representation
2694    into a compcode bit-based encoding.  This function is the inverse of
2695    compcode_to_comparison.  */
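/* The encoding makes LE the bitwise OR of LT and EQ, i.e.
   COMPCODE_LE (3) == COMPCODE_LT (1) | COMPCODE_EQ (2), which is what
   lets combine_comparisons below merge two comparisons with a plain
   bitwise AND or OR.  */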
2696 
2697 static enum comparison_code
2698 comparison_to_compcode (enum tree_code code)
2699 {
2700   switch (code)
2701     {
2702     case LT_EXPR:
2703       return COMPCODE_LT;
2704     case EQ_EXPR:
2705       return COMPCODE_EQ;
2706     case LE_EXPR:
2707       return COMPCODE_LE;
2708     case GT_EXPR:
2709       return COMPCODE_GT;
2710     case NE_EXPR:
2711       return COMPCODE_NE;
2712     case GE_EXPR:
2713       return COMPCODE_GE;
2714     case ORDERED_EXPR:
2715       return COMPCODE_ORD;
2716     case UNORDERED_EXPR:
2717       return COMPCODE_UNORD;
2718     case UNLT_EXPR:
2719       return COMPCODE_UNLT;
2720     case UNEQ_EXPR:
2721       return COMPCODE_UNEQ;
2722     case UNLE_EXPR:
2723       return COMPCODE_UNLE;
2724     case UNGT_EXPR:
2725       return COMPCODE_UNGT;
2726     case LTGT_EXPR:
2727       return COMPCODE_LTGT;
2728     case UNGE_EXPR:
2729       return COMPCODE_UNGE;
2730     default:
2731       gcc_unreachable ();
2732     }
2733 }
2734 
2735 /* Convert a compcode bit-based encoding of a comparison operator back
2736    to GCC's enum tree_code representation.  This function is the
2737    inverse of comparison_to_compcode.  */
2738 
2739 static enum tree_code
2740 compcode_to_comparison (enum comparison_code code)
2741 {
2742   switch (code)
2743     {
2744     case COMPCODE_LT:
2745       return LT_EXPR;
2746     case COMPCODE_EQ:
2747       return EQ_EXPR;
2748     case COMPCODE_LE:
2749       return LE_EXPR;
2750     case COMPCODE_GT:
2751       return GT_EXPR;
2752     case COMPCODE_NE:
2753       return NE_EXPR;
2754     case COMPCODE_GE:
2755       return GE_EXPR;
2756     case COMPCODE_ORD:
2757       return ORDERED_EXPR;
2758     case COMPCODE_UNORD:
2759       return UNORDERED_EXPR;
2760     case COMPCODE_UNLT:
2761       return UNLT_EXPR;
2762     case COMPCODE_UNEQ:
2763       return UNEQ_EXPR;
2764     case COMPCODE_UNLE:
2765       return UNLE_EXPR;
2766     case COMPCODE_UNGT:
2767       return UNGT_EXPR;
2768     case COMPCODE_LTGT:
2769       return LTGT_EXPR;
2770     case COMPCODE_UNGE:
2771       return UNGE_EXPR;
2772     default:
2773       gcc_unreachable ();
2774     }
2775 }
2776 
2777 /* Return a tree for the comparison which is the combination of
2778    doing the AND or OR (depending on CODE) of the two operations LCODE
2779    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2780    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2781    if this makes the transformation invalid.  */
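/* A worked example: for (x < y) && (x == y) the encodings are ANDed,
   COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE, so the whole
   expression folds to constant false; for (x < y) || (x == y) they
   are ORed into COMPCODE_LE, i.e. x <= y.  */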
2782 
2783 tree
2784 combine_comparisons (location_t loc,
2785 		     enum tree_code code, enum tree_code lcode,
2786 		     enum tree_code rcode, tree truth_type,
2787 		     tree ll_arg, tree lr_arg)
2788 {
2789   bool honor_nans = HONOR_NANS (ll_arg);
2790   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2791   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2792   int compcode;
2793 
2794   switch (code)
2795     {
2796     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2797       compcode = lcompcode & rcompcode;
2798       break;
2799 
2800     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2801       compcode = lcompcode | rcompcode;
2802       break;
2803 
2804     default:
2805       return NULL_TREE;
2806     }
2807 
2808   if (!honor_nans)
2809     {
2810       /* Eliminate unordered comparisons, as well as LTGT and ORD
2811 	 which are not used unless the mode has NaNs.  */
2812       compcode &= ~COMPCODE_UNORD;
2813       if (compcode == COMPCODE_LTGT)
2814 	compcode = COMPCODE_NE;
2815       else if (compcode == COMPCODE_ORD)
2816 	compcode = COMPCODE_TRUE;
2817     }
2818    else if (flag_trapping_math)
2819      {
2820 	/* Check that the original operation and the optimized ones will trap
2821 	   under the same condition.  */
2822 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2823 		     && (lcompcode != COMPCODE_EQ)
2824 		     && (lcompcode != COMPCODE_ORD);
2825 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2826 		     && (rcompcode != COMPCODE_EQ)
2827 		     && (rcompcode != COMPCODE_ORD);
2828 	bool trap = (compcode & COMPCODE_UNORD) == 0
2829 		    && (compcode != COMPCODE_EQ)
2830 		    && (compcode != COMPCODE_ORD);
2831 
2832         /* In a short-circuited boolean expression the LHS might be
2833 	   such that the RHS, if evaluated, will never trap.  For
2834 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2835 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2836 	   example, the expression above will never trap, hence
2837 	   optimizing it to x < y would be invalid).  */
2838         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2839             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2840           rtrap = false;
2841 
2842         /* If the comparison was short-circuited, and only the RHS
2843 	   trapped, we may now generate a spurious trap.  */
2844 	if (rtrap && !ltrap
2845 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2846 	  return NULL_TREE;
2847 
2848 	/* If we changed the conditions that cause a trap, we lose.  */
2849 	if ((ltrap || rtrap) != trap)
2850 	  return NULL_TREE;
2851       }
2852 
2853   if (compcode == COMPCODE_TRUE)
2854     return constant_boolean_node (true, truth_type);
2855   else if (compcode == COMPCODE_FALSE)
2856     return constant_boolean_node (false, truth_type);
2857   else
2858     {
2859       enum tree_code tcode;
2860 
2861       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2862       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2863     }
2864 }
2865 
2866 /* Return nonzero if two operands (typically of the same tree node)
2867    are necessarily equal. FLAGS modifies behavior as follows:
2868 
2869    If OEP_ONLY_CONST is set, only return nonzero for constants.
2870    This function tests whether the operands are indistinguishable;
2871    it does not test whether they are equal using C's == operation.
2872    The distinction is important for IEEE floating point, because
2873    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2874    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2875 
2876    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2877    even though it may hold multiple values during a function.
2878    This is because a GCC tree node guarantees that nothing else is
2879    executed between the evaluation of its "operands" (which may often
2880    be evaluated in arbitrary order).  Hence if the operands themselves
2881    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2882    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2883    unset means assuming isochronic (or instantaneous) tree equivalence.
2884    Unless comparing arbitrary expression trees, such as from different
2885    statements, this flag can usually be left unset.
2886 
2887    If OEP_PURE_SAME is set, then pure functions with identical arguments
2888    are considered the same.  It is used when the caller has other ways
2889    to ensure that global memory is unchanged in between.
2890 
2891    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2892    not values of expressions.
2893 
2894    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2895    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2896 
2897    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2898    any operand with side effects.  This is unnecessarily conservative in the
2899    case we know that arg0 and arg1 are in disjoint code paths (such as in
2900    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2901    addresses with TREE_CONSTANT flag set so we know that &var == &var
2902    even if var is volatile.  */
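/* A small illustration: two distinct INTEGER_CST nodes of value 42
   compare equal here, whereas REAL_CST 0.0 and -0.0 compare unequal
   when signed zeros are honored, even though 0.0 == -0.0 as
   values.  */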
2903 
2904 int
2905 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2906 {
2907   /* When checking, verify at the outermost operand_equal_p call that
2908      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2909      hash value.  */
2910   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2911     {
2912       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2913 	{
2914 	  if (arg0 != arg1)
2915 	    {
2916 	      inchash::hash hstate0 (0), hstate1 (0);
2917 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2918 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2919 	      hashval_t h0 = hstate0.end ();
2920 	      hashval_t h1 = hstate1.end ();
2921 	      gcc_assert (h0 == h1);
2922 	    }
2923 	  return 1;
2924 	}
2925       else
2926 	return 0;
2927     }
2928 
2929   /* If either is ERROR_MARK, they aren't equal.  */
2930   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2931       || TREE_TYPE (arg0) == error_mark_node
2932       || TREE_TYPE (arg1) == error_mark_node)
2933     return 0;
2934 
2935   /* Similarly, if either does not have a type (like a released SSA name),
2936      they aren't equal.  */
2937   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2938     return 0;
2939 
2940   /* We cannot consider pointers to different address spaces equal.  */
2941   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2942       && POINTER_TYPE_P (TREE_TYPE (arg1))
2943       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2944 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2945     return 0;
2946 
2947   /* Check equality of integer constants before bailing out due to
2948      precision differences.  */
2949   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2950     {
2951       /* Address of INTEGER_CST is not defined; check that we did not forget
2952 	 to drop the OEP_ADDRESS_OF flag.  */
2953       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2954       return tree_int_cst_equal (arg0, arg1);
2955     }
2956 
2957   if (!(flags & OEP_ADDRESS_OF))
2958     {
2959       /* If the two types don't have the same signedness, then we can't consider
2960 	 them equal.  We must check this before the STRIP_NOPS calls
2961 	 because they may change the signedness of the arguments.  As pointers
2962 	 strictly don't have a signedness, require either two pointers or
2963 	 two non-pointers as well.  */
2964       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2965 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
2966 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
2967 	return 0;
2968 
2969       /* If the two types don't have the same precision, then it is not safe
2970 	 to strip NOPs.  */
2971       if (element_precision (TREE_TYPE (arg0))
2972 	  != element_precision (TREE_TYPE (arg1)))
2973 	return 0;
2974 
2975       STRIP_NOPS (arg0);
2976       STRIP_NOPS (arg1);
2977     }
2978 #if 0
2979   /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
2980      sanity check once the issue is solved.  */
2981   else
2982     /* Addresses of conversions and SSA_NAMEs (and many other things)
2983        are not defined.  Check that we did not forget to drop the
2984        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
2985     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2986 			 && TREE_CODE (arg0) != SSA_NAME);
2987 #endif
2988 
2989   /* In case both args are comparisons but with different comparison
2990      code, try to swap the comparison operands of one arg to produce
2991      a match and compare that variant.  */
2992   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2993       && COMPARISON_CLASS_P (arg0)
2994       && COMPARISON_CLASS_P (arg1))
2995     {
2996       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2997 
2998       if (TREE_CODE (arg0) == swap_code)
2999 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3000 			        TREE_OPERAND (arg1, 1), flags)
3001 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3002 				   TREE_OPERAND (arg1, 0), flags);
3003     }
3004 
3005   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3006     {
3007       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3008       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3009 	;
3010       else if (flags & OEP_ADDRESS_OF)
3011 	{
3012 	  /* If we are interested in comparing addresses, ignore
3013 	     MEM_REF wrappings of the base that can appear just for
3014 	     TBAA reasons.  */
3015 	  if (TREE_CODE (arg0) == MEM_REF
3016 	      && DECL_P (arg1)
3017 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3018 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3019 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3020 	    return 1;
3021 	  else if (TREE_CODE (arg1) == MEM_REF
3022 		   && DECL_P (arg0)
3023 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3024 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3025 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3026 	    return 1;
3027 	  return 0;
3028 	}
3029       else
3030 	return 0;
3031     }
3032 
3033   /* When not checking addresses, this is needed for conversions and for
3034      COMPONENT_REF.  Might as well play it safe and always test this.  */
3035   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3036       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3037       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3038 	  && !(flags & OEP_ADDRESS_OF)))
3039     return 0;
3040 
3041   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3042      We don't care about side effects in that case because the SAVE_EXPR
3043      takes care of that for us. In all other cases, two expressions are
3044      equal if they have no side effects.  If we have two identical
3045      expressions with side effects that should be treated the same due
3046      to the only side effects being identical SAVE_EXPR's, that will
3047      be detected in the recursive calls below.
3048      If we are taking an invariant address of two identical objects
3049      they are necessarily equal as well.  */
3050   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3051       && (TREE_CODE (arg0) == SAVE_EXPR
3052 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3053 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3054     return 1;
3055 
3056   /* Next handle constant cases, those for which we can return 1 even
3057      if ONLY_CONST is set.  */
3058   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3059     switch (TREE_CODE (arg0))
3060       {
3061       case INTEGER_CST:
3062 	return tree_int_cst_equal (arg0, arg1);
3063 
3064       case FIXED_CST:
3065 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3066 				       TREE_FIXED_CST (arg1));
3067 
3068       case REAL_CST:
3069 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3070 	  return 1;
3071 
3072 
3073 	if (!HONOR_SIGNED_ZEROS (arg0))
3074 	  {
3075 	    /* If we do not distinguish between signed and unsigned zero,
3076 	       consider them equal.  */
3077 	    if (real_zerop (arg0) && real_zerop (arg1))
3078 	      return 1;
3079 	  }
3080 	return 0;
3081 
3082       case VECTOR_CST:
3083 	{
3084 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3085 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3086 	    return 0;
3087 
3088 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3089 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3090 	    return 0;
3091 
3092 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3093 	  for (unsigned int i = 0; i < count; ++i)
3094 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3095 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3096 	      return 0;
3097 	  return 1;
3098 	}
3099 
3100       case COMPLEX_CST:
3101 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3102 				 flags)
3103 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3104 				    flags));
3105 
3106       case STRING_CST:
3107 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3108 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3109 			      TREE_STRING_POINTER (arg1),
3110 			      TREE_STRING_LENGTH (arg0)));
3111 
3112       case ADDR_EXPR:
3113 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3114 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3115 				flags | OEP_ADDRESS_OF
3116 				| OEP_MATCH_SIDE_EFFECTS);
3117       case CONSTRUCTOR:
3118 	/* In GIMPLE empty constructors are allowed in initializers of
3119 	   aggregates.  */
3120 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3121       default:
3122 	break;
3123       }
3124 
3125   if (flags & OEP_ONLY_CONST)
3126     return 0;
3127 
3128 /* Define macros to test an operand from arg0 and arg1 for equality and a
3129    variant that allows null and views null as being different from any
3130    non-null value.  In the latter case, if either is null, then both
3131    must be; otherwise, do the normal comparison.  */
3132 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3133 				    TREE_OPERAND (arg1, N), flags)
3134 
3135 #define OP_SAME_WITH_NULL(N)				\
3136   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3137    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
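/* So, e.g., OP_SAME_WITH_NULL (2) on a COMPONENT_REF compares the two
   optional operands: if exactly one is null the refs differ, and if
   both are null they match trivially.  */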
3138 
3139   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3140     {
3141     case tcc_unary:
3142       /* Two conversions are equal only if signedness and modes match.  */
3143       switch (TREE_CODE (arg0))
3144         {
3145 	CASE_CONVERT:
3146         case FIX_TRUNC_EXPR:
3147 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3148 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3149 	    return 0;
3150 	  break;
3151 	default:
3152 	  break;
3153 	}
3154 
3155       return OP_SAME (0);
3156 
3157 
3158     case tcc_comparison:
3159     case tcc_binary:
3160       if (OP_SAME (0) && OP_SAME (1))
3161 	return 1;
3162 
3163       /* For commutative ops, allow the other order.  */
3164       return (commutative_tree_code (TREE_CODE (arg0))
3165 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3166 				  TREE_OPERAND (arg1, 1), flags)
3167 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3168 				  TREE_OPERAND (arg1, 0), flags));
3169 
3170     case tcc_reference:
3171       /* If either of the pointer (or reference) expressions we are
3172 	 dereferencing contain a side effect, these cannot be equal,
3173 	 but their addresses can be.  */
3174       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3175 	  && (TREE_SIDE_EFFECTS (arg0)
3176 	      || TREE_SIDE_EFFECTS (arg1)))
3177 	return 0;
3178 
3179       switch (TREE_CODE (arg0))
3180 	{
3181 	case INDIRECT_REF:
3182 	  if (!(flags & OEP_ADDRESS_OF)
3183 	      && (TYPE_ALIGN (TREE_TYPE (arg0))
3184 		  != TYPE_ALIGN (TREE_TYPE (arg1))))
3185 	    return 0;
3186 	  flags &= ~OEP_ADDRESS_OF;
3187 	  return OP_SAME (0);
3188 
3189 	case IMAGPART_EXPR:
3190 	  /* Require the same offset.  */
3191 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3192 				TYPE_SIZE (TREE_TYPE (arg1)),
3193 				flags & ~OEP_ADDRESS_OF))
3194 	    return 0;
3195 
3196 	/* Fallthru.  */
3197 	case REALPART_EXPR:
3198 	case VIEW_CONVERT_EXPR:
3199 	  return OP_SAME (0);
3200 
3201 	case TARGET_MEM_REF:
3202 	case MEM_REF:
3203 	  if (!(flags & OEP_ADDRESS_OF))
3204 	    {
3205 	      /* Require equal access sizes.  */
3206 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3207 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3208 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3209 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3210 					   TYPE_SIZE (TREE_TYPE (arg1)),
3211 					   flags)))
3212 		return 0;
3213 	      /* Verify that the accesses happen in compatible types.  */
3214 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3215 		return 0;
3216 	      /* Verify that accesses are TBAA compatible.  */
3217 	      if (!alias_ptr_types_compatible_p
3218 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3219 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3220 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3221 		      != MR_DEPENDENCE_CLIQUE (arg1))
3222 		  || (MR_DEPENDENCE_BASE (arg0)
3223 		      != MR_DEPENDENCE_BASE (arg1)))
3224 		return 0;
3225 	     /* Verify that alignment is compatible.  */
3226 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 		return 0;
3229 	    }
3230 	  flags &= ~OEP_ADDRESS_OF;
3231 	  return (OP_SAME (0) && OP_SAME (1)
3232 		  /* TARGET_MEM_REFs require equal extra operands.  */
3233 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3234 		      || (OP_SAME_WITH_NULL (2)
3235 			  && OP_SAME_WITH_NULL (3)
3236 			  && OP_SAME_WITH_NULL (4))));
3237 
3238 	case ARRAY_REF:
3239 	case ARRAY_RANGE_REF:
3240 	  if (!OP_SAME (0))
3241 	    return 0;
3242 	  flags &= ~OEP_ADDRESS_OF;
3243 	  /* First compare the array index by value if it is constant, as we
3244 	     may have different types but the same value here.  */
3245 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3246 				       TREE_OPERAND (arg1, 1))
3247 		   || OP_SAME (1))
3248 		  && OP_SAME_WITH_NULL (2)
3249 		  && OP_SAME_WITH_NULL (3)
3250 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3251 		     we have to account for the offset of the ref.  */
3252 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3253 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3254 		      || (operand_equal_p (array_ref_low_bound
3255 					     (CONST_CAST_TREE (arg0)),
3256 					   array_ref_low_bound
3257 					     (CONST_CAST_TREE (arg1)), flags)
3258 			  && operand_equal_p (array_ref_element_size
3259 					        (CONST_CAST_TREE (arg0)),
3260 					      array_ref_element_size
3261 					        (CONST_CAST_TREE (arg1)),
3262 					      flags))));
3263 
3264 	case COMPONENT_REF:
3265 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3266 	     may be NULL when we're called to compare MEM_EXPRs.  */
3267 	  if (!OP_SAME_WITH_NULL (0)
3268 	      || !OP_SAME (1))
3269 	    return 0;
3270 	  flags &= ~OEP_ADDRESS_OF;
3271 	  return OP_SAME_WITH_NULL (2);
3272 
3273 	case BIT_FIELD_REF:
3274 	  if (!OP_SAME (0))
3275 	    return 0;
3276 	  flags &= ~OEP_ADDRESS_OF;
3277 	  return OP_SAME (1) && OP_SAME (2);
3278 
3279 	default:
3280 	  return 0;
3281 	}
3282 
3283     case tcc_expression:
3284       switch (TREE_CODE (arg0))
3285 	{
3286 	case ADDR_EXPR:
3287 	  /* Be sure we pass the right ADDRESS_OF flag.  */
3288 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3289 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3290 				  TREE_OPERAND (arg1, 0),
3291 				  flags | OEP_ADDRESS_OF);
3292 
3293 	case TRUTH_NOT_EXPR:
3294 	  return OP_SAME (0);
3295 
3296 	case TRUTH_ANDIF_EXPR:
3297 	case TRUTH_ORIF_EXPR:
3298 	  return OP_SAME (0) && OP_SAME (1);
3299 
3300 	case FMA_EXPR:
3301 	case WIDEN_MULT_PLUS_EXPR:
3302 	case WIDEN_MULT_MINUS_EXPR:
3303 	  if (!OP_SAME (2))
3304 	    return 0;
3305 	  /* The multiplication operands are commutative.  */
3306 	  /* FALLTHRU */
3307 
3308 	case TRUTH_AND_EXPR:
3309 	case TRUTH_OR_EXPR:
3310 	case TRUTH_XOR_EXPR:
3311 	  if (OP_SAME (0) && OP_SAME (1))
3312 	    return 1;
3313 
3314 	  /* Otherwise, take into account that this is a commutative operation.  */
3315 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3316 				   TREE_OPERAND (arg1, 1), flags)
3317 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3318 				      TREE_OPERAND (arg1, 0), flags));
3319 
3320 	case COND_EXPR:
3321 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3322 	    return 0;
3323 	  flags &= ~OEP_ADDRESS_OF;
3324 	  return OP_SAME (0);
3325 
3326 	case BIT_INSERT_EXPR:
3327 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3328 	     of op1.  Need to check to make sure they are the same.  */
3329 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3330 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3331 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3332 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3333 	    return false;
3334 	  /* FALLTHRU */
3335 
3336 	case VEC_COND_EXPR:
3337 	case DOT_PROD_EXPR:
3338 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3339 
3340 	case MODIFY_EXPR:
3341 	case INIT_EXPR:
3342 	case COMPOUND_EXPR:
3343 	case PREDECREMENT_EXPR:
3344 	case PREINCREMENT_EXPR:
3345 	case POSTDECREMENT_EXPR:
3346 	case POSTINCREMENT_EXPR:
3347 	  if (flags & OEP_LEXICOGRAPHIC)
3348 	    return OP_SAME (0) && OP_SAME (1);
3349 	  return 0;
3350 
3351 	case CLEANUP_POINT_EXPR:
3352 	case EXPR_STMT:
3353 	  if (flags & OEP_LEXICOGRAPHIC)
3354 	    return OP_SAME (0);
3355 	  return 0;
3356 
3357 	default:
3358 	  return 0;
3359 	}
3360 
3361     case tcc_vl_exp:
3362       switch (TREE_CODE (arg0))
3363 	{
3364 	case CALL_EXPR:
3365 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3366 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3367 	    /* If the CALL_EXPRs are not both internal or both normal function
3368 	       calls, then they are not equal.  */
3369 	    return 0;
3370 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3371 	    {
3372 	      /* If the CALL_EXPRs call different internal functions, then they
3373 		 are not equal.  */
3374 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3375 		return 0;
3376 	    }
3377 	  else
3378 	    {
3379 	      /* If the CALL_EXPRs call different functions, then they are not
3380 		 equal.  */
3381 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3382 				     flags))
3383 		return 0;
3384 	    }
3385 
3386 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3387 	  {
3388 	    unsigned int cef = call_expr_flags (arg0);
3389 	    if (flags & OEP_PURE_SAME)
3390 	      cef &= ECF_CONST | ECF_PURE;
3391 	    else
3392 	      cef &= ECF_CONST;
3393 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3394 	      return 0;
3395 	  }
3396 
3397 	  /* Now see if all the arguments are the same.  */
3398 	  {
3399 	    const_call_expr_arg_iterator iter0, iter1;
3400 	    const_tree a0, a1;
3401 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3402 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3403 		 a0 && a1;
3404 		 a0 = next_const_call_expr_arg (&iter0),
3405 		   a1 = next_const_call_expr_arg (&iter1))
3406 	      if (! operand_equal_p (a0, a1, flags))
3407 		return 0;
3408 
3409 	    /* If we get here and both argument lists are exhausted
3410 	       then the CALL_EXPRs are equal.  */
3411 	    return ! (a0 || a1);
3412 	  }
3413 	default:
3414 	  return 0;
3415 	}
3416 
3417     case tcc_declaration:
3418       /* Consider __builtin_sqrt equal to sqrt.  */
3419       return (TREE_CODE (arg0) == FUNCTION_DECL
3420 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3421 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3422 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3423 
3424     case tcc_exceptional:
3425       if (TREE_CODE (arg0) == CONSTRUCTOR)
3426 	{
3427 	  /* In GIMPLE constructors are used only to build vectors from
3428 	     elements.  Individual elements in the constructor must be
3429 	     indexed in increasing order and form an initial sequence.
3430 
3431 	     We make no effort to compare constructors in GENERIC.
3432 	     (see sem_variable::equals in ipa-icf which can do so for
3433 	      constants).  */
3434 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3435 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3436 	    return 0;
3437 
3438 	  /* Be sure that vectors constructed have the same representation.
3439 	     We have only checked that element precision and modes match.
3440 	     Vectors may be BLKmode, so also check that the number of
3441 	     parts matches.  */
3442 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3443 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3444 	    return 0;
3445 
3446 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3447 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3448 	  unsigned int len = vec_safe_length (v0);
3449 
3450 	  if (len != vec_safe_length (v1))
3451 	    return 0;
3452 
3453 	  for (unsigned int i = 0; i < len; i++)
3454 	    {
3455 	      constructor_elt *c0 = &(*v0)[i];
3456 	      constructor_elt *c1 = &(*v1)[i];
3457 
3458 	      if (!operand_equal_p (c0->value, c1->value, flags)
3459 		  /* In GIMPLE the indexes can be either NULL or matching i.
3460 		     Double check this so we won't get false
3461 		     positives for GENERIC.  */
3462 		  || (c0->index
3463 		      && (TREE_CODE (c0->index) != INTEGER_CST
3464 			  || !compare_tree_int (c0->index, i)))
3465 		  || (c1->index
3466 		      && (TREE_CODE (c1->index) != INTEGER_CST
3467 			  || !compare_tree_int (c1->index, i))))
3468 		return 0;
3469 	    }
3470 	  return 1;
3471 	}
3472       else if (TREE_CODE (arg0) == STATEMENT_LIST
3473 	       && (flags & OEP_LEXICOGRAPHIC))
3474 	{
3475 	  /* Compare the STATEMENT_LISTs.  */
3476 	  tree_stmt_iterator tsi1, tsi2;
3477 	  tree body1 = CONST_CAST_TREE (arg0);
3478 	  tree body2 = CONST_CAST_TREE (arg1);
3479 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3480 	       tsi_next (&tsi1), tsi_next (&tsi2))
3481 	    {
3482 	      /* The lists don't have the same number of statements.  */
3483 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3484 		return 0;
3485 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3486 		return 1;
3487 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3488 				    flags & (OEP_LEXICOGRAPHIC
3489 					     | OEP_NO_HASH_CHECK)))
3490 		return 0;
3491 	    }
3492 	}
3493       return 0;
3494 
3495     case tcc_statement:
3496       switch (TREE_CODE (arg0))
3497 	{
3498 	case RETURN_EXPR:
3499 	  if (flags & OEP_LEXICOGRAPHIC)
3500 	    return OP_SAME_WITH_NULL (0);
3501 	  return 0;
3502 	case DEBUG_BEGIN_STMT:
3503 	  if (flags & OEP_LEXICOGRAPHIC)
3504 	    return 1;
3505 	  return 0;
3506 	default:
3507 	  return 0;
3508 	 }
3509 
3510     default:
3511       return 0;
3512     }
3513 
3514 #undef OP_SAME
3515 #undef OP_SAME_WITH_NULL
3516 }
3517 
3518 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3519    with a different signedness or a narrower precision.  */
3520 
3521 static bool
3522 operand_equal_for_comparison_p (tree arg0, tree arg1)
3523 {
3524   if (operand_equal_p (arg0, arg1, 0))
3525     return true;
3526 
3527   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3528       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3529     return false;
3530 
3531   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3532      and see if the inner values are the same.  This removes any
3533      signedness comparison, which doesn't matter here.  */
3534   tree op0 = arg0;
3535   tree op1 = arg1;
3536   STRIP_NOPS (op0);
3537   STRIP_NOPS (op1);
3538   if (operand_equal_p (op0, op1, 0))
3539     return true;
3540 
3541   /* Discard a single widening conversion from ARG1 and see if the inner
3542      value is the same as ARG0.  */
3543   if (CONVERT_EXPR_P (arg1)
3544       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3545       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3546          < TYPE_PRECISION (TREE_TYPE (arg1))
3547       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3548     return true;
3549 
3550   return false;
3551 }
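
/* Illustrative note (hypothetical example, not from the original
   source): for a `short s', comparing ARG0 = `s' against
   ARG1 = `(int) s' returns true via the final test above, which
   discards the single widening conversion from ARG1; `s' against an
   unrelated `int i' still returns false.  */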
3552 
3553 /* See if ARG is an expression that is either a comparison or is performing
3554    arithmetic on comparisons.  The comparisons must only be comparing
3555    two different values, which will be stored in *CVAL1 and *CVAL2; if
3556    they are nonzero it means that some operands have already been found.
3557    No variables may be used anywhere else in the expression except in the
3558    comparisons.
3559 
3560    If this is true, return 1.  Otherwise, return zero.  */
3561 
3562 static int
3563 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3564 {
3565   enum tree_code code = TREE_CODE (arg);
3566   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3567 
3568   /* We can handle some of the tcc_expression cases here.  */
3569   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3570     tclass = tcc_unary;
3571   else if (tclass == tcc_expression
3572 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3573 	       || code == COMPOUND_EXPR))
3574     tclass = tcc_binary;
3575 
3576   switch (tclass)
3577     {
3578     case tcc_unary:
3579       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3580 
3581     case tcc_binary:
3582       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3583 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3584 
3585     case tcc_constant:
3586       return 1;
3587 
3588     case tcc_expression:
3589       if (code == COND_EXPR)
3590 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3591 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3592 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3593       return 0;
3594 
3595     case tcc_comparison:
3596       /* First see if we can handle the first operand, then the second.  For
3597 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3598 	 one side of the comparison is each of the values; test for the
3599 	 case where this isn't true by failing if the two operands
3600 	 are the same.  */
3601 
3602       if (operand_equal_p (TREE_OPERAND (arg, 0),
3603 			   TREE_OPERAND (arg, 1), 0))
3604 	return 0;
3605 
3606       if (*cval1 == 0)
3607 	*cval1 = TREE_OPERAND (arg, 0);
3608       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3609 	;
3610       else if (*cval2 == 0)
3611 	*cval2 = TREE_OPERAND (arg, 0);
3612       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3613 	;
3614       else
3615 	return 0;
3616 
3617       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3618 	;
3619       else if (*cval2 == 0)
3620 	*cval2 = TREE_OPERAND (arg, 1);
3621       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3622 	;
3623       else
3624 	return 0;
3625 
3626       return 1;
3627 
3628     default:
3629       return 0;
3630     }
3631 }
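
/* Illustrative note (hypothetical example): for ARG representing
   `a < b || a == b', the function returns 1 with *CVAL1 = `a' and
   *CVAL2 = `b'; for `a < b || c == d' it returns 0, since more than
   two distinct values appear in the comparisons.  */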
3632 
3633 /* ARG is a tree that is known to contain just arithmetic operations and
3634    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3635    any occurrence of OLD0 as an operand of a comparison and likewise for
3636    NEW1 and OLD1.  */
3637 
3638 static tree
3639 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3640 	    tree old1, tree new1)
3641 {
3642   tree type = TREE_TYPE (arg);
3643   enum tree_code code = TREE_CODE (arg);
3644   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3645 
3646   /* We can handle some of the tcc_expression cases here.  */
3647   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3648     tclass = tcc_unary;
3649   else if (tclass == tcc_expression
3650 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3651     tclass = tcc_binary;
3652 
3653   switch (tclass)
3654     {
3655     case tcc_unary:
3656       return fold_build1_loc (loc, code, type,
3657 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3658 				      old0, new0, old1, new1));
3659 
3660     case tcc_binary:
3661       return fold_build2_loc (loc, code, type,
3662 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3663 				      old0, new0, old1, new1),
3664 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3665 				      old0, new0, old1, new1));
3666 
3667     case tcc_expression:
3668       switch (code)
3669 	{
3670 	case SAVE_EXPR:
3671 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3672 			     old1, new1);
3673 
3674 	case COMPOUND_EXPR:
3675 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3676 			     old1, new1);
3677 
3678 	case COND_EXPR:
3679 	  return fold_build3_loc (loc, code, type,
3680 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3681 					  old0, new0, old1, new1),
3682 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3683 					  old0, new0, old1, new1),
3684 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3685 					  old0, new0, old1, new1));
3686 	default:
3687 	  break;
3688 	}
3689       /* Fall through - ???  */
3690 
3691     case tcc_comparison:
3692       {
3693 	tree arg0 = TREE_OPERAND (arg, 0);
3694 	tree arg1 = TREE_OPERAND (arg, 1);
3695 
3696 	/* We need to check both for exact equality and tree equality.  The
3697 	   former will be true if the operand has a side-effect.  In that
3698 	   case, we know the operand occurred exactly once.  */
3699 
3700 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3701 	  arg0 = new0;
3702 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3703 	  arg0 = new1;
3704 
3705 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3706 	  arg1 = new0;
3707 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3708 	  arg1 = new1;
3709 
3710 	return fold_build2_loc (loc, code, type, arg0, arg1);
3711       }
3712 
3713     default:
3714       return arg;
3715     }
3716 }
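
/* Illustrative note (hypothetical example): with OLD0 = `a',
   NEW0 = `0', OLD1 = `b' and NEW1 = `1', eval_subst rewrites
   `a < b && a != b' into `0 < 1 && 0 != 1', which subsequent
   folding reduces to a constant.  */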
3717 
3718 /* Return a tree for the case when the result of an expression is RESULT
3719    converted to TYPE and OMITTED was previously an operand of the expression
3720    but is now not needed (e.g., we folded OMITTED * 0).
3721 
3722    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3723    the conversion of RESULT to TYPE.  */
3724 
3725 tree
3726 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3727 {
3728   tree t = fold_convert_loc (loc, type, result);
3729 
3730   /* If the resulting operand is an empty statement, just return the omitted
3731      statement cast to void.  */
3732   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3733     return build1_loc (loc, NOP_EXPR, void_type_node,
3734 		       fold_ignored_result (omitted));
3735 
3736   if (TREE_SIDE_EFFECTS (omitted))
3737     return build2_loc (loc, COMPOUND_EXPR, type,
3738 		       fold_ignored_result (omitted), t);
3739 
3740   return non_lvalue_loc (loc, t);
3741 }
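
/* Illustrative note (hypothetical example): when folding `f () * 0',
   the result is 0 but the call still has to happen, so this returns
   the COMPOUND_EXPR `(f (), 0)'; for a side-effect-free operand, as
   in `x * 0', it simply returns the converted RESULT.  */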
3742 
3743 /* Return a tree for the case when the result of an expression is RESULT
3744    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3745    of the expression but are now not needed.
3746 
3747    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3748    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3749    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3750    just do the conversion of RESULT to TYPE.  */
3751 
3752 tree
3753 omit_two_operands_loc (location_t loc, tree type, tree result,
3754 		       tree omitted1, tree omitted2)
3755 {
3756   tree t = fold_convert_loc (loc, type, result);
3757 
3758   if (TREE_SIDE_EFFECTS (omitted2))
3759     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3760   if (TREE_SIDE_EFFECTS (omitted1))
3761     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3762 
3763   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3764 }
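
/* Illustrative note (hypothetical example): omitting `f ()' and
   `g ()' around RESULT yields `(f (), (g (), result))', so OMITTED1
   is evaluated first, then OMITTED2, then the result is used.  */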
3765 
3766 
3767 /* Return a simplified tree node for the truth-negation of ARG.  This
3768    never alters ARG itself.  We assume that ARG is an operation that
3769    returns a truth value (0 or 1).
3770 
3771    FIXME: one would think we would fold the result, but it causes
3772    problems with the dominator optimizer.  */
3773 
3774 static tree
3775 fold_truth_not_expr (location_t loc, tree arg)
3776 {
3777   tree type = TREE_TYPE (arg);
3778   enum tree_code code = TREE_CODE (arg);
3779   location_t loc1, loc2;
3780 
3781   /* If this is a comparison, we can simply invert it, except for
3782      floating-point non-equality comparisons, in which case we just
3783      enclose a TRUTH_NOT_EXPR around what we have.  */
3784 
3785   if (TREE_CODE_CLASS (code) == tcc_comparison)
3786     {
3787       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3788       if (FLOAT_TYPE_P (op_type)
3789 	  && flag_trapping_math
3790 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3791 	  && code != NE_EXPR && code != EQ_EXPR)
3792 	return NULL_TREE;
3793 
3794       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3795       if (code == ERROR_MARK)
3796 	return NULL_TREE;
3797 
3798       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3799 			     TREE_OPERAND (arg, 1));
3800       if (TREE_NO_WARNING (arg))
3801 	TREE_NO_WARNING (ret) = 1;
3802       return ret;
3803     }
3804 
3805   switch (code)
3806     {
3807     case INTEGER_CST:
3808       return constant_boolean_node (integer_zerop (arg), type);
3809 
3810     case TRUTH_AND_EXPR:
3811       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3812       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3813       return build2_loc (loc, TRUTH_OR_EXPR, type,
3814 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3815 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3816 
3817     case TRUTH_OR_EXPR:
3818       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3819       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3820       return build2_loc (loc, TRUTH_AND_EXPR, type,
3821 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3822 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3823 
3824     case TRUTH_XOR_EXPR:
3825       /* Here we can invert either operand.  We invert the first operand
3826 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3827 	 result is the XOR of the first operand with the inside of the
3828 	 negation of the second operand.  */
3829 
3830       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3831 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3832 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3833       else
3834 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3835 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3836 			   TREE_OPERAND (arg, 1));
3837 
3838     case TRUTH_ANDIF_EXPR:
3839       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3840       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3841       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3842 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3843 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3844 
3845     case TRUTH_ORIF_EXPR:
3846       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3847       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3848       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3849 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3850 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3851 
3852     case TRUTH_NOT_EXPR:
3853       return TREE_OPERAND (arg, 0);
3854 
3855     case COND_EXPR:
3856       {
3857 	tree arg1 = TREE_OPERAND (arg, 1);
3858 	tree arg2 = TREE_OPERAND (arg, 2);
3859 
3860 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3861 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3862 
3863 	/* A COND_EXPR may have a throw as one operand, which
3864 	   then has void type.  Just leave void operands
3865 	   as they are.  */
3866 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3867 			   VOID_TYPE_P (TREE_TYPE (arg1))
3868 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3869 			   VOID_TYPE_P (TREE_TYPE (arg2))
3870 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3871       }
3872 
3873     case COMPOUND_EXPR:
3874       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3875       return build2_loc (loc, COMPOUND_EXPR, type,
3876 			 TREE_OPERAND (arg, 0),
3877 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3878 
3879     case NON_LVALUE_EXPR:
3880       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3881       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3882 
3883     CASE_CONVERT:
3884       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3885 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3886 
3887       /* fall through */
3888 
3889     case FLOAT_EXPR:
3890       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3891       return build1_loc (loc, TREE_CODE (arg), type,
3892 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3893 
3894     case BIT_AND_EXPR:
3895       if (!integer_onep (TREE_OPERAND (arg, 1)))
3896 	return NULL_TREE;
3897       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3898 
3899     case SAVE_EXPR:
3900       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3901 
3902     case CLEANUP_POINT_EXPR:
3903       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3904       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3905 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3906 
3907     default:
3908       return NULL_TREE;
3909     }
3910 }
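
/* Illustrative note (hypothetical examples): `!(a && b)' becomes
   `!a || !b' via the TRUTH_AND_EXPR case, and `!(x < y)' becomes
   `x >= y' for integer operands.  For floating-point operands under
   -ftrapping-math the `<' comparison is not inverted, because
   `!(x < y)' and `x >= y' differ when a NaN is involved; NULL_TREE
   is returned and the caller wraps a TRUTH_NOT_EXPR instead.  */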
3911 
3912 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3913    assume that ARG is an operation that returns a truth value (0 or 1
3914    for scalars, 0 or -1 for vectors).  Return the folded expression if
3915    folding is successful.  Otherwise, return NULL_TREE.  */
3916 
3917 static tree
3918 fold_invert_truthvalue (location_t loc, tree arg)
3919 {
3920   tree type = TREE_TYPE (arg);
3921   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3922 			      ? BIT_NOT_EXPR
3923 			      : TRUTH_NOT_EXPR,
3924 			 type, arg);
3925 }
3926 
3927 /* Return a simplified tree node for the truth-negation of ARG.  This
3928    never alters ARG itself.  We assume that ARG is an operation that
3929    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3930 
3931 tree
3932 invert_truthvalue_loc (location_t loc, tree arg)
3933 {
3934   if (TREE_CODE (arg) == ERROR_MARK)
3935     return arg;
3936 
3937   tree type = TREE_TYPE (arg);
3938   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3939 			       ? BIT_NOT_EXPR
3940 			       : TRUTH_NOT_EXPR,
3941 			  type, arg);
3942 }
3943 
3944 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3945    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
3946    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
3947    is the original memory reference used to preserve the alias set of
3948    the access.  */
3949 
3950 static tree
3951 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3952 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
3953 		    int unsignedp, int reversep)
3954 {
3955   tree result, bftype;
3956 
3957   /* Attempt not to lose the access path if possible.  */
3958   if (TREE_CODE (orig_inner) == COMPONENT_REF)
3959     {
3960       tree ninner = TREE_OPERAND (orig_inner, 0);
3961       machine_mode nmode;
3962       poly_int64 nbitsize, nbitpos;
3963       tree noffset;
3964       int nunsignedp, nreversep, nvolatilep = 0;
3965       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3966 				       &noffset, &nmode, &nunsignedp,
3967 				       &nreversep, &nvolatilep);
3968       if (base == inner
3969 	  && noffset == NULL_TREE
3970 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
3971 	  && !reversep
3972 	  && !nreversep
3973 	  && !nvolatilep)
3974 	{
3975 	  inner = ninner;
3976 	  bitpos -= nbitpos;
3977 	}
3978     }
3979 
3980   alias_set_type iset = get_alias_set (orig_inner);
3981   if (iset == 0 && get_alias_set (inner) != iset)
3982     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3983 			 build_fold_addr_expr (inner),
3984 			 build_int_cst (ptr_type_node, 0));
3985 
3986   if (known_eq (bitpos, 0) && !reversep)
3987     {
3988       tree size = TYPE_SIZE (TREE_TYPE (inner));
3989       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3990 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3991 	  && tree_fits_shwi_p (size)
3992 	  && tree_to_shwi (size) == bitsize)
3993 	return fold_convert_loc (loc, type, inner);
3994     }
3995 
3996   bftype = type;
3997   if (TYPE_PRECISION (bftype) != bitsize
3998       || TYPE_UNSIGNED (bftype) == !unsignedp)
3999     bftype = build_nonstandard_integer_type (bitsize, 0);
4000 
4001   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4002 		       bitsize_int (bitsize), bitsize_int (bitpos));
4003   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4004 
4005   if (bftype != type)
4006     result = fold_convert_loc (loc, type, result);
4007 
4008   return result;
4009 }
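
/* Illustrative note (hypothetical example): a request for 8 bits at
   bit position 16 of a 32-bit `unsigned int u' yields roughly
   BIT_FIELD_REF <u, 8, 16>, i.e. the value `(u >> 16) & 0xff' on a
   little-endian target; when the requested bits cover a whole
   integral object starting at bit 0, a plain conversion of INNER is
   returned instead.  */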
4010 
4011 /* Optimize a bit-field compare.
4012 
4013    There are two cases:  First is a compare against a constant and the
4014    second is a comparison of two items where the fields are at the same
4015    bit position relative to the start of a chunk (byte, halfword, word)
4016    large enough to contain it.  In these cases we can avoid the shift
4017    implicit in bitfield extractions.
4018 
4019    For constants, we emit a compare of the shifted constant with the
4020    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4021    compared.  For two fields at the same position, we do the ANDs with the
4022    similar mask and compare the result of the ANDs.
4023 
4024    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4025    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4026    are the left and right operands of the comparison, respectively.
4027 
4028    If the optimization described above can be done, we return the resulting
4029    tree.  Otherwise we return zero.  */
4030 
4031 static tree
4032 optimize_bit_field_compare (location_t loc, enum tree_code code,
4033 			    tree compare_type, tree lhs, tree rhs)
4034 {
4035   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4036   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4037   tree type = TREE_TYPE (lhs);
4038   tree unsigned_type;
4039   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4040   machine_mode lmode, rmode;
4041   scalar_int_mode nmode;
4042   int lunsignedp, runsignedp;
4043   int lreversep, rreversep;
4044   int lvolatilep = 0, rvolatilep = 0;
4045   tree linner, rinner = NULL_TREE;
4046   tree mask;
4047   tree offset;
4048 
4049   /* Get all the information about the extractions being done.  If the bit size
4050      is the same as the size of the underlying object, we aren't doing an
4051      extraction at all and so can do nothing.  We also don't want to
4052      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4053      then will no longer be able to replace it.  */
4054   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4055 				&lunsignedp, &lreversep, &lvolatilep);
4056   if (linner == lhs
4057       || !known_size_p (plbitsize)
4058       || !plbitsize.is_constant (&lbitsize)
4059       || !plbitpos.is_constant (&lbitpos)
4060       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4061       || offset != 0
4062       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4063       || lvolatilep)
4064     return 0;
4065 
4066   if (const_p)
4067     rreversep = lreversep;
4068   else
4069    {
4070      /* If this is not a constant, we can only do something if bit positions,
4071 	sizes, signedness and storage order are the same.  */
4072      rinner
4073        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4074 			      &runsignedp, &rreversep, &rvolatilep);
4075 
4076      if (rinner == rhs
4077 	 || maybe_ne (lbitpos, rbitpos)
4078 	 || maybe_ne (lbitsize, rbitsize)
4079 	 || lunsignedp != runsignedp
4080 	 || lreversep != rreversep
4081 	 || offset != 0
4082 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4083 	 || rvolatilep)
4084        return 0;
4085    }
4086 
4087   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4088   poly_uint64 bitstart = 0;
4089   poly_uint64 bitend = 0;
4090   if (TREE_CODE (lhs) == COMPONENT_REF)
4091     {
4092       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4093       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4094 	return 0;
4095     }
4096 
4097   /* See if we can find a mode to refer to this field.  We should be able to,
4098      but fail if we can't.  */
4099   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4100 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4101 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4102 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4103 		      BITS_PER_WORD, false, &nmode))
4104     return 0;
4105 
4106   /* Get an unsigned type of the precision of this mode for the
4107      shifts below.  */
4108   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4109 
4110   /* Compute the bit position and size for the new reference and our offset
4111      within it. If the new reference is the same size as the original, we
4112      won't optimize anything, so return zero.  */
4113   nbitsize = GET_MODE_BITSIZE (nmode);
4114   nbitpos = lbitpos & ~ (nbitsize - 1);
4115   lbitpos -= nbitpos;
4116   if (nbitsize == lbitsize)
4117     return 0;
4118 
4119   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4120     lbitpos = nbitsize - lbitsize - lbitpos;
4121 
4122   /* Make the mask to be used against the extracted field.  */
4123   mask = build_int_cst_type (unsigned_type, -1);
4124   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4125   mask = const_binop (RSHIFT_EXPR, mask,
4126 		      size_int (nbitsize - lbitsize - lbitpos));
4127 
4128   if (! const_p)
4129     {
4130       if (nbitpos < 0)
4131 	return 0;
4132 
4133       /* If not comparing with constant, just rework the comparison
4134 	 and return.  */
4135       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4136 				    nbitsize, nbitpos, 1, lreversep);
4137       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4138       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4139 				    nbitsize, nbitpos, 1, rreversep);
4140       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4141       return fold_build2_loc (loc, code, compare_type, t1, t2);
4142     }
4143 
4144   /* Otherwise, we are handling the constant case.  See if the constant is too
4145      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4146      this not only for its own sake, but to avoid having to test for this
4147      error case below.  If we didn't, we might generate wrong code.
4148 
4149      For unsigned fields, the constant shifted right by the field length should
4150      be all zero.  For signed fields, the high-order bits should agree with
4151      the sign bit.  */
4152 
4153   if (lunsignedp)
4154     {
4155       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4156 	{
4157 	  warning (0, "comparison is always %d due to width of bit-field",
4158 		   code == NE_EXPR);
4159 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4160 	}
4161     }
4162   else
4163     {
4164       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4165       if (tem != 0 && tem != -1)
4166 	{
4167 	  warning (0, "comparison is always %d due to width of bit-field",
4168 		   code == NE_EXPR);
4169 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4170 	}
4171     }
4172 
4173   if (nbitpos < 0)
4174     return 0;
4175 
4176   /* Single-bit compares should always be against zero.  */
4177   if (lbitsize == 1 && ! integer_zerop (rhs))
4178     {
4179       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4180       rhs = build_int_cst (type, 0);
4181     }
4182 
4183   /* Make a new bitfield reference, shift the constant over the
4184      appropriate number of bits and mask it with the computed mask
4185      (in case this was a signed field).  If we changed it, make a new one.  */
4186   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4187 			    nbitsize, nbitpos, 1, lreversep);
4188 
4189   rhs = const_binop (BIT_AND_EXPR,
4190 		     const_binop (LSHIFT_EXPR,
4191 				  fold_convert_loc (loc, unsigned_type, rhs),
4192 				  size_int (lbitpos)),
4193 		     mask);
4194 
4195   lhs = build2_loc (loc, code, compare_type,
4196 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4197   return lhs;
4198 }
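
/* Illustrative note (hypothetical example; bitfield layout is
   target-dependent): given

     struct s { unsigned a : 3; unsigned b : 5; } x;

   the test `x.b == 7' can be rewritten to load the containing byte
   once and compare under a mask, roughly

     (*(unsigned char *) &x & 0xf8) == (7 << 3)

   on a little-endian target, avoiding the shift that a plain
   bitfield extraction would need.  */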
4199 
4200 /* Subroutine for fold_truth_andor_1: decode a field reference.
4201 
4202    If EXP is a comparison reference, we return the innermost reference.
4203 
4204    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4205    set to the starting bit number.
4206 
4207    If the innermost field can be completely contained in a mode-sized
4208    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4209 
4210    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4211    otherwise it is not changed.
4212 
4213    *PUNSIGNEDP is set to the signedness of the field.
4214 
4215    *PREVERSEP is set to the storage order of the field.
4216 
4217    *PMASK is set to the mask used.  This is either contained in a
4218    BIT_AND_EXPR or derived from the width of the field.
4219 
4220    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4221 
4222    Return 0 if this is not a component reference or is one that we can't
4223    do anything with.  */
4224 
4225 static tree
4226 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4227 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4228 			int *punsignedp, int *preversep, int *pvolatilep,
4229 			tree *pmask, tree *pand_mask)
4230 {
4231   tree exp = *exp_;
4232   tree outer_type = 0;
4233   tree and_mask = 0;
4234   tree mask, inner, offset;
4235   tree unsigned_type;
4236   unsigned int precision;
4237 
4238   /* All the optimizations using this function assume integer fields.
4239      There are problems with FP fields since the type_for_size call
4240      below can fail for, e.g., XFmode.  */
4241   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4242     return 0;
4243 
4244   /* We are interested in the bare arrangement of bits, so strip everything
4245      that doesn't affect the machine mode.  However, record the type of the
4246      outermost expression if it may matter below.  */
4247   if (CONVERT_EXPR_P (exp)
4248       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4249     outer_type = TREE_TYPE (exp);
4250   STRIP_NOPS (exp);
4251 
4252   if (TREE_CODE (exp) == BIT_AND_EXPR)
4253     {
4254       and_mask = TREE_OPERAND (exp, 1);
4255       exp = TREE_OPERAND (exp, 0);
4256       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4257       if (TREE_CODE (and_mask) != INTEGER_CST)
4258 	return 0;
4259     }
4260 
4261   poly_int64 poly_bitsize, poly_bitpos;
4262   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4263 			       pmode, punsignedp, preversep, pvolatilep);
4264   if ((inner == exp && and_mask == 0)
4265       || !poly_bitsize.is_constant (pbitsize)
4266       || !poly_bitpos.is_constant (pbitpos)
4267       || *pbitsize < 0
4268       || offset != 0
4269       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4270       /* Reject out-of-bound accesses (PR79731).  */
4271       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4272 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4273 			       *pbitpos + *pbitsize) < 0))
4274     return 0;
4275 
4276   *exp_ = exp;
4277 
4278   /* If the number of bits in the reference is the same as the bitsize of
4279      the outer type, then the outer type gives the signedness. Otherwise
4280      (in case of a small bitfield) the signedness is unchanged.  */
4281   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4282     *punsignedp = TYPE_UNSIGNED (outer_type);
4283 
4284   /* Compute the mask to access the bitfield.  */
4285   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4286   precision = TYPE_PRECISION (unsigned_type);
4287 
4288   mask = build_int_cst_type (unsigned_type, -1);
4289 
4290   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4291   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4292 
4293   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4294   if (and_mask != 0)
4295     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4296 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4297 
4298   *pmask = mask;
4299   *pand_mask = and_mask;
4300   return inner;
4301 }
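
/* Illustrative note (hypothetical example): for EXP = `x.f & 3',
   the underlying object of `x.f' is returned together with its bit
   position and size, *PAND_MASK is set to 3, and *PMASK to 3 ANDed
   with the all-ones mask of the field's width.  */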
4302 
4303 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4304    bit positions and the type of MASK is signed.  */
4305 
4306 static int
4307 all_ones_mask_p (const_tree mask, unsigned int size)
4308 {
4309   tree type = TREE_TYPE (mask);
4310   unsigned int precision = TYPE_PRECISION (type);
4311 
4312   /* If this function returns true when the type of the mask is
4313      UNSIGNED, then there will be errors.  In particular see
4314      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4315      any documentation paper trail as to why this is so.  But the pre
4316      wide-int worked with that restriction and it has been preserved
4317      here.  */
4318   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4319     return false;
4320 
4321   return wi::mask (size, false, precision) == wi::to_wide (mask);
4322 }
4323 
4324 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4325    represents the sign bit of EXP's type.  If EXP represents a sign
4326    or zero extension, also test VAL against the unextended type.
4327    The return value is the (sub)expression whose sign bit is VAL,
4328    or NULL_TREE otherwise.  */
4329 
4330 tree
4331 sign_bit_p (tree exp, const_tree val)
4332 {
4333   int width;
4334   tree t;
4335 
4336   /* Tree EXP must have an integral type.  */
4337   t = TREE_TYPE (exp);
4338   if (! INTEGRAL_TYPE_P (t))
4339     return NULL_TREE;
4340 
4341   /* Tree VAL must be an integer constant.  */
4342   if (TREE_CODE (val) != INTEGER_CST
4343       || TREE_OVERFLOW (val))
4344     return NULL_TREE;
4345 
4346   width = TYPE_PRECISION (t);
4347   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4348     return exp;
4349 
4350   /* Handle extension from a narrower type.  */
4351   if (TREE_CODE (exp) == NOP_EXPR
4352       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4353     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4354 
4355   return NULL_TREE;
4356 }
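
/* Illustrative note (hypothetical example): for a `signed char c',
   sign_bit_p (c, 0x80) returns `c', since 0x80 is exactly the sign
   bit of the 8-bit type; for EXP = `(int) c' and VAL = 0x80 the
   function recurses through the widening NOP_EXPR and returns the
   narrower expression `c'.  */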
4357 
4358 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4359    to be evaluated unconditionally.  */
4360 
4361 static int
4362 simple_operand_p (const_tree exp)
4363 {
4364   /* Strip any conversions that don't change the machine mode.  */
4365   STRIP_NOPS (exp);
4366 
4367   return (CONSTANT_CLASS_P (exp)
4368   	  || TREE_CODE (exp) == SSA_NAME
4369 	  || (DECL_P (exp)
4370 	      && ! TREE_ADDRESSABLE (exp)
4371 	      && ! TREE_THIS_VOLATILE (exp)
4372 	      && ! DECL_NONLOCAL (exp)
4373 	      /* Don't regard global variables as simple.  They may be
4374 		 allocated in ways unknown to the compiler (shared memory,
4375 		 #pragma weak, etc).  */
4376 	      && ! TREE_PUBLIC (exp)
4377 	      && ! DECL_EXTERNAL (exp)
4378 	      /* Weakrefs are not safe to be read, since they can be NULL.
4379  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4380 		 have DECL_WEAK flag set.  */
4381 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4382 	      /* Loading a static variable is unduly expensive, but global
4383 		 registers aren't expensive.  */
4384 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4385 }
4386 
4387 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4388    to be evaluated unconditionally.
4389    In addition to simple_operand_p, we assume that comparisons, conversions,
4390    and logic-not operations are simple, if their operands are simple, too.  */
4391 
4392 static bool
4393 simple_operand_p_2 (tree exp)
4394 {
4395   enum tree_code code;
4396 
4397   if (TREE_SIDE_EFFECTS (exp)
4398       || tree_could_trap_p (exp))
4399     return false;
4400 
4401   while (CONVERT_EXPR_P (exp))
4402     exp = TREE_OPERAND (exp, 0);
4403 
4404   code = TREE_CODE (exp);
4405 
4406   if (TREE_CODE_CLASS (code) == tcc_comparison)
4407     return (simple_operand_p (TREE_OPERAND (exp, 0))
4408 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4409 
4410   if (code == TRUTH_NOT_EXPR)
4411       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4412 
4413   return simple_operand_p (exp);
4414 }
4415 
4416 
4417 /* The following functions are subroutines to fold_range_test and allow it to
4418    try to change a logical combination of comparisons into a range test.
4419 
4420    For example, both
4421 	X == 2 || X == 3 || X == 4 || X == 5
4422    and
4423 	X >= 2 && X <= 5
4424    are converted to
4425 	(unsigned) (X - 2) <= 3
4426 
4427    We describe each set of comparisons as being either inside or outside
4428    a range, using a variable named like IN_P, and then describe the
4429    range with a lower and upper bound.  If one of the bounds is omitted,
4430    it represents either the highest or lowest value of the type.
4431 
4432    In the comments below, we represent a range by two numbers in brackets
4433    preceded by a "+" to designate being inside that range, or a "-" to
4434    designate being outside that range, so the condition can be inverted by
4435    flipping the prefix.  An omitted bound is represented by a "-".  For
4436    example, "- [-, 10]" means being outside the range starting at the lowest
4437    possible value and ending at 10, in other words, being greater than 10.
4438    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4439    always false.
4440 
4441    We set up things so that the missing bounds are handled in a consistent
4442    manner so neither a missing bound nor "true" and "false" need to be
4443    handled using a special case.  */
4444 
4445 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4446    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4447    and UPPER1_P are nonzero if the respective argument is an upper bound
4448    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4449    must be specified for a comparison.  ARG1 will be converted to ARG0's
4450    type if both are specified.  */
4451 
4452 static tree
4453 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4454 	     tree arg1, int upper1_p)
4455 {
4456   tree tem;
4457   int result;
4458   int sgn0, sgn1;
4459 
4460   /* If neither arg represents infinity, do the normal operation.
4461      Else, if not a comparison, return infinity.  Else handle the special
4462      comparison rules. Note that most of the cases below won't occur, but
4463      are handled for consistency.  */
4464 
4465   if (arg0 != 0 && arg1 != 0)
4466     {
4467       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4468 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4469       STRIP_NOPS (tem);
4470       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4471     }
4472 
4473   if (TREE_CODE_CLASS (code) != tcc_comparison)
4474     return 0;
4475 
4476   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4477      for neither.  In real maths, we cannot assume open-ended ranges are
4478      the same.  But this is computer arithmetic, where numbers are finite.
4479      We can therefore represent any unbounded bound by a value Z that is
4480      greater than any representable number.  This permits us to treat
4481      unbounded ranges as equal.  */
4482   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4483   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4484   switch (code)
4485     {
4486     case EQ_EXPR:
4487       result = sgn0 == sgn1;
4488       break;
4489     case NE_EXPR:
4490       result = sgn0 != sgn1;
4491       break;
4492     case LT_EXPR:
4493       result = sgn0 < sgn1;
4494       break;
4495     case LE_EXPR:
4496       result = sgn0 <= sgn1;
4497       break;
4498     case GT_EXPR:
4499       result = sgn0 > sgn1;
4500       break;
4501     case GE_EXPR:
4502       result = sgn0 >= sgn1;
4503       break;
4504     default:
4505       gcc_unreachable ();
4506     }
4507 
4508   return constant_boolean_node (result, type);
4509 }
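
/* Illustrative note (hypothetical example): range_binop (LE_EXPR,
   type, NULL_TREE, 1, c, 0) compares a missing upper bound (treated
   as a value Z above every representable number, SGN0 = 1) against a
   finite constant (SGN1 = 0), so the result is false; two missing
   upper bounds compare equal under EQ_EXPR.  */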
4510 
4511 /* Helper routine for make_range.  Perform one step for it, return
4512    new expression if the loop should continue or NULL_TREE if it should
4513    stop.  */
4514 
4515 tree
4516 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4517 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4518 		 bool *strict_overflow_p)
4519 {
4520   tree arg0_type = TREE_TYPE (arg0);
4521   tree n_low, n_high, low = *p_low, high = *p_high;
4522   int in_p = *p_in_p, n_in_p;
4523 
4524   switch (code)
4525     {
4526     case TRUTH_NOT_EXPR:
4527       /* We can only do something if the range is testing for zero.  */
4528       if (low == NULL_TREE || high == NULL_TREE
4529 	  || ! integer_zerop (low) || ! integer_zerop (high))
4530 	return NULL_TREE;
4531       *p_in_p = ! in_p;
4532       return arg0;
4533 
4534     case EQ_EXPR: case NE_EXPR:
4535     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4536       /* We can only do something if the range is testing for zero
4537 	 and if the second operand is an integer constant.  Note that
4538 	 saying something is "in" the range we make is done by
4539 	 complementing IN_P since it will set in the initial case of
4540 	 being not equal to zero; "out" is leaving it alone.  */
4541       if (low == NULL_TREE || high == NULL_TREE
4542 	  || ! integer_zerop (low) || ! integer_zerop (high)
4543 	  || TREE_CODE (arg1) != INTEGER_CST)
4544 	return NULL_TREE;
4545 
4546       switch (code)
4547 	{
4548 	case NE_EXPR:  /* - [c, c]  */
4549 	  low = high = arg1;
4550 	  break;
4551 	case EQ_EXPR:  /* + [c, c]  */
4552 	  in_p = ! in_p, low = high = arg1;
4553 	  break;
4554 	case GT_EXPR:  /* - [-, c] */
4555 	  low = 0, high = arg1;
4556 	  break;
4557 	case GE_EXPR:  /* + [c, -] */
4558 	  in_p = ! in_p, low = arg1, high = 0;
4559 	  break;
4560 	case LT_EXPR:  /* - [c, -] */
4561 	  low = arg1, high = 0;
4562 	  break;
4563 	case LE_EXPR:  /* + [-, c] */
4564 	  in_p = ! in_p, low = 0, high = arg1;
4565 	  break;
4566 	default:
4567 	  gcc_unreachable ();
4568 	}
4569 
4570       /* If this is an unsigned comparison, we also know that EXP is
4571 	 greater than or equal to zero.  We base the range tests we make
4572 	 on that fact, so we record it here so we can parse existing
4573 	 range tests.  We test arg0_type since often the return type
4574 	 of, e.g. EQ_EXPR, is boolean.  */
4575       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4576 	{
4577 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4578 			      in_p, low, high, 1,
4579 			      build_int_cst (arg0_type, 0),
4580 			      NULL_TREE))
4581 	    return NULL_TREE;
4582 
4583 	  in_p = n_in_p, low = n_low, high = n_high;
4584 
4585 	  /* If the high bound is missing, but we have a nonzero low
4586 	     bound, reverse the range so it goes from zero to the low bound
4587 	     minus 1.  */
4588 	  if (high == 0 && low && ! integer_zerop (low))
4589 	    {
4590 	      in_p = ! in_p;
4591 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4592 				  build_int_cst (TREE_TYPE (low), 1), 0);
4593 	      low = build_int_cst (arg0_type, 0);
4594 	    }
4595 	}
4596 
4597       *p_low = low;
4598       *p_high = high;
4599       *p_in_p = in_p;
4600       return arg0;
4601 
4602     case NEGATE_EXPR:
4603       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4604 	 low and high are non-NULL, then normalize will DTRT.  */
4605       if (!TYPE_UNSIGNED (arg0_type)
4606 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4607 	{
4608 	  if (low == NULL_TREE)
4609 	    low = TYPE_MIN_VALUE (arg0_type);
4610 	  if (high == NULL_TREE)
4611 	    high = TYPE_MAX_VALUE (arg0_type);
4612 	}
4613 
4614       /* (-x) IN [a,b] -> x in [-b, -a]  */
4615       n_low = range_binop (MINUS_EXPR, exp_type,
4616 			   build_int_cst (exp_type, 0),
4617 			   0, high, 1);
4618       n_high = range_binop (MINUS_EXPR, exp_type,
4619 			    build_int_cst (exp_type, 0),
4620 			    0, low, 0);
4621       if (n_high != 0 && TREE_OVERFLOW (n_high))
4622 	return NULL_TREE;
4623       goto normalize;
4624 
4625     case BIT_NOT_EXPR:
4626       /* ~ X -> -X - 1  */
4627       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4628 			 build_int_cst (exp_type, 1));
4629 
4630     case PLUS_EXPR:
4631     case MINUS_EXPR:
4632       if (TREE_CODE (arg1) != INTEGER_CST)
4633 	return NULL_TREE;
4634 
4635       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4636 	 move a constant to the other side.  */
4637       if (!TYPE_UNSIGNED (arg0_type)
4638 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4639 	return NULL_TREE;
4640 
4641       /* If EXP is signed, any overflow in the computation is undefined,
4642 	 so we don't worry about it so long as our computations on
4643 	 the bounds don't overflow.  For unsigned, overflow is defined
4644 	 and this is exactly the right thing.  */
4645       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4646 			   arg0_type, low, 0, arg1, 0);
4647       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4648 			    arg0_type, high, 1, arg1, 0);
4649       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4650 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4651 	return NULL_TREE;
4652 
4653       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4654 	*strict_overflow_p = true;
4655 
4656       normalize:
4657 	/* Check for an unsigned range which has wrapped around the maximum
4658 	   value thus making n_high < n_low, and normalize it.  */
4659 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4660 	  {
4661 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4662 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4663 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4664 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4665 
4666 	    /* If the range is of the form +/- [ x+1, x ], we won't
4667 	       be able to normalize it.  But then, it represents the
4668 	       whole range or the empty set, so make it
4669 	       +/- [ -, - ].  */
4670 	    if (tree_int_cst_equal (n_low, low)
4671 		&& tree_int_cst_equal (n_high, high))
4672 	      low = high = 0;
4673 	    else
4674 	      in_p = ! in_p;
4675 	  }
4676 	else
4677 	  low = n_low, high = n_high;
4678 
4679 	*p_low = low;
4680 	*p_high = high;
4681 	*p_in_p = in_p;
4682 	return arg0;
4683 
4684     CASE_CONVERT:
4685     case NON_LVALUE_EXPR:
4686       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4687 	return NULL_TREE;
4688 
4689       if (! INTEGRAL_TYPE_P (arg0_type)
4690 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4691 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4692 	return NULL_TREE;
4693 
4694       n_low = low, n_high = high;
4695 
4696       if (n_low != 0)
4697 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4698 
4699       if (n_high != 0)
4700 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4701 
4702       /* If we're converting arg0 from an unsigned type to the signed
4703 	 type of exp, we will be doing the comparison as unsigned.
4704 	 The tests above have already verified that LOW and HIGH
4705 	 are both positive.
4706 
4707 	 So we have to ensure that we will handle large unsigned
4708 	 values the same way that the current signed bounds treat
4709 	 negative values.  */
4710 
4711       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4712 	{
4713 	  tree high_positive;
4714 	  tree equiv_type;
4715 	  /* For fixed-point modes, we need to pass the saturating flag
4716 	     as the 2nd parameter.  */
4717 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4718 	    equiv_type
4719 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4720 						TYPE_SATURATING (arg0_type));
4721 	  else
4722 	    equiv_type
4723 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4724 
4725 	  /* A range without an upper bound is, naturally, unbounded.
4726 	     Since convert would have cropped a very large value, use
4727 	     the max value for the destination type.  */
4728 	  high_positive
4729 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4730 	      : TYPE_MAX_VALUE (arg0_type);
4731 
4732 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4733 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4734 					     fold_convert_loc (loc, arg0_type,
4735 							       high_positive),
4736 					     build_int_cst (arg0_type, 1));
4737 
4738 	  /* If the low bound is specified, "and" the range with the
4739 	     range for which the original unsigned value will be
4740 	     positive.  */
4741 	  if (low != 0)
4742 	    {
4743 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4744 				  1, fold_convert_loc (loc, arg0_type,
4745 						       integer_zero_node),
4746 				  high_positive))
4747 		return NULL_TREE;
4748 
4749 	      in_p = (n_in_p == in_p);
4750 	    }
4751 	  else
4752 	    {
4753 	      /* Otherwise, "or" the range with the range of the input
4754 		 that will be interpreted as negative.  */
4755 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4756 				  1, fold_convert_loc (loc, arg0_type,
4757 						       integer_zero_node),
4758 				  high_positive))
4759 		return NULL_TREE;
4760 
4761 	      in_p = (in_p != n_in_p);
4762 	    }
4763 	}
4764 
4765       *p_low = n_low;
4766       *p_high = n_high;
4767       *p_in_p = in_p;
4768       return arg0;
4769 
4770     default:
4771       return NULL_TREE;
4772     }
4773 }
4774 
4775 /* Given EXP, a logical expression, set the range it is testing into
4776    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4777    actually being tested.  *PLOW and *PHIGH will be made of the same
4778    type as the returned expression.  If EXP is not a comparison, we
4779    will most likely not be returning a useful value and range.  Set
4780    *STRICT_OVERFLOW_P to true if the return value is only valid
4781    because signed overflow is undefined; otherwise, do not change
4782    *STRICT_OVERFLOW_P.  */
4783 
4784 tree
4785 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4786 	    bool *strict_overflow_p)
4787 {
4788   enum tree_code code;
4789   tree arg0, arg1 = NULL_TREE;
4790   tree exp_type, nexp;
4791   int in_p;
4792   tree low, high;
4793   location_t loc = EXPR_LOCATION (exp);
4794 
4795   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4796      and see if we can refine the range.  Some of the cases below may not
4797      happen, but it doesn't seem worth worrying about this.  We "continue"
4798      the outer loop when we've changed something; otherwise we "break"
4799      the switch, which will "break" the while.  */
4800 
4801   in_p = 0;
4802   low = high = build_int_cst (TREE_TYPE (exp), 0);
4803 
4804   while (1)
4805     {
4806       code = TREE_CODE (exp);
4807       exp_type = TREE_TYPE (exp);
4808       arg0 = NULL_TREE;
4809 
4810       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4811 	{
4812 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4813 	    arg0 = TREE_OPERAND (exp, 0);
4814 	  if (TREE_CODE_CLASS (code) == tcc_binary
4815 	      || TREE_CODE_CLASS (code) == tcc_comparison
4816 	      || (TREE_CODE_CLASS (code) == tcc_expression
4817 		  && TREE_OPERAND_LENGTH (exp) > 1))
4818 	    arg1 = TREE_OPERAND (exp, 1);
4819 	}
4820       if (arg0 == NULL_TREE)
4821 	break;
4822 
4823       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4824 			      &high, &in_p, strict_overflow_p);
4825       if (nexp == NULL_TREE)
4826 	break;
4827       exp = nexp;
4828     }
4829 
4830   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4831   if (TREE_CODE (exp) == INTEGER_CST)
4832     {
4833       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4834 						 exp, 0, low, 0))
4835 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4836 						    exp, 1, high, 1)));
4837       low = high = 0;
4838       exp = 0;
4839     }
4840 
4841   *pin_p = in_p, *plow = low, *phigh = high;
4842   return exp;
4843 }
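
/* A worked example of the loop above (with illustrative values): for
   EXP = (x + 2) > 3, the GT_EXPR step yields the range + [4, -] on
   x + 2, and the PLUS_EXPR step then shifts it to + [2, -] on x itself,
   so make_range returns x with *PIN_P = 1, *PLOW = 2 and *PHIGH = 0
   (unbounded above).  */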
4844 
4845 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
4846    a bitwise check, i.e. when
4847      LOW  == 0xXX...X00...0
4848      HIGH == 0xXX...X11...1
4849    Return the corresponding mask in MASK and the stem in VALUE.  */
4850 
4851 static bool
4852 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4853 		  tree *value)
4854 {
4855   if (TREE_CODE (low) != INTEGER_CST
4856       || TREE_CODE (high) != INTEGER_CST)
4857     return false;
4858 
4859   unsigned prec = TYPE_PRECISION (type);
4860   wide_int lo = wi::to_wide (low, prec);
4861   wide_int hi = wi::to_wide (high, prec);
4862 
4863   wide_int end_mask = lo ^ hi;
4864   if ((end_mask & (end_mask + 1)) != 0
4865       || (lo & end_mask) != 0)
4866     return false;
4867 
4868   wide_int stem_mask = ~end_mask;
4869   wide_int stem = lo & stem_mask;
4870   if (stem != (hi & stem_mask))
4871     return false;
4872 
4873   *mask = wide_int_to_tree (type, stem_mask);
4874   *value = wide_int_to_tree (type, stem);
4875 
4876   return true;
4877 }
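
/* A minimal standalone sketch of the test above on plain 32-bit
   unsigned values (the bounds 0x50 and 0x5f are hypothetical):

     static int
     in_range_masked (unsigned int x)
     {
       unsigned int lo = 0x50, hi = 0x5f;
       unsigned int end_mask = lo ^ hi;   // 0x0f: bits where LO and HI differ
       // Maskable only if the differing bits form one trailing block of
       // ones and LO is zero there; then one masked compare suffices.
       if ((end_mask & (end_mask + 1)) != 0 || (lo & end_mask) != 0)
	 return x >= lo && x <= hi;	  // not maskable: ordinary compares
       return (x & ~end_mask) == lo;	  // the stem comparison
     }

   so "x >= 0x50 && x <= 0x5f" becomes "(x & ~0xf) == 0x50".  */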
4878 
4879 /* Helper routine for build_range_check and match.pd.  Return the type in
4880    which to perform the check, or NULL if it shouldn't be optimized.  */
4881 
4882 tree
4883 range_check_type (tree etype)
4884 {
4885   /* First make sure that arithmetic in this type is valid, then make sure
4886      that it wraps around.  */
4887   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4888     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4889 					    TYPE_UNSIGNED (etype));
4890 
4891   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4892     {
4893       tree utype, minv, maxv;
4894 
4895       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4896 	 for the type in question, as we rely on this here.  */
4897       utype = unsigned_type_for (etype);
4898       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4899       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4900 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4901       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4902 
4903       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4904 				      minv, 1, maxv, 1)))
4905 	etype = utype;
4906       else
4907 	return NULL_TREE;
4908     }
4909   return etype;
4910 }
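
/* For instance, for ETYPE == int (where signed arithmetic does not wrap)
   this returns unsigned int, after verifying that (unsigned) INT_MAX + 1
   equals (unsigned) INT_MIN, so that the unsigned subtraction done by
   build_range_check below behaves like a rotation of the signed range.  */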
4911 
4912 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4913    type, TYPE, return an expression to test if EXP is in (or out of, depending
4914    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4915 
4916 tree
4917 build_range_check (location_t loc, tree type, tree exp, int in_p,
4918 		   tree low, tree high)
4919 {
4920   tree etype = TREE_TYPE (exp), mask, value;
4921 
4922   /* Disable this optimization for function pointer expressions
4923      on targets that require function pointer canonicalization.  */
4924   if (targetm.have_canonicalize_funcptr_for_compare ()
4925       && POINTER_TYPE_P (etype)
4926       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
4927     return NULL_TREE;
4928 
4929   if (! in_p)
4930     {
4931       value = build_range_check (loc, type, exp, 1, low, high);
4932       if (value != 0)
4933         return invert_truthvalue_loc (loc, value);
4934 
4935       return 0;
4936     }
4937 
4938   if (low == 0 && high == 0)
4939     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4940 
4941   if (low == 0)
4942     return fold_build2_loc (loc, LE_EXPR, type, exp,
4943 			    fold_convert_loc (loc, etype, high));
4944 
4945   if (high == 0)
4946     return fold_build2_loc (loc, GE_EXPR, type, exp,
4947 			    fold_convert_loc (loc, etype, low));
4948 
4949   if (operand_equal_p (low, high, 0))
4950     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4951 			    fold_convert_loc (loc, etype, low));
4952 
4953   if (TREE_CODE (exp) == BIT_AND_EXPR
4954       && maskable_range_p (low, high, etype, &mask, &value))
4955     return fold_build2_loc (loc, EQ_EXPR, type,
4956 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
4957 					     exp, mask),
4958 			    value);
4959 
4960   if (integer_zerop (low))
4961     {
4962       if (! TYPE_UNSIGNED (etype))
4963 	{
4964 	  etype = unsigned_type_for (etype);
4965 	  high = fold_convert_loc (loc, etype, high);
4966 	  exp = fold_convert_loc (loc, etype, exp);
4967 	}
4968       return build_range_check (loc, type, exp, 1, 0, high);
4969     }
4970 
4971   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4972   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4973     {
4974       int prec = TYPE_PRECISION (etype);
4975 
4976       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4977 	{
4978 	  if (TYPE_UNSIGNED (etype))
4979 	    {
4980 	      tree signed_etype = signed_type_for (etype);
4981 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4982 		etype
4983 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4984 	      else
4985 		etype = signed_etype;
4986 	      exp = fold_convert_loc (loc, etype, exp);
4987 	    }
4988 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4989 				  build_int_cst (etype, 0));
4990 	}
4991     }
4992 
4993   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4994      This requires wrap-around arithmetic for the type of the expression.  */
4995   etype = range_check_type (etype);
4996   if (etype == NULL_TREE)
4997     return NULL_TREE;
4998 
4999   if (POINTER_TYPE_P (etype))
5000     etype = unsigned_type_for (etype);
5001 
5002   high = fold_convert_loc (loc, etype, high);
5003   low = fold_convert_loc (loc, etype, low);
5004   exp = fold_convert_loc (loc, etype, exp);
5005 
5006   value = const_binop (MINUS_EXPR, high, low);
5007 
5008   if (value != 0 && !TREE_OVERFLOW (value))
5009     return build_range_check (loc, type,
5010 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5011 			      1, build_int_cst (etype, 0), value);
5012 
5013   return 0;
5014 }
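
/* The last transformation above is the classic unsigned-subtraction range
   check.  A sketch in plain C (the bounds 'a'..'z' are hypothetical):

     // c >= 'a' && c <= 'z' becomes, in the wrapping unsigned type,
     ((unsigned int) c - 'a') <= (unsigned int) ('z' - 'a')

   Values of c below 'a' wrap around to very large unsigned numbers and
   fail the single comparison, so two compares and a branch collapse into
   one compare.  */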
5015 
5016 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5017 
5018 static tree
5019 range_predecessor (tree val)
5020 {
5021   tree type = TREE_TYPE (val);
5022 
5023   if (INTEGRAL_TYPE_P (type)
5024       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5025     return 0;
5026   else
5027     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5028 			build_int_cst (TREE_TYPE (val), 1), 0);
5029 }
5030 
5031 /* Return the successor of VAL in its type, handling the infinite case.  */
5032 
5033 static tree
5034 range_successor (tree val)
5035 {
5036   tree type = TREE_TYPE (val);
5037 
5038   if (INTEGRAL_TYPE_P (type)
5039       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5040     return 0;
5041   else
5042     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5043 			build_int_cst (TREE_TYPE (val), 1), 0);
5044 }
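
/* For example, range_successor of 255 in unsigned char is 0 (the value is
   already TYPE_MAX_VALUE, so no successor exists), which the callers below
   treat as "punt".  */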
5045 
5046 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5047    can, 0 if we can't.  Set the output range into the specified parameters.  */
5048 
5049 bool
5050 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5051 	      tree high0, int in1_p, tree low1, tree high1)
5052 {
5053   int no_overlap;
5054   int subset;
5055   int temp;
5056   tree tem;
5057   int in_p;
5058   tree low, high;
5059   int lowequal = ((low0 == 0 && low1 == 0)
5060 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5061 						low0, 0, low1, 0)));
5062   int highequal = ((high0 == 0 && high1 == 0)
5063 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5064 						 high0, 1, high1, 1)));
5065 
5066   /* Make range 0 be the range that starts first, or ends last if they
5067      start at the same value.  Swap them if it isn't.  */
5068   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5069 				 low0, 0, low1, 0))
5070       || (lowequal
5071 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5072 					high1, 1, high0, 1))))
5073     {
5074       temp = in0_p, in0_p = in1_p, in1_p = temp;
5075       tem = low0, low0 = low1, low1 = tem;
5076       tem = high0, high0 = high1, high1 = tem;
5077     }
5078 
5079   /* Now flag two cases, whether the ranges are disjoint or whether the
5080      second range is totally subsumed in the first.  Note that the tests
5081      below are simplified by the ones above.  */
5082   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5083 					  high0, 1, low1, 0));
5084   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5085 				      high1, 1, high0, 1));
5086 
5087   /* We now have four cases, depending on whether we are including or
5088      excluding the two ranges.  */
5089   if (in0_p && in1_p)
5090     {
5091       /* If they don't overlap, the result is false.  If the second range
5092 	 is a subset it is the result.  Otherwise, the range is from the start
5093 	 of the second to the end of the first.  */
5094       if (no_overlap)
5095 	in_p = 0, low = high = 0;
5096       else if (subset)
5097 	in_p = 1, low = low1, high = high1;
5098       else
5099 	in_p = 1, low = low1, high = high0;
5100     }
5101 
5102   else if (in0_p && ! in1_p)
5103     {
5104       /* If they don't overlap, the result is the first range.  If they are
5105 	 equal, the result is false.  If the second range is a subset of the
5106 	 first, and the ranges begin at the same place, we go from just after
5107 	 the end of the second range to the end of the first.  If the second
5108 	 range is not a subset of the first, or if it is a subset and both
5109 	 ranges end at the same place, the range starts at the start of the
5110 	 first range and ends just before the second range.
5111 	 Otherwise, we can't describe this as a single range.  */
5112       if (no_overlap)
5113 	in_p = 1, low = low0, high = high0;
5114       else if (lowequal && highequal)
5115 	in_p = 0, low = high = 0;
5116       else if (subset && lowequal)
5117 	{
5118 	  low = range_successor (high1);
5119 	  high = high0;
5120 	  in_p = 1;
5121 	  if (low == 0)
5122 	    {
5123 	      /* We are in the weird situation where high0 > high1 but
5124 		 high1 has no successor.  Punt.  */
5125 	      return 0;
5126 	    }
5127 	}
5128       else if (! subset || highequal)
5129 	{
5130 	  low = low0;
5131 	  high = range_predecessor (low1);
5132 	  in_p = 1;
5133 	  if (high == 0)
5134 	    {
5135 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5136 	      return 0;
5137 	    }
5138 	}
5139       else
5140 	return 0;
5141     }
5142 
5143   else if (! in0_p && in1_p)
5144     {
5145       /* If they don't overlap, the result is the second range.  If the second
5146 	 is a subset of the first, the result is false.  Otherwise,
5147 	 the range starts just after the first range and ends at the
5148 	 end of the second.  */
5149       if (no_overlap)
5150 	in_p = 1, low = low1, high = high1;
5151       else if (subset || highequal)
5152 	in_p = 0, low = high = 0;
5153       else
5154 	{
5155 	  low = range_successor (high0);
5156 	  high = high1;
5157 	  in_p = 1;
5158 	  if (low == 0)
5159 	    {
5160 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5161 	      return 0;
5162 	    }
5163 	}
5164     }
5165 
5166   else
5167     {
5168       /* The case where we are excluding both ranges.  Here the complex case
5169 	 is if they don't overlap.  In that case, the only time we have a
5170 	 range is if they are adjacent.  If the second is a subset of the
5171 	 first, the result is the first.  Otherwise, the range to exclude
5172 	 starts at the beginning of the first range and ends at the end of the
5173 	 second.  */
5174       if (no_overlap)
5175 	{
5176 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5177 					 range_successor (high0),
5178 					 1, low1, 0)))
5179 	    in_p = 0, low = low0, high = high1;
5180 	  else
5181 	    {
5182 	      /* Canonicalize - [min, x] into - [-, x].  */
5183 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5184 		switch (TREE_CODE (TREE_TYPE (low0)))
5185 		  {
5186 		  case ENUMERAL_TYPE:
5187 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5188 				  GET_MODE_BITSIZE
5189 				    (TYPE_MODE (TREE_TYPE (low0)))))
5190 		      break;
5191 		    /* FALLTHROUGH */
5192 		  case INTEGER_TYPE:
5193 		    if (tree_int_cst_equal (low0,
5194 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5195 		      low0 = 0;
5196 		    break;
5197 		  case POINTER_TYPE:
5198 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5199 			&& integer_zerop (low0))
5200 		      low0 = 0;
5201 		    break;
5202 		  default:
5203 		    break;
5204 		  }
5205 
5206 	      /* Canonicalize - [x, max] into - [x, -].  */
5207 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5208 		switch (TREE_CODE (TREE_TYPE (high1)))
5209 		  {
5210 		  case ENUMERAL_TYPE:
5211 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5212 				  GET_MODE_BITSIZE
5213 				    (TYPE_MODE (TREE_TYPE (high1)))))
5214 		      break;
5215 		    /* FALLTHROUGH */
5216 		  case INTEGER_TYPE:
5217 		    if (tree_int_cst_equal (high1,
5218 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5219 		      high1 = 0;
5220 		    break;
5221 		  case POINTER_TYPE:
5222 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5223 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5224 						       high1, 1,
5225 						       build_int_cst (TREE_TYPE (high1), 1),
5226 						       1)))
5227 		      high1 = 0;
5228 		    break;
5229 		  default:
5230 		    break;
5231 		  }
5232 
5233 	      /* The ranges might also be adjacent between the maximum and
5234 	         minimum values of the given type.  For
5235 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5236 	         return + [x + 1, y - 1].  */
5237 	      if (low0 == 0 && high1 == 0)
5238 	        {
5239 		  low = range_successor (high0);
5240 		  high = range_predecessor (low1);
5241 		  if (low == 0 || high == 0)
5242 		    return 0;
5243 
5244 		  in_p = 1;
5245 		}
5246 	      else
5247 		return 0;
5248 	    }
5249 	}
5250       else if (subset)
5251 	in_p = 0, low = low0, high = high0;
5252       else
5253 	in_p = 0, low = low0, high = high1;
5254     }
5255 
5256   *pin_p = in_p, *plow = low, *phigh = high;
5257   return 1;
5258 }
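
/* A worked example of the in0_p && in1_p case (with illustrative bounds):
   merging + [2, 10] with + [5, 20] is neither disjoint nor a subset, so
   the result is the overlap + [5, 10], i.e. low1 .. high0 once range 0 is
   the one that starts first.  */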
5259 
5260 
5261 /* Subroutine of fold, looking inside expressions of the form
5262    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5263    of the COND_EXPR.  This function is also used to optimize
5264    A op B ? C : A, by reversing the comparison first.
5265 
5266    Return a folded expression whose code is not a COND_EXPR
5267    anymore, or NULL_TREE if no folding opportunity is found.  */
5268 
5269 static tree
5270 fold_cond_expr_with_comparison (location_t loc, tree type,
5271 				tree arg0, tree arg1, tree arg2)
5272 {
5273   enum tree_code comp_code = TREE_CODE (arg0);
5274   tree arg00 = TREE_OPERAND (arg0, 0);
5275   tree arg01 = TREE_OPERAND (arg0, 1);
5276   tree arg1_type = TREE_TYPE (arg1);
5277   tree tem;
5278 
5279   STRIP_NOPS (arg1);
5280   STRIP_NOPS (arg2);
5281 
5282   /* If we have A op 0 ? A : -A, consider applying the following
5283      transformations:
5284 
5285      A == 0? A : -A    same as -A
5286      A != 0? A : -A    same as A
5287      A >= 0? A : -A    same as abs (A)
5288      A > 0?  A : -A    same as abs (A)
5289      A <= 0? A : -A    same as -abs (A)
5290      A < 0?  A : -A    same as -abs (A)
5291 
5292      None of these transformations work for modes with signed
5293      zeros.  If A is +/-0, the first two transformations will
5294      change the sign of the result (from +0 to -0, or vice
5295      versa).  The last four will fix the sign of the result,
5296      even though the original expressions could be positive or
5297      negative, depending on the sign of A.
5298 
5299      Note that all these transformations are correct if A is
5300      NaN, since the two alternatives (A and -A) are also NaNs.  */
5301   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5302       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5303 	  ? real_zerop (arg01)
5304 	  : integer_zerop (arg01))
5305       && ((TREE_CODE (arg2) == NEGATE_EXPR
5306 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5307 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5308 	        have already been folded to Y-X; check for that.  */
5309 	  || (TREE_CODE (arg1) == MINUS_EXPR
5310 	      && TREE_CODE (arg2) == MINUS_EXPR
5311 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5312 				  TREE_OPERAND (arg2, 1), 0)
5313 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5314 				  TREE_OPERAND (arg2, 0), 0))))
5315     switch (comp_code)
5316       {
5317       case EQ_EXPR:
5318       case UNEQ_EXPR:
5319 	tem = fold_convert_loc (loc, arg1_type, arg1);
5320 	return fold_convert_loc (loc, type, negate_expr (tem));
5321       case NE_EXPR:
5322       case LTGT_EXPR:
5323 	return fold_convert_loc (loc, type, arg1);
5324       case UNGE_EXPR:
5325       case UNGT_EXPR:
5326 	if (flag_trapping_math)
5327 	  break;
5328 	/* Fall through.  */
5329       case GE_EXPR:
5330       case GT_EXPR:
5331 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5332 	  break;
5333 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5334 	return fold_convert_loc (loc, type, tem);
5335       case UNLE_EXPR:
5336       case UNLT_EXPR:
5337 	if (flag_trapping_math)
5338 	  break;
5339 	/* FALLTHRU */
5340       case LE_EXPR:
5341       case LT_EXPR:
5342 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5343 	  break;
5344 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5345 	return negate_expr (fold_convert_loc (loc, type, tem));
5346       default:
5347 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5348 	break;
5349       }
5350 
5351   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5352      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5353      both transformations are correct when A is NaN: A != 0
5354      is then true, and A == 0 is false.  */
5355 
5356   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5357       && integer_zerop (arg01) && integer_zerop (arg2))
5358     {
5359       if (comp_code == NE_EXPR)
5360 	return fold_convert_loc (loc, type, arg1);
5361       else if (comp_code == EQ_EXPR)
5362 	return build_zero_cst (type);
5363     }
5364 
5365   /* Try some transformations of A op B ? A : B.
5366 
5367      A == B? A : B    same as B
5368      A != B? A : B    same as A
5369      A >= B? A : B    same as max (A, B)
5370      A > B?  A : B    same as max (B, A)
5371      A <= B? A : B    same as min (A, B)
5372      A < B?  A : B    same as min (B, A)
5373 
5374      As above, these transformations don't work in the presence
5375      of signed zeros.  For example, if A and B are zeros of
5376      opposite sign, the first two transformations will change
5377      the sign of the result.  In the last four, the original
5378      expressions give different results for (A=+0, B=-0) and
5379      (A=-0, B=+0), but the transformed expressions do not.
5380 
5381      The first two transformations are correct if either A or B
5382      is a NaN.  In the first transformation, the condition will
5383      be false, and B will indeed be chosen.  In the case of the
5384      second transformation, the condition A != B will be true,
5385      and A will be chosen.
5386 
5387      The conversions to max() and min() are not correct if B is
5388      a number and A is not.  The conditions in the original
5389      expressions will be false, so all four give B.  The min()
5390      and max() versions would give a NaN instead.  */
5391   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5392       && operand_equal_for_comparison_p (arg01, arg2)
5393       /* Avoid these transformations if the COND_EXPR may be used
5394 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5395       && (in_gimple_form
5396 	  || VECTOR_TYPE_P (type)
5397 	  || (! lang_GNU_CXX ()
5398 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5399 	  || ! maybe_lvalue_p (arg1)
5400 	  || ! maybe_lvalue_p (arg2)))
5401     {
5402       tree comp_op0 = arg00;
5403       tree comp_op1 = arg01;
5404       tree comp_type = TREE_TYPE (comp_op0);
5405 
5406       switch (comp_code)
5407 	{
5408 	case EQ_EXPR:
5409 	  return fold_convert_loc (loc, type, arg2);
5410 	case NE_EXPR:
5411 	  return fold_convert_loc (loc, type, arg1);
5412 	case LE_EXPR:
5413 	case LT_EXPR:
5414 	case UNLE_EXPR:
5415 	case UNLT_EXPR:
5416 	  /* In C++ a ?: expression can be an lvalue, so put the
5417 	     operand which will be used if they are equal first
5418 	     so that we can convert this back to the
5419 	     corresponding COND_EXPR.  */
5420 	  if (!HONOR_NANS (arg1))
5421 	    {
5422 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5423 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5424 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5425 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5426 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5427 				   comp_op1, comp_op0);
5428 	      return fold_convert_loc (loc, type, tem);
5429 	    }
5430 	  break;
5431 	case GE_EXPR:
5432 	case GT_EXPR:
5433 	case UNGE_EXPR:
5434 	case UNGT_EXPR:
5435 	  if (!HONOR_NANS (arg1))
5436 	    {
5437 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5438 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5439 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5440 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5441 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5442 				   comp_op1, comp_op0);
5443 	      return fold_convert_loc (loc, type, tem);
5444 	    }
5445 	  break;
5446 	case UNEQ_EXPR:
5447 	  if (!HONOR_NANS (arg1))
5448 	    return fold_convert_loc (loc, type, arg2);
5449 	  break;
5450 	case LTGT_EXPR:
5451 	  if (!HONOR_NANS (arg1))
5452 	    return fold_convert_loc (loc, type, arg1);
5453 	  break;
5454 	default:
5455 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5456 	  break;
5457 	}
5458     }
5459 
5460   return NULL_TREE;
5461 }
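
/* The signed-zero restriction above is easy to see on doubles.  A small
   sketch (with hypothetical values):

     double a = -0.0;
     double r1 = a == 0.0 ? a : -a;	// yields -0.0
     double r2 = -a;			// yields +0.0

   Rewriting "a == 0 ? a : -a" as plain "-a" would flip the sign of zero,
   which is why HONOR_SIGNED_ZEROS guards the whole transformation.  */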
5462 
5463 
5464 
5465 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5466 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5467   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5468 		false) >= 2)
5469 #endif
5470 
5471 /* EXP is some logical combination of boolean tests.  See if we can
5472    merge it into some range test.  Return the new tree if so.  */
5473 
5474 static tree
5475 fold_range_test (location_t loc, enum tree_code code, tree type,
5476 		 tree op0, tree op1)
5477 {
5478   int or_op = (code == TRUTH_ORIF_EXPR
5479 	       || code == TRUTH_OR_EXPR);
5480   int in0_p, in1_p, in_p;
5481   tree low0, low1, low, high0, high1, high;
5482   bool strict_overflow_p = false;
5483   tree tem, lhs, rhs;
5484   const char * const warnmsg = G_("assuming signed overflow does not occur "
5485 				  "when simplifying range test");
5486 
5487   if (!INTEGRAL_TYPE_P (type))
5488     return 0;
5489 
5490   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5491   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5492 
5493   /* If this is an OR operation, invert both sides; we will invert
5494      again at the end.  */
5495   if (or_op)
5496     in0_p = ! in0_p, in1_p = ! in1_p;
5497 
5498   /* If both expressions are the same, if we can merge the ranges, and we
5499      can build the range test, return it or it inverted.  If one of the
5500      ranges is always true or always false, consider it to be the same
5501      expression as the other.  */
5502   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5503       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5504 		       in1_p, low1, high1)
5505       && (tem = (build_range_check (loc, type,
5506 				    lhs != 0 ? lhs
5507 				    : rhs != 0 ? rhs : integer_zero_node,
5508 				    in_p, low, high))) != 0)
5509     {
5510       if (strict_overflow_p)
5511 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5512       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5513     }
5514 
5515   /* On machines where the branch cost is high, if this is a
5516      short-circuited branch and the underlying object on both sides
5517      is the same, make a non-short-circuit operation.  */
5518   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5519   if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
5520     logical_op_non_short_circuit
5521       = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
5522   if (logical_op_non_short_circuit
5523       && !flag_sanitize_coverage
5524       && lhs != 0 && rhs != 0
5525       && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5526       && operand_equal_p (lhs, rhs, 0))
5527     {
5528       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5529 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5530 	 which cases we can't do this.  */
5531       if (simple_operand_p (lhs))
5532 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5533 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5534 			   type, op0, op1);
5535 
5536       else if (!lang_hooks.decls.global_bindings_p ()
5537 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5538 	{
5539 	  tree common = save_expr (lhs);
5540 
5541 	  if ((lhs = build_range_check (loc, type, common,
5542 					or_op ? ! in0_p : in0_p,
5543 					low0, high0)) != 0
5544 	      && (rhs = build_range_check (loc, type, common,
5545 					   or_op ? ! in1_p : in1_p,
5546 					   low1, high1)) != 0)
5547 	    {
5548 	      if (strict_overflow_p)
5549 		fold_overflow_warning (warnmsg,
5550 				       WARN_STRICT_OVERFLOW_COMPARISON);
5551 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5552 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5553 				 type, lhs, rhs);
5554 	    }
5555 	}
5556     }
5557 
5558   return 0;
5559 }
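
/* For example, "ch >= '0' && ch <= '9'" gives two ranges on CH that
   merge_ranges combines into the single range + ['0', '9'], which
   build_range_check then turns into one unsigned comparison; for
   "ch < '0' || ch > '9'" both ranges are inverted first, merged the same
   way, and the result inverted back at the end.  */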
5560 
5561 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5562    P-bit value.  Arrange things so the extra bits will be set to zero if and
5563    only if C is sign-extended to its full width.  If MASK is nonzero,
5564    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5565 
5566 static tree
5567 unextend (tree c, int p, int unsignedp, tree mask)
5568 {
5569   tree type = TREE_TYPE (c);
5570   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5571   tree temp;
5572 
5573   if (p == modesize || unsignedp)
5574     return c;
5575 
5576   /* We work by getting just the sign bit into the low-order bit, then
5577      into the high-order bit, then sign-extend.  We then XOR that value
5578      with C.  */
5579   temp = build_int_cst (TREE_TYPE (c),
5580 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5581 
5582   /* We must use a signed type in order to get an arithmetic right shift.
5583      However, we must also avoid introducing accidental overflows, so that
5584      a subsequent call to integer_zerop will work.  Hence we must
5585      do the type conversion here.  At this point, the constant is either
5586      zero or one, and the conversion to a signed type can never overflow.
5587      We could get an overflow if this conversion is done anywhere else.  */
5588   if (TYPE_UNSIGNED (type))
5589     temp = fold_convert (signed_type_for (type), temp);
5590 
5591   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5592   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5593   if (mask != 0)
5594     temp = const_binop (BIT_AND_EXPR, temp,
5595 			fold_convert (TREE_TYPE (c), mask));
5596   /* If necessary, convert the type back to match the type of C.  */
5597   if (TYPE_UNSIGNED (type))
5598     temp = fold_convert (type, temp);
5599 
5600   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5601 }
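
/* A sketch of the same bit manipulation on plain integers, assuming a
   P == 8 bit field in a 32-bit mode (so modesize == 32) and the usual
   arithmetic right shift of negative values; MASK is ignored here:

     unsigned int
     unextend8 (unsigned int c)
     {
       unsigned int sign = (c >> 7) & 1;		// low-order sign bit
       int top = (int) (sign << 31);			// into the high bit
       unsigned int ext = (unsigned int) (top >> (31 - 8));  // sign-extend
       return c ^ ext;					// flip extra bits
     }

   If C arrived sign-extended, bits 8..31 already match the copies in EXT
   and the XOR clears them; otherwise it sets them, which is what the
   consistency checks in the caller rely on.  */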
5602 
5603 /* For an expression that has the form
5604      (A && B) || ~B
5605    or
5606      (A || B) && ~B,
5607    we can drop one of the inner expressions and simplify to
5608      A || ~B
5609    or
5610      A && ~B
5611    LOC is the location of the resulting expression.  OP is the inner
5612    logical operation; the left-hand side in the examples above, while CMPOP
5613    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5614    removing a condition that guards another, as in
5615      (A != NULL && A->...) || A == NULL
5616    which we must not transform.  If RHS_ONLY is true, only eliminate the
5617    right-most operand of the inner logical operation.  */
5618 
5619 static tree
5620 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5621 				 bool rhs_only)
5622 {
5623   tree type = TREE_TYPE (cmpop);
5624   enum tree_code code = TREE_CODE (cmpop);
5625   enum tree_code truthop_code = TREE_CODE (op);
5626   tree lhs = TREE_OPERAND (op, 0);
5627   tree rhs = TREE_OPERAND (op, 1);
5628   tree orig_lhs = lhs, orig_rhs = rhs;
5629   enum tree_code rhs_code = TREE_CODE (rhs);
5630   enum tree_code lhs_code = TREE_CODE (lhs);
5631   enum tree_code inv_code;
5632 
5633   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5634     return NULL_TREE;
5635 
5636   if (TREE_CODE_CLASS (code) != tcc_comparison)
5637     return NULL_TREE;
5638 
5639   if (rhs_code == truthop_code)
5640     {
5641       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5642       if (newrhs != NULL_TREE)
5643 	{
5644 	  rhs = newrhs;
5645 	  rhs_code = TREE_CODE (rhs);
5646 	}
5647     }
5648   if (lhs_code == truthop_code && !rhs_only)
5649     {
5650       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5651       if (newlhs != NULL_TREE)
5652 	{
5653 	  lhs = newlhs;
5654 	  lhs_code = TREE_CODE (lhs);
5655 	}
5656     }
5657 
5658   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5659   if (inv_code == rhs_code
5660       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5661       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5662     return lhs;
5663   if (!rhs_only && inv_code == lhs_code
5664       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5665       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5666     return rhs;
5667   if (rhs != orig_rhs || lhs != orig_lhs)
5668     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5669 			    lhs, rhs);
5670   return NULL_TREE;
5671 }
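
/* For instance, in "(a < 10 && b != 0) || a >= 10" the comparison
   a >= 10 is the inverse of the a < 10 arm, so calling this with
   OP = (a < 10 && b != 0) and CMPOP = (a >= 10) returns b != 0 and the
   whole expression becomes "b != 0 || a >= 10".  RHS_ONLY prevents the
   same elimination when the dropped test guards the other one, as in
   the pointer example in the comment above.  */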
5672 
5673 /* Find ways of folding logical expressions of LHS and RHS:
5674    Try to merge two comparisons to the same innermost item.
5675    Look for range tests like "ch >= '0' && ch <= '9'".
5676    Look for combinations of simple terms on machines with expensive branches
5677    and evaluate the RHS unconditionally.
5678 
5679    For example, if we have p->a == 2 && p->b == 4 and we can make an
5680    object large enough to span both A and B, we can do this with a comparison
5681    against the object ANDed with a mask.
5682 
5683    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5684    operations to do this with one comparison.
5685 
5686    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5687    function and the one above.
5688 
5689    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5690    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5691 
5692    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5693    two operands.
5694 
5695    We return the simplified tree or 0 if no optimization is possible.  */
5696 
5697 static tree
5698 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5699 		    tree lhs, tree rhs)
5700 {
5701   /* If this is the "or" of two comparisons, we can do something if
5702      the comparisons are NE_EXPR.  If this is the "and", we can do something
5703      if the comparisons are EQ_EXPR.  I.e.,
5704 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5705 
5706      WANTED_CODE is this operation code.  For single bit fields, we can
5707      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5708      comparison for one-bit fields.  */
5709 
5710   enum tree_code wanted_code;
5711   enum tree_code lcode, rcode;
5712   tree ll_arg, lr_arg, rl_arg, rr_arg;
5713   tree ll_inner, lr_inner, rl_inner, rr_inner;
5714   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5715   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5716   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5717   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5718   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5719   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5720   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5721   scalar_int_mode lnmode, rnmode;
5722   tree ll_mask, lr_mask, rl_mask, rr_mask;
5723   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5724   tree l_const, r_const;
5725   tree lntype, rntype, result;
5726   HOST_WIDE_INT first_bit, end_bit;
5727   int volatilep;
5728 
5729   /* Start by getting the comparison codes.  Fail if anything is volatile.
5730      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5731      it were surrounded with a NE_EXPR.  */
5732 
5733   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5734     return 0;
5735 
5736   lcode = TREE_CODE (lhs);
5737   rcode = TREE_CODE (rhs);
5738 
5739   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5740     {
5741       lhs = build2 (NE_EXPR, truth_type, lhs,
5742 		    build_int_cst (TREE_TYPE (lhs), 0));
5743       lcode = NE_EXPR;
5744     }
5745 
5746   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5747     {
5748       rhs = build2 (NE_EXPR, truth_type, rhs,
5749 		    build_int_cst (TREE_TYPE (rhs), 0));
5750       rcode = NE_EXPR;
5751     }
5752 
5753   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5754       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5755     return 0;
5756 
5757   ll_arg = TREE_OPERAND (lhs, 0);
5758   lr_arg = TREE_OPERAND (lhs, 1);
5759   rl_arg = TREE_OPERAND (rhs, 0);
5760   rr_arg = TREE_OPERAND (rhs, 1);
5761 
5762   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5763   if (simple_operand_p (ll_arg)
5764       && simple_operand_p (lr_arg))
5765     {
5766       if (operand_equal_p (ll_arg, rl_arg, 0)
5767           && operand_equal_p (lr_arg, rr_arg, 0))
5768 	{
5769           result = combine_comparisons (loc, code, lcode, rcode,
5770 					truth_type, ll_arg, lr_arg);
5771 	  if (result)
5772 	    return result;
5773 	}
5774       else if (operand_equal_p (ll_arg, rr_arg, 0)
5775                && operand_equal_p (lr_arg, rl_arg, 0))
5776 	{
5777           result = combine_comparisons (loc, code, lcode,
5778 					swap_tree_comparison (rcode),
5779 					truth_type, ll_arg, lr_arg);
5780 	  if (result)
5781 	    return result;
5782 	}
5783     }
5784 
5785   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5786 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5787 
5788   /* If the RHS can be evaluated unconditionally and its operands are
5789      simple, it wins to evaluate the RHS unconditionally on machines
5790      with expensive branches.  In this case, this isn't a comparison
5791      that can be merged.  */
5792 
5793   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5794 		   false) >= 2
5795       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5796       && simple_operand_p (rl_arg)
5797       && simple_operand_p (rr_arg))
5798     {
5799       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5800       if (code == TRUTH_OR_EXPR
5801 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5802 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5803 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5804 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5805 	return build2_loc (loc, NE_EXPR, truth_type,
5806 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5807 				   ll_arg, rl_arg),
5808 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5809 
5810       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5811       if (code == TRUTH_AND_EXPR
5812 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5813 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5814 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5815 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5816 	return build2_loc (loc, EQ_EXPR, truth_type,
5817 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5818 				   ll_arg, rl_arg),
5819 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5820     }
5821 
5822   /* See if the comparisons can be merged.  Then get all the parameters for
5823      each side.  */
5824 
5825   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5826       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5827     return 0;
5828 
5829   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5830   volatilep = 0;
5831   ll_inner = decode_field_reference (loc, &ll_arg,
5832 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5833 				     &ll_unsignedp, &ll_reversep, &volatilep,
5834 				     &ll_mask, &ll_and_mask);
5835   lr_inner = decode_field_reference (loc, &lr_arg,
5836 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5837 				     &lr_unsignedp, &lr_reversep, &volatilep,
5838 				     &lr_mask, &lr_and_mask);
5839   rl_inner = decode_field_reference (loc, &rl_arg,
5840 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5841 				     &rl_unsignedp, &rl_reversep, &volatilep,
5842 				     &rl_mask, &rl_and_mask);
5843   rr_inner = decode_field_reference (loc, &rr_arg,
5844 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5845 				     &rr_unsignedp, &rr_reversep, &volatilep,
5846 				     &rr_mask, &rr_and_mask);
5847 
5848   /* The inner operation on the lhs of each comparison must be the
5849      same if we are to be able to do anything.
5850      Then see if we have constants.  If not, the same must be true for
5851      the rhs's.  */
5852   if (volatilep
5853       || ll_reversep != rl_reversep
5854       || ll_inner == 0 || rl_inner == 0
5855       || ! operand_equal_p (ll_inner, rl_inner, 0))
5856     return 0;
5857 
5858   if (TREE_CODE (lr_arg) == INTEGER_CST
5859       && TREE_CODE (rr_arg) == INTEGER_CST)
5860     {
5861       l_const = lr_arg, r_const = rr_arg;
5862       lr_reversep = ll_reversep;
5863     }
5864   else if (lr_reversep != rr_reversep
5865 	   || lr_inner == 0 || rr_inner == 0
5866 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5867     return 0;
5868   else
5869     l_const = r_const = 0;
5870 
5871   /* If either comparison code is not correct for our logical operation,
5872      fail.  However, we can convert a one-bit comparison against zero into
5873      the opposite comparison against that bit being set in the field.  */
5874 
5875   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5876   if (lcode != wanted_code)
5877     {
5878       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5879 	{
5880 	  /* Make the left operand unsigned, since we are only interested
5881 	     in the value of one bit.  Otherwise we are doing the wrong
5882 	     thing below.  */
5883 	  ll_unsignedp = 1;
5884 	  l_const = ll_mask;
5885 	}
5886       else
5887 	return 0;
5888     }
5889 
5890   /* This is analogous to the code for l_const above.  */
5891   if (rcode != wanted_code)
5892     {
5893       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5894 	{
5895 	  rl_unsignedp = 1;
5896 	  r_const = rl_mask;
5897 	}
5898       else
5899 	return 0;
5900     }
5901 
5902   /* See if we can find a mode that contains both fields being compared on
5903      the left.  If we can't, fail.  Otherwise, update all constants and masks
5904      to be relative to a field of that size.  */
5905   first_bit = MIN (ll_bitpos, rl_bitpos);
5906   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5907   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5908 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5909 		      volatilep, &lnmode))
5910     return 0;
5911 
5912   lnbitsize = GET_MODE_BITSIZE (lnmode);
5913   lnbitpos = first_bit & ~ (lnbitsize - 1);
5914   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5915   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5916 
5917   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5918     {
5919       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5920       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5921     }
5922 
5923   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5924 			 size_int (xll_bitpos));
5925   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5926 			 size_int (xrl_bitpos));
5927 
5928   if (l_const)
5929     {
5930       l_const = fold_convert_loc (loc, lntype, l_const);
5931       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5932       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5933       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5934 					fold_build1_loc (loc, BIT_NOT_EXPR,
5935 						     lntype, ll_mask))))
5936 	{
5937 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5938 
5939 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5940 	}
5941     }
5942   if (r_const)
5943     {
5944       r_const = fold_convert_loc (loc, lntype, r_const);
5945       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5946       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5947       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5948 					fold_build1_loc (loc, BIT_NOT_EXPR,
5949 						     lntype, rl_mask))))
5950 	{
5951 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5952 
5953 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5954 	}
5955     }
5956 
5957   /* If the right sides are not constant, do the same for them.  Also,
5958      disallow this optimization if a size, signedness or storage order
5959      mismatch occurs between the left and right sides.  */
5960   if (l_const == 0)
5961     {
5962       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5963 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5964 	  || ll_reversep != lr_reversep
5965 	  /* Make sure the two fields on the right
5966 	     correspond to the left without being swapped.  */
5967 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5968 	return 0;
5969 
5970       first_bit = MIN (lr_bitpos, rr_bitpos);
5971       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5972       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5973 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5974 			  volatilep, &rnmode))
5975 	return 0;
5976 
5977       rnbitsize = GET_MODE_BITSIZE (rnmode);
5978       rnbitpos = first_bit & ~ (rnbitsize - 1);
5979       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5980       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5981 
5982       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5983 	{
5984 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5985 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5986 	}
5987 
5988       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5989 							    rntype, lr_mask),
5990 			     size_int (xlr_bitpos));
5991       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5992 							    rntype, rr_mask),
5993 			     size_int (xrr_bitpos));
5994 
5995       /* Make a mask that corresponds to both fields being compared.
5996 	 Do this for both items being compared.  If the operands are the
5997 	 same size and the bits being compared are in the same position
5998 	 then we can do this by masking both and comparing the masked
5999 	 results.  */
6000       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6001       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6002       if (lnbitsize == rnbitsize
6003 	  && xll_bitpos == xlr_bitpos
6004 	  && lnbitpos >= 0
6005 	  && rnbitpos >= 0)
6006 	{
6007 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6008 				    lntype, lnbitsize, lnbitpos,
6009 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6010 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6011 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6012 
6013 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6014 				    rntype, rnbitsize, rnbitpos,
6015 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6016 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6017 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6018 
6019 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6020 	}
6021 
6022       /* There is still another way we can do something:  If both pairs of
6023 	 fields being compared are adjacent, we may be able to make a wider
6024 	 field containing them both.
6025 
6026 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6027 	 the mask must be shifted to account for the shift done by
6028 	 make_bit_field_ref.  */
6029       if (((ll_bitsize + ll_bitpos == rl_bitpos
6030 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6031 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6032 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6033 	  && ll_bitpos >= 0
6034 	  && rl_bitpos >= 0
6035 	  && lr_bitpos >= 0
6036 	  && rr_bitpos >= 0)
6037 	{
6038 	  tree type;
6039 
6040 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6041 				    ll_bitsize + rl_bitsize,
6042 				    MIN (ll_bitpos, rl_bitpos),
6043 				    ll_unsignedp, ll_reversep);
6044 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6045 				    lr_bitsize + rr_bitsize,
6046 				    MIN (lr_bitpos, rr_bitpos),
6047 				    lr_unsignedp, lr_reversep);
6048 
6049 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6050 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6051 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6052 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6053 
6054 	  /* Convert to the smaller type before masking out unwanted bits.  */
6055 	  type = lntype;
6056 	  if (lntype != rntype)
6057 	    {
6058 	      if (lnbitsize > rnbitsize)
6059 		{
6060 		  lhs = fold_convert_loc (loc, rntype, lhs);
6061 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6062 		  type = rntype;
6063 		}
6064 	      else if (lnbitsize < rnbitsize)
6065 		{
6066 		  rhs = fold_convert_loc (loc, lntype, rhs);
6067 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6068 		  type = lntype;
6069 		}
6070 	    }
6071 
6072 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6073 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6074 
6075 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6076 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6077 
6078 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6079 	}
6080 
6081       return 0;
6082     }
6083 
6084   /* Handle the case of comparisons with constants.  If there is something in
6085      common between the masks, those bits of the constants must be the same.
6086      If not, the condition is always false.  Test for this to avoid generating
6087      incorrect code below.  */
6088   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6089   if (! integer_zerop (result)
6090       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6091 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6092     {
6093       if (wanted_code == NE_EXPR)
6094 	{
6095 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6096 	  return constant_boolean_node (true, truth_type);
6097 	}
6098       else
6099 	{
6100 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6101 	  return constant_boolean_node (false, truth_type);
6102 	}
6103     }
6104 
6105   if (lnbitpos < 0)
6106     return 0;
6107 
6108   /* Construct the expression we will return.  First get the component
6109      reference we will make.  Unless the mask is all ones the width of
6110      that field, perform the mask operation.  Then compare with the
6111      merged constant.  */
6112   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6113 			       lntype, lnbitsize, lnbitpos,
6114 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6115 
6116   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6117   if (! all_ones_mask_p (ll_mask, lnbitsize))
6118     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6119 
6120   return build2_loc (loc, wanted_code, truth_type, result,
6121 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6122 }
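
/* A sketch of the effect on plain integers (the field layout and the
   constants are hypothetical): with two adjacent 8-bit fields of X,

     ((x & 0xff) == 2) && (((x >> 8) & 0xff) == 4)

   loads the same word twice, so it can be folded into the single masked
   comparison

     (x & 0xffff) == 0x0402

   which is the shape built above from make_bit_field_ref plus the IORed
   masks and constants.  */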
6123 
6124 /* T is an integer expression that is being multiplied, divided, or reduced
6125    modulo a constant C (CODE says which operation and what kind of divide
6126    or modulus).  See if we can eliminate that operation by folding it with
6127    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6128    should be used for the computation if wider than our type.
6129 
6130    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6131    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6132    expression would not overflow or that overflow is undefined for the type
6133    in the language in question.
6134 
6135    If we return a non-null expression, it is an equivalent form of the
6136    original computation, but need not be in the original type.
6137 
6138    We set *STRICT_OVERFLOW_P to true if the return value depends on
6139    signed overflow being undefined.  Otherwise we do not change
6140    *STRICT_OVERFLOW_P.  */
6141 
6142 static tree
6143 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6144 		bool *strict_overflow_p)
6145 {
6146   /* To avoid exponential search depth, refuse to allow recursion past
6147      three levels.  Beyond that (1) it's highly unlikely that we'll find
6148      something interesting and (2) we've probably processed it before
6149      when we built the inner expression.  */
6150 
6151   static int depth;
6152   tree ret;
6153 
6154   if (depth > 3)
6155     return NULL;
6156 
6157   depth++;
6158   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6159   depth--;
6160 
6161   return ret;
6162 }
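
/* The signedness caveat matters.  With undefined signed overflow,
   (x * 8 + y * 16) / 4 may be rewritten as x * 2 + y * 4; with wrapping
   unsigned arithmetic it may not: for 32-bit unsigned x = 0x20000000 and
   y = 0, x * 8 wraps to 0 and the original expression yields 0, while
   x * 2 yields 0x40000000.  */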
6163 
6164 static tree
6165 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6166 		  bool *strict_overflow_p)
6167 {
6168   tree type = TREE_TYPE (t);
6169   enum tree_code tcode = TREE_CODE (t);
6170   tree ctype = (wide_type != 0
6171 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6172 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6173 		? wide_type : type);
6174   tree t1, t2;
6175   int same_p = tcode == code;
6176   tree op0 = NULL_TREE, op1 = NULL_TREE;
6177   bool sub_strict_overflow_p;
6178 
6179   /* Don't deal with constants of zero here; they confuse the code below.  */
6180   if (integer_zerop (c))
6181     return NULL_TREE;
6182 
6183   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6184     op0 = TREE_OPERAND (t, 0);
6185 
6186   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6187     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6188 
6189   /* Note that we need not handle conditional operations here since fold
6190      already handles those cases.  So just do arithmetic here.  */
6191   switch (tcode)
6192     {
6193     case INTEGER_CST:
6194       /* For a constant, we can always simplify if we are a multiply
6195 	 or (for divide and modulus) if it is a multiple of our constant.  */
6196       if (code == MULT_EXPR
6197 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6198 				TYPE_SIGN (type)))
6199 	{
6200 	  tree tem = const_binop (code, fold_convert (ctype, t),
6201 				  fold_convert (ctype, c));
6202 	  /* If the multiplication overflowed, we lost information on it.
6203 	     See PR68142 and PR69845.  */
6204 	  if (TREE_OVERFLOW (tem))
6205 	    return NULL_TREE;
6206 	  return tem;
6207 	}
6208       break;
6209 
6210     CASE_CONVERT: case NON_LVALUE_EXPR:
6211       /* If op0 is an expression ...  */
6212       if ((COMPARISON_CLASS_P (op0)
6213 	   || UNARY_CLASS_P (op0)
6214 	   || BINARY_CLASS_P (op0)
6215 	   || VL_EXP_CLASS_P (op0)
6216 	   || EXPRESSION_CLASS_P (op0))
6217 	  /* ... and has wrapping overflow, and its type is smaller
6218 	     than ctype, then we cannot pass through as widening.  */
6219 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6220 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6221 	       && (TYPE_PRECISION (ctype)
6222 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6223 	      /* ... or this is a truncation (t is narrower than op0),
6224 		 then we cannot pass through this narrowing.  */
6225 	      || (TYPE_PRECISION (type)
6226 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6227 	      /* ... or signedness changes for division or modulus,
6228 		 then we cannot pass through this conversion.  */
6229 	      || (code != MULT_EXPR
6230 		  && (TYPE_UNSIGNED (ctype)
6231 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6232 	      /* ... or has undefined overflow while the type we convert
6233 		 to has not, we cannot do the operation in the inner type
6234 		 as that would introduce undefined overflow.  */
6235 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6236 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6237 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6238 	break;
6239 
6240       /* Pass the constant down and see if we can make a simplification.  If
6241 	 we can, replace this expression with the inner simplification for
6242 	 possible later conversion to our or some other type.  */
6243       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6244 	  && TREE_CODE (t2) == INTEGER_CST
6245 	  && !TREE_OVERFLOW (t2)
6246 	  && (t1 = extract_muldiv (op0, t2, code,
6247 				   code == MULT_EXPR ? ctype : NULL_TREE,
6248 				   strict_overflow_p)) != 0)
6249 	return t1;
6250       break;
6251 
6252     case ABS_EXPR:
6253       /* If widening the type changes it from signed to unsigned, then we
6254          must avoid building ABS_EXPR itself as unsigned.  */
6255       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6256         {
6257           tree cstype = (*signed_type_for) (ctype);
6258           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6259 	      != 0)
6260             {
6261               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6262               return fold_convert (ctype, t1);
6263             }
6264           break;
6265         }
6266       /* If the constant is negative, we cannot simplify this.  */
6267       if (tree_int_cst_sgn (c) == -1)
6268         break;
6269       /* FALLTHROUGH */
6270     case NEGATE_EXPR:
6271       /* For division and modulus, type can't be unsigned, as e.g.
6272 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6273 	 For signed types, even with wrapping overflow, this is fine.  */
6274       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6275 	break;
6276       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6277 	  != 0)
6278 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6279       break;
6280 
6281     case MIN_EXPR:  case MAX_EXPR:
6282       /* If widening the type changes the signedness, then we can't perform
6283 	 this optimization as that changes the result.  */
6284       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6285 	break;
6286 
6287       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6288       sub_strict_overflow_p = false;
6289       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6290 				&sub_strict_overflow_p)) != 0
6291 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6292 				   &sub_strict_overflow_p)) != 0)
6293 	{
6294 	  if (tree_int_cst_sgn (c) < 0)
6295 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6296 	  if (sub_strict_overflow_p)
6297 	    *strict_overflow_p = true;
6298 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6299 			      fold_convert (ctype, t2));
6300 	}
6301       break;
6302 
6303     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6304       /* If the second operand is constant, this is a multiplication
6305 	 or floor division, by a power of two, so we can treat it that
6306 	 way unless the multiplier or divisor overflows.  Signed
6307 	 left-shift overflow is implementation-defined rather than
6308 	 undefined in C90, so do not convert signed left shift into
6309 	 multiplication.  */
6310       if (TREE_CODE (op1) == INTEGER_CST
6311 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6312 	  /* const_binop may not detect overflow correctly,
6313 	     so check for it explicitly here.  */
6314 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6315 			wi::to_wide (op1))
6316 	  && (t1 = fold_convert (ctype,
6317 				 const_binop (LSHIFT_EXPR, size_one_node,
6318 					      op1))) != 0
6319 	  && !TREE_OVERFLOW (t1))
6320 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6321 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6322 				       ctype,
6323 				       fold_convert (ctype, op0),
6324 				       t1),
6325 			       c, code, wide_type, strict_overflow_p);
6326       break;
6327 
6328     case PLUS_EXPR:  case MINUS_EXPR:
6329       /* See if we can eliminate the operation on both sides.  If we can, we
6330 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6331 	 cases where we can do anything are if the second operand is a
6332 	 constant.  */
6333       sub_strict_overflow_p = false;
6334       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6335       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6336       if (t1 != 0 && t2 != 0
6337 	  && TYPE_OVERFLOW_WRAPS (ctype)
6338 	  && (code == MULT_EXPR
6339 	      /* If not multiplication, we can only do this if both operands
6340 		 are divisible by c.  */
6341 	      || (multiple_of_p (ctype, op0, c)
6342 	          && multiple_of_p (ctype, op1, c))))
6343 	{
6344 	  if (sub_strict_overflow_p)
6345 	    *strict_overflow_p = true;
6346 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6347 			      fold_convert (ctype, t2));
6348 	}
6349 
6350       /* If this was a subtraction, negate OP1 and set it to be an addition.
6351 	 This simplifies the logic below.  */
6352       if (tcode == MINUS_EXPR)
6353 	{
6354 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6355 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6356 	  if (TREE_CODE (op0) == INTEGER_CST)
6357 	    {
6358 	      std::swap (op0, op1);
6359 	      std::swap (t1, t2);
6360 	    }
6361 	}
6362 
6363       if (TREE_CODE (op1) != INTEGER_CST)
6364 	break;
6365 
6366       /* If either OP1 or C are negative, this optimization is not safe for
6367 	 some of the division and remainder types while for others we need
6368 	 to change the code.  */
6369       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6370 	{
6371 	  if (code == CEIL_DIV_EXPR)
6372 	    code = FLOOR_DIV_EXPR;
6373 	  else if (code == FLOOR_DIV_EXPR)
6374 	    code = CEIL_DIV_EXPR;
6375 	  else if (code != MULT_EXPR
6376 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6377 	    break;
6378 	}
6379 
6380       /* If it's a multiply or a division/modulus operation of a multiple
6381          of our constant, do the operation and verify it doesn't overflow.  */
6382       if (code == MULT_EXPR
6383 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6384 				TYPE_SIGN (type)))
6385 	{
6386 	  op1 = const_binop (code, fold_convert (ctype, op1),
6387 			     fold_convert (ctype, c));
6388 	  /* We allow the constant to overflow with wrapping semantics.  */
6389 	  if (op1 == 0
6390 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6391 	    break;
6392 	}
6393       else
6394 	break;
6395 
6396       /* If we have an unsigned type, we cannot widen the operation since it
6397 	 will change the result if the original computation overflowed.  */
6398       if (TYPE_UNSIGNED (ctype) && ctype != type)
6399 	break;
6400 
6401       /* The last case is if we are a multiply.  In that case, we can
6402 	 apply the distributive law to commute the multiply and addition
6403 	 if the multiplication of the constants doesn't overflow
6404 	 and overflow is defined.  With undefined overflow
6405 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6406       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6407 	return fold_build2 (tcode, ctype,
6408 			    fold_build2 (code, ctype,
6409 					 fold_convert (ctype, op0),
6410 					 fold_convert (ctype, c)),
6411 			    op1);
6412 
6413       break;
6414 
6415     case MULT_EXPR:
6416       /* We have a special case here if we are doing something like
6417 	 (C * 8) % 4 since we know that's zero.  */
6418       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6419 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6420 	  /* If the multiplication can overflow we cannot optimize this.  */
6421 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6422 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6423 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6424 				TYPE_SIGN (type)))
6425 	{
6426 	  *strict_overflow_p = true;
6427 	  return omit_one_operand (type, integer_zero_node, op0);
6428 	}
6429 
6430       /* ... fall through ...  */
6431 
6432     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6433     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6434       /* If we can extract our operation from the LHS, do so and return a
6435 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6436 	 do something only if the second operand is a constant.  */
6437       if (same_p
6438 	  && TYPE_OVERFLOW_WRAPS (ctype)
6439 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6440 				   strict_overflow_p)) != 0)
6441 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6442 			    fold_convert (ctype, op1));
6443       else if (tcode == MULT_EXPR && code == MULT_EXPR
6444 	       && TYPE_OVERFLOW_WRAPS (ctype)
6445 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6446 					strict_overflow_p)) != 0)
6447 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6448 			    fold_convert (ctype, t1));
6449       else if (TREE_CODE (op1) != INTEGER_CST)
6450 	return 0;
6451 
6452       /* If these are the same operation types, we can associate them
6453 	 assuming no overflow.  */
6454       if (tcode == code)
6455 	{
6456 	  bool overflow_p = false;
6457 	  bool overflow_mul_p;
6458 	  signop sign = TYPE_SIGN (ctype);
6459 	  unsigned prec = TYPE_PRECISION (ctype);
6460 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6461 				  wi::to_wide (c, prec),
6462 				  sign, &overflow_mul_p);
6463 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6464 	  if (overflow_mul_p
6465 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6466 	    overflow_p = true;
6467 	  if (!overflow_p)
6468 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6469 				wide_int_to_tree (ctype, mul));
6470 	}
6471 
6472       /* If these operations "cancel" each other, we have the main
6473 	 optimizations of this pass, which occur when either constant is a
6474 	 multiple of the other, in which case we replace this with an
6475 	 operation of either CODE or TCODE.
6476 
6477 	 If we have an unsigned type, we cannot do this since it will change
6478 	 the result if the original computation overflowed.  */
6479       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6480 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6481 	      || (tcode == MULT_EXPR
6482 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6483 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6484 		  && code != MULT_EXPR)))
6485 	{
6486 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6487 				 TYPE_SIGN (type)))
6488 	    {
6489 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6490 		*strict_overflow_p = true;
6491 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6492 				  fold_convert (ctype,
6493 						const_binop (TRUNC_DIV_EXPR,
6494 							     op1, c)));
6495 	    }
6496 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6497 				      TYPE_SIGN (type)))
6498 	    {
6499 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6500 		*strict_overflow_p = true;
6501 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6502 				  fold_convert (ctype,
6503 						const_binop (TRUNC_DIV_EXPR,
6504 							     c, op1)));
6505 	    }
6506 	}
6507       break;
6508 
6509     default:
6510       break;
6511     }
6512 
6513   return 0;
6514 }
6515 
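/* Editorial example, not part of GCC: the LSHIFT/RSHIFT cases above in
   source form.  For unsigned X, "x << 2" participates as "x * 4" and
   "x >> 2" as the floor division "x / 4".  */

static int
demo_shift_as_muldiv (unsigned x)
{
  return (x << 2) == x * 4 && (x >> 2) == x / 4;	/* Always 1.  */
}
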
6516 /* Return a node which has the indicated constant VALUE (either 0 or
6517    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6518    and is of the indicated TYPE.  */
6519 
6520 tree
6521 constant_boolean_node (bool value, tree type)
6522 {
6523   if (type == integer_type_node)
6524     return value ? integer_one_node : integer_zero_node;
6525   else if (type == boolean_type_node)
6526     return value ? boolean_true_node : boolean_false_node;
6527   else if (TREE_CODE (type) == VECTOR_TYPE)
6528     return build_vector_from_val (type,
6529 				  build_int_cst (TREE_TYPE (type),
6530 						 value ? -1 : 0));
6531   else
6532     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6533 }
6534 
6535 
6536 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6537    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6538    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6539    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6540    COND is the first argument to CODE; otherwise (as in the example
6541    given here), it is the second argument.  TYPE is the type of the
6542    original expression.  Return NULL_TREE if no simplification is
6543    possible.  */
6544 
6545 static tree
6546 fold_binary_op_with_conditional_arg (location_t loc,
6547 				     enum tree_code code,
6548 				     tree type, tree op0, tree op1,
6549 				     tree cond, tree arg, int cond_first_p)
6550 {
6551   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6552   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6553   tree test, true_value, false_value;
6554   tree lhs = NULL_TREE;
6555   tree rhs = NULL_TREE;
6556   enum tree_code cond_code = COND_EXPR;
6557 
6558   if (TREE_CODE (cond) == COND_EXPR
6559       || TREE_CODE (cond) == VEC_COND_EXPR)
6560     {
6561       test = TREE_OPERAND (cond, 0);
6562       true_value = TREE_OPERAND (cond, 1);
6563       false_value = TREE_OPERAND (cond, 2);
6564       /* If this operand throws an exception, then it does not make
6565 	 sense to try to perform a logical or arithmetic operation
6566 	 involving it.  */
6567       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6568 	lhs = true_value;
6569       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6570 	rhs = false_value;
6571     }
6572   else if (!(TREE_CODE (type) != VECTOR_TYPE
6573 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6574     {
6575       tree testtype = TREE_TYPE (cond);
6576       test = cond;
6577       true_value = constant_boolean_node (true, testtype);
6578       false_value = constant_boolean_node (false, testtype);
6579     }
6580   else
6581     /* Detect the case of mixing vector and scalar types - bail out.  */
6582     return NULL_TREE;
6583 
6584   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6585     cond_code = VEC_COND_EXPR;
6586 
6587   /* This transformation is only worthwhile if we don't have to wrap ARG
6588      in a SAVE_EXPR and the operation can be simplified without recursing
6589      on at least one of the branches once it is pushed inside the COND_EXPR.  */
6590   if (!TREE_CONSTANT (arg)
6591       && (TREE_SIDE_EFFECTS (arg)
6592 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6593 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6594     return NULL_TREE;
6595 
6596   arg = fold_convert_loc (loc, arg_type, arg);
6597   if (lhs == 0)
6598     {
6599       true_value = fold_convert_loc (loc, cond_type, true_value);
6600       if (cond_first_p)
6601 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6602       else
6603 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6604     }
6605   if (rhs == 0)
6606     {
6607       false_value = fold_convert_loc (loc, cond_type, false_value);
6608       if (cond_first_p)
6609 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6610       else
6611 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6612     }
6613 
6614   /* Check that we have simplified at least one of the branches.  */
6615   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6616     return NULL_TREE;
6617 
6618   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6619 }
6620 
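/* Editorial sketch, not part of GCC: the source-level shape of the
   transform implemented above, cond-second case; the demo_* names are
   hypothetical.  The fold is only kept when one branch simplifies.  */

static int
demo_cond_arg_before (int a, int b, int x, int y)
{
  return a + (b ? x : y);
}

static int
demo_cond_arg_after (int a, int b, int x, int y)
{
  return b ? (a + x) : (a + y);
}
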
6621 
6622 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6623 
6624    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6625    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6626    ADDEND is the same as X.
6627 
6628    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6629    and finite.  The problematic cases are when X is zero, and its mode
6630    has signed zeros.  In the case of rounding towards -infinity,
6631    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6632    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6633 
6634 bool
6635 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6636 {
6637   if (!real_zerop (addend))
6638     return false;
6639 
6640   /* Don't allow the fold with -fsignaling-nans.  */
6641   if (HONOR_SNANS (element_mode (type)))
6642     return false;
6643 
6644   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6645   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6646     return true;
6647 
6648   /* In a vector or complex, we would need to check the sign of all zeros.  */
6649   if (TREE_CODE (addend) != REAL_CST)
6650     return false;
6651 
6652   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6653   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6654     negate = !negate;
6655 
6656   /* The mode has signed zeros, and we have to honor their sign.
6657      In this situation, there is only one case we can return true for.
6658      X - 0 is the same as X unless rounding towards -infinity is
6659      supported.  */
6660   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6661 }
6662 
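/* Editorial illustration, not part of GCC, of the signed-zero hazard the
   predicate above guards against.  With IEEE semantics and default
   rounding, "x + 0.0" maps -0.0 to +0.0, so it is not an identity once
   zeros are signed; "x - 0.0" preserves the sign of zero except under
   rounding towards -infinity, where 0.0 - 0.0 yields -0.0.  */

static double
demo_plus_zero (double x)
{
  return x + 0.0;	/* Turns -0.0 into +0.0.  */
}

static double
demo_minus_zero (double x)
{
  return x - 0.0;	/* Keeps -0.0, absent -inf rounding.  */
}
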
6663 /* Subroutine of match.pd that optimizes comparisons of a division by
6664    a nonzero integer constant against an integer constant, i.e.
6665    X/C1 op C2.
6666 
6667    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6668    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
6669 
6670 enum tree_code
6671 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6672 		  tree *hi, bool *neg_overflow)
6673 {
6674   tree prod, tmp, type = TREE_TYPE (c1);
6675   signop sign = TYPE_SIGN (type);
6676   bool overflow;
6677 
6678   /* We have to do this the hard way to detect unsigned overflow.
6679      prod = int_const_binop (MULT_EXPR, c1, c2);  */
6680   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6681   prod = force_fit_type (type, val, -1, overflow);
6682   *neg_overflow = false;
6683 
6684   if (sign == UNSIGNED)
6685     {
6686       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6687       *lo = prod;
6688 
6689       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6690       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6691       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6692     }
6693   else if (tree_int_cst_sgn (c1) >= 0)
6694     {
6695       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6696       switch (tree_int_cst_sgn (c2))
6697 	{
6698 	case -1:
6699 	  *neg_overflow = true;
6700 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6701 	  *hi = prod;
6702 	  break;
6703 
6704 	case 0:
6705 	  *lo = fold_negate_const (tmp, type);
6706 	  *hi = tmp;
6707 	  break;
6708 
6709 	case 1:
6710 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6711 	  *lo = prod;
6712 	  break;
6713 
6714 	default:
6715 	  gcc_unreachable ();
6716 	}
6717     }
6718   else
6719     {
6720       /* A negative divisor reverses the relational operators.  */
6721       code = swap_tree_comparison (code);
6722 
6723       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6724       switch (tree_int_cst_sgn (c2))
6725 	{
6726 	case -1:
6727 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6728 	  *lo = prod;
6729 	  break;
6730 
6731 	case 0:
6732 	  *hi = fold_negate_const (tmp, type);
6733 	  *lo = tmp;
6734 	  break;
6735 
6736 	case 1:
6737 	  *neg_overflow = true;
6738 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6739 	  *hi = prod;
6740 	  break;
6741 
6742 	default:
6743 	  gcc_unreachable ();
6744 	}
6745     }
6746 
6747   if (code != EQ_EXPR && code != NE_EXPR)
6748     return code;
6749 
6750   if (TREE_OVERFLOW (*lo)
6751       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6752     *lo = NULL_TREE;
6753   if (TREE_OVERFLOW (*hi)
6754       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6755     *hi = NULL_TREE;
6756 
6757   return code;
6758 }
6759 
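/* Editorial example, not part of GCC: the range fold_div_compare derives.
   For positive C1 and C2, X / C1 == C2 holds exactly when
   C1*C2 <= X <= C1*C2 + (C1 - 1); e.g. x / 4 == 3 iff 12 <= x <= 15.  */

static int
demo_div_compare_range (unsigned x)
{
  return (x / 4 == 3) == (x >= 12 && x <= 15);	/* Always 1.  */
}
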
6760 
6761 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6762    equality/inequality test, then return a simplified form of the test
6763    using a sign test.  Otherwise return NULL.  TYPE is the desired
6764    result type.  */
6765 
6766 static tree
6767 fold_single_bit_test_into_sign_test (location_t loc,
6768 				     enum tree_code code, tree arg0, tree arg1,
6769 				     tree result_type)
6770 {
6771   /* If this is testing a single bit, we can optimize the test.  */
6772   if ((code == NE_EXPR || code == EQ_EXPR)
6773       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6774       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6775     {
6776       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6777 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6778       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6779 
6780       if (arg00 != NULL_TREE
6781 	  /* This is only a win if casting to a signed type is cheap,
6782 	     i.e. when arg00's type is not a partial mode.  */
6783 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
6784 	{
6785 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6786 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6787 			      result_type,
6788 			      fold_convert_loc (loc, stype, arg00),
6789 			      build_int_cst (stype, 0));
6790 	}
6791     }
6792 
6793   return NULL_TREE;
6794 }
6795 
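/* Editorial example, not part of GCC, assuming a 32-bit two's complement
   int: when C is the sign bit, (A & C) != 0 is exactly A < 0.  */

static int
demo_sign_bit_test (int a)
{
  return ((a & (1u << 31)) != 0) == (a < 0);	/* Always 1.  */
}
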
6796 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6797    equality/inequality test, then return a simplified form of
6798    the test using shifts and logical operations.  Otherwise return
6799    NULL.  TYPE is the desired result type.  */
6800 
6801 tree
6802 fold_single_bit_test (location_t loc, enum tree_code code,
6803 		      tree arg0, tree arg1, tree result_type)
6804 {
6805   /* If this is testing a single bit, we can optimize the test.  */
6806   if ((code == NE_EXPR || code == EQ_EXPR)
6807       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6808       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6809     {
6810       tree inner = TREE_OPERAND (arg0, 0);
6811       tree type = TREE_TYPE (arg0);
6812       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6813       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6814       int ops_unsigned;
6815       tree signed_type, unsigned_type, intermediate_type;
6816       tree tem, one;
6817 
6818       /* First, see if we can fold the single bit test into a sign-bit
6819 	 test.  */
6820       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6821 						 result_type);
6822       if (tem)
6823 	return tem;
6824 
6825       /* Otherwise we have (A & C) != 0 where C is a single bit,
6826 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6827 	 Similarly for (A & C) == 0.  */
6828 
6829       /* If INNER is a right shift by a constant and it plus BITNUM does
6830 	 not overflow, adjust BITNUM and INNER.  */
6831       if (TREE_CODE (inner) == RSHIFT_EXPR
6832 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6833 	  && bitnum < TYPE_PRECISION (type)
6834 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6835 			TYPE_PRECISION (type) - bitnum))
6836 	{
6837 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6838 	  inner = TREE_OPERAND (inner, 0);
6839 	}
6840 
6841       /* If we are going to be able to omit the AND below, we must do our
6842 	 operations as unsigned.  If we must use the AND, we have a choice.
6843 	 Normally unsigned is faster, but for some machines signed is.  */
6844       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6845 		      && !flag_syntax_only) ? 0 : 1;
6846 
6847       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6848       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6849       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6850       inner = fold_convert_loc (loc, intermediate_type, inner);
6851 
6852       if (bitnum != 0)
6853 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6854 			inner, size_int (bitnum));
6855 
6856       one = build_int_cst (intermediate_type, 1);
6857 
6858       if (code == EQ_EXPR)
6859 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6860 
6861       /* Put the AND last so it can combine with more things.  */
6862       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6863 
6864       /* Make sure to return the proper type.  */
6865       inner = fold_convert_loc (loc, result_type, inner);
6866 
6867       return inner;
6868     }
6869   return NULL_TREE;
6870 }
6871 
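/* Editorial example, not part of GCC: the shift-and-AND form built
   above, shown for C = 32, i.e. C2 = log2 (C) = 5.  */

static int
demo_single_bit_test (unsigned a)
{
  return ((a & 32) != 0) == ((a >> 5) & 1);	/* Always 1.  */
}
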
6872 /* Test whether it is preferable to swap two operands, ARG0 and
6873    ARG1, for example because ARG0 is an integer constant and ARG1
6874    isn't.  */
6875 
6876 bool
6877 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6878 {
6879   if (CONSTANT_CLASS_P (arg1))
6880     return 0;
6881   if (CONSTANT_CLASS_P (arg0))
6882     return 1;
6883 
6884   STRIP_NOPS (arg0);
6885   STRIP_NOPS (arg1);
6886 
6887   if (TREE_CONSTANT (arg1))
6888     return 0;
6889   if (TREE_CONSTANT (arg0))
6890     return 1;
6891 
6892   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6893      for commutative and comparison operators.  Ensuring a canonical
6894      form allows the optimizers to find additional redundancies without
6895      having to explicitly check for both orderings.  */
6896   if (TREE_CODE (arg0) == SSA_NAME
6897       && TREE_CODE (arg1) == SSA_NAME
6898       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6899     return 1;
6900 
6901   /* Put SSA_NAMEs last.  */
6902   if (TREE_CODE (arg1) == SSA_NAME)
6903     return 0;
6904   if (TREE_CODE (arg0) == SSA_NAME)
6905     return 1;
6906 
6907   /* Put variables last.  */
6908   if (DECL_P (arg1))
6909     return 0;
6910   if (DECL_P (arg0))
6911     return 1;
6912 
6913   return 0;
6914 }
6915 
6916 
6917 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6918    means A >= Y && A != MAX, but in this case we know that
6919    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6920 
6921 static tree
6922 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6923 {
6924   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6925 
6926   if (TREE_CODE (bound) == LT_EXPR)
6927     a = TREE_OPERAND (bound, 0);
6928   else if (TREE_CODE (bound) == GT_EXPR)
6929     a = TREE_OPERAND (bound, 1);
6930   else
6931     return NULL_TREE;
6932 
6933   typea = TREE_TYPE (a);
6934   if (!INTEGRAL_TYPE_P (typea)
6935       && !POINTER_TYPE_P (typea))
6936     return NULL_TREE;
6937 
6938   if (TREE_CODE (ineq) == LT_EXPR)
6939     {
6940       a1 = TREE_OPERAND (ineq, 1);
6941       y = TREE_OPERAND (ineq, 0);
6942     }
6943   else if (TREE_CODE (ineq) == GT_EXPR)
6944     {
6945       a1 = TREE_OPERAND (ineq, 0);
6946       y = TREE_OPERAND (ineq, 1);
6947     }
6948   else
6949     return NULL_TREE;
6950 
6951   if (TREE_TYPE (a1) != typea)
6952     return NULL_TREE;
6953 
6954   if (POINTER_TYPE_P (typea))
6955     {
6956       /* Convert the pointer types into integers before taking the difference.  */
6957       tree ta = fold_convert_loc (loc, ssizetype, a);
6958       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6959       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6960     }
6961   else
6962     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6963 
6964   if (!diff || !integer_onep (diff))
6965    return NULL_TREE;
6966 
6967   return fold_build2_loc (loc, GE_EXPR, type, a, y);
6968 }
6969 
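/* Editorial sketch, not part of GCC: for integers, A + 1 > Y is the same
   as A >= Y provided A + 1 does not overflow, which the bound A < X
   guarantees on the path where it is evaluated.  */

static int
demo_nonsharp_ineq (int a, int x, int y)
{
  return (a < x && a + 1 > y) == (a < x && a >= y);	/* Always 1.  */
}
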
6970 /* Fold a sum or difference of at least one multiplication.
6971    Returns the folded tree or NULL if no simplification could be made.  */
6972 
6973 static tree
6974 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6975 			  tree arg0, tree arg1)
6976 {
6977   tree arg00, arg01, arg10, arg11;
6978   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6979 
6980   /* (A * C) +- (B * C) -> (A+-B) * C.
6981      (A * C) +- A -> A * (C+-1).
6982      We are most concerned about the case where C is a constant,
6983      but other combinations show up during loop reduction.  Since
6984      it is not difficult, try all four possibilities.  */
6985 
6986   if (TREE_CODE (arg0) == MULT_EXPR)
6987     {
6988       arg00 = TREE_OPERAND (arg0, 0);
6989       arg01 = TREE_OPERAND (arg0, 1);
6990     }
6991   else if (TREE_CODE (arg0) == INTEGER_CST)
6992     {
6993       arg00 = build_one_cst (type);
6994       arg01 = arg0;
6995     }
6996   else
6997     {
6998       /* We cannot generate constant 1 for fract.  */
6999       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7000 	return NULL_TREE;
7001       arg00 = arg0;
7002       arg01 = build_one_cst (type);
7003     }
7004   if (TREE_CODE (arg1) == MULT_EXPR)
7005     {
7006       arg10 = TREE_OPERAND (arg1, 0);
7007       arg11 = TREE_OPERAND (arg1, 1);
7008     }
7009   else if (TREE_CODE (arg1) == INTEGER_CST)
7010     {
7011       arg10 = build_one_cst (type);
7012       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7013 	 the purpose of this canonicalization.  */
7014       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7015 	  && negate_expr_p (arg1)
7016 	  && code == PLUS_EXPR)
7017 	{
7018 	  arg11 = negate_expr (arg1);
7019 	  code = MINUS_EXPR;
7020 	}
7021       else
7022 	arg11 = arg1;
7023     }
7024   else
7025     {
7026       /* We cannot generate constant 1 for fract.  */
7027       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7028 	return NULL_TREE;
7029       arg10 = arg1;
7030       arg11 = build_one_cst (type);
7031     }
7032   same = NULL_TREE;
7033 
7034   /* Prefer factoring a common non-constant.  */
7035   if (operand_equal_p (arg00, arg10, 0))
7036     same = arg00, alt0 = arg01, alt1 = arg11;
7037   else if (operand_equal_p (arg01, arg11, 0))
7038     same = arg01, alt0 = arg00, alt1 = arg10;
7039   else if (operand_equal_p (arg00, arg11, 0))
7040     same = arg00, alt0 = arg01, alt1 = arg10;
7041   else if (operand_equal_p (arg01, arg10, 0))
7042     same = arg01, alt0 = arg00, alt1 = arg11;
7043 
7044   /* No identical multiplicands; see if we can find a common
7045      power-of-two factor in non-power-of-two multiplies.  This
7046      can help in multi-dimensional array access.  */
7047   else if (tree_fits_shwi_p (arg01)
7048 	   && tree_fits_shwi_p (arg11))
7049     {
7050       HOST_WIDE_INT int01, int11, tmp;
7051       bool swap = false;
7052       tree maybe_same;
7053       int01 = tree_to_shwi (arg01);
7054       int11 = tree_to_shwi (arg11);
7055 
7056       /* Move min of absolute values to int11.  */
7057       if (absu_hwi (int01) < absu_hwi (int11))
7058         {
7059 	  tmp = int01, int01 = int11, int11 = tmp;
7060 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7061 	  maybe_same = arg01;
7062 	  swap = true;
7063 	}
7064       else
7065 	maybe_same = arg11;
7066 
7067       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7068 	  /* The remainder should not be a constant, otherwise we
7069 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7070 	     increased the number of multiplications necessary.  */
7071 	  && TREE_CODE (arg10) != INTEGER_CST)
7072         {
7073 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7074 			      build_int_cst (TREE_TYPE (arg00),
7075 					     int01 / int11));
7076 	  alt1 = arg10;
7077 	  same = maybe_same;
7078 	  if (swap)
7079 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7080 	}
7081     }
7082 
7083   if (!same)
7084     return NULL_TREE;
7085 
7086   if (! INTEGRAL_TYPE_P (type)
7087       || TYPE_OVERFLOW_WRAPS (type)
7088       /* We are neither factoring zero nor minus one.  */
7089       || TREE_CODE (same) == INTEGER_CST)
7090     return fold_build2_loc (loc, MULT_EXPR, type,
7091 			fold_build2_loc (loc, code, type,
7092 				     fold_convert_loc (loc, type, alt0),
7093 				     fold_convert_loc (loc, type, alt1)),
7094 			fold_convert_loc (loc, type, same));
7095 
7096   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7097      same may be minus one and thus the multiplication may overflow.  Perform
7098      the sum operation in an unsigned type.  */
7099   tree utype = unsigned_type_for (type);
7100   tree tem = fold_build2_loc (loc, code, utype,
7101 			      fold_convert_loc (loc, utype, alt0),
7102 			      fold_convert_loc (loc, utype, alt1));
7103   /* If the sum evaluated to a constant that is not -INF, the multiplication
7104      cannot overflow.  */
7105   if (TREE_CODE (tem) == INTEGER_CST
7106       && (wi::to_wide (tem)
7107 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7108     return fold_build2_loc (loc, MULT_EXPR, type,
7109 			    fold_convert (type, tem), same);
7110 
7111   /* Do not resort to unsigned multiplication because
7112      we lose the no-overflow property of the expression.  */
7113   return NULL_TREE;
7114 }
7115 
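/* Editorial example, not part of GCC: factoring out the common
   multiplicand, unconditionally valid here because unsigned arithmetic
   wraps.  */

static int
demo_factor_common (unsigned a, unsigned b, unsigned c)
{
  return a * c + b * c == (a + b) * c;	/* Always 1, modulo 2^N.  */
}
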
7116 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7117    specified by EXPR into the buffer PTR of length LEN bytes.
7118    Return the number of bytes placed in the buffer, or zero
7119    upon failure.  */
7120 
7121 static int
7122 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7123 {
7124   tree type = TREE_TYPE (expr);
7125   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7126   int byte, offset, word, words;
7127   unsigned char value;
7128 
7129   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7130     return 0;
7131   if (off == -1)
7132     off = 0;
7133 
7134   if (ptr == NULL)
7135     /* Dry run.  */
7136     return MIN (len, total_bytes - off);
7137 
7138   words = total_bytes / UNITS_PER_WORD;
7139 
7140   for (byte = 0; byte < total_bytes; byte++)
7141     {
7142       int bitpos = byte * BITS_PER_UNIT;
7143       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7144 	 number of bytes.  */
7145       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7146 
7147       if (total_bytes > UNITS_PER_WORD)
7148 	{
7149 	  word = byte / UNITS_PER_WORD;
7150 	  if (WORDS_BIG_ENDIAN)
7151 	    word = (words - 1) - word;
7152 	  offset = word * UNITS_PER_WORD;
7153 	  if (BYTES_BIG_ENDIAN)
7154 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7155 	  else
7156 	    offset += byte % UNITS_PER_WORD;
7157 	}
7158       else
7159 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7160       if (offset >= off && offset - off < len)
7161 	ptr[offset - off] = value;
7162     }
7163   return MIN (len, total_bytes - off);
7164 }
7165 
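/* Editorial sketch, not part of GCC: the byte-extraction idea of the
   loop above with the word/byte endian corrections stripped away,
   emitting a plain 32-bit value in little-endian order.  demo_* is
   hypothetical.  */

static void
demo_encode_u32_le (unsigned v, unsigned char buf[4])
{
  for (int byte = 0; byte < 4; byte++)
    buf[byte] = (unsigned char) (v >> (byte * 8));	/* One byte at a time.  */
}
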
7166 
7167 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7168    specified by EXPR into the buffer PTR of length LEN bytes.
7169    Return the number of bytes placed in the buffer, or zero
7170    upon failure.  */
7171 
7172 static int
7173 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7174 {
7175   tree type = TREE_TYPE (expr);
7176   scalar_mode mode = SCALAR_TYPE_MODE (type);
7177   int total_bytes = GET_MODE_SIZE (mode);
7178   FIXED_VALUE_TYPE value;
7179   tree i_value, i_type;
7180 
7181   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7182     return 0;
7183 
7184   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7185 
7186   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7187     return 0;
7188 
7189   value = TREE_FIXED_CST (expr);
7190   i_value = double_int_to_tree (i_type, value.data);
7191 
7192   return native_encode_int (i_value, ptr, len, off);
7193 }
7194 
7195 
7196 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7197    specified by EXPR into the buffer PTR of length LEN bytes.
7198    Return the number of bytes placed in the buffer, or zero
7199    upon failure.  */
7200 
7201 static int
7202 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7203 {
7204   tree type = TREE_TYPE (expr);
7205   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7206   int byte, offset, word, words, bitpos;
7207   unsigned char value;
7208 
7209   /* There are always 32 bits in each long, no matter the size of
7210      the host's long.  We handle floating point representations with
7211      up to 192 bits.  */
7212   long tmp[6];
7213 
7214   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7215     return 0;
7216   if (off == -1)
7217     off = 0;
7218 
7219   if (ptr == NULL)
7220     /* Dry run.  */
7221     return MIN (len, total_bytes - off);
7222 
7223   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7224 
7225   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7226 
7227   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7228        bitpos += BITS_PER_UNIT)
7229     {
7230       byte = (bitpos / BITS_PER_UNIT) & 3;
7231       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7232 
7233       if (UNITS_PER_WORD < 4)
7234 	{
7235 	  word = byte / UNITS_PER_WORD;
7236 	  if (WORDS_BIG_ENDIAN)
7237 	    word = (words - 1) - word;
7238 	  offset = word * UNITS_PER_WORD;
7239 	  if (BYTES_BIG_ENDIAN)
7240 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7241 	  else
7242 	    offset += byte % UNITS_PER_WORD;
7243 	}
7244       else
7245 	{
7246 	  offset = byte;
7247 	  if (BYTES_BIG_ENDIAN)
7248 	    {
7249 	      /* Reverse bytes within each long, or within the entire float
7250 		 if it's smaller than a long (for HFmode).  */
7251 	      offset = MIN (3, total_bytes - 1) - offset;
7252 	      gcc_assert (offset >= 0);
7253 	    }
7254 	}
7255       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7256       if (offset >= off
7257 	  && offset - off < len)
7258 	ptr[offset - off] = value;
7259     }
7260   return MIN (len, total_bytes - off);
7261 }
7262 
7263 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7264    specified by EXPR into the buffer PTR of length LEN bytes.
7265    Return the number of bytes placed in the buffer, or zero
7266    upon failure.  */
7267 
7268 static int
7269 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7270 {
7271   int rsize, isize;
7272   tree part;
7273 
7274   part = TREE_REALPART (expr);
7275   rsize = native_encode_expr (part, ptr, len, off);
7276   if (off == -1 && rsize == 0)
7277     return 0;
7278   part = TREE_IMAGPART (expr);
7279   if (off != -1)
7280     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7281   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7282 			      len - rsize, off);
7283   if (off == -1 && isize != rsize)
7284     return 0;
7285   return rsize + isize;
7286 }
7287 
7288 
7289 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7290    specified by EXPR into the buffer PTR of length LEN bytes.
7291    Return the number of bytes placed in the buffer, or zero
7292    upon failure.  */
7293 
7294 static int
7295 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7296 {
7297   unsigned HOST_WIDE_INT i, count;
7298   int size, offset;
7299   tree itype, elem;
7300 
7301   offset = 0;
7302   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7303     return 0;
7304   itype = TREE_TYPE (TREE_TYPE (expr));
7305   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7306   for (i = 0; i < count; i++)
7307     {
7308       if (off >= size)
7309 	{
7310 	  off -= size;
7311 	  continue;
7312 	}
7313       elem = VECTOR_CST_ELT (expr, i);
7314       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7315 				    len - offset, off);
7316       if ((off == -1 && res != size) || res == 0)
7317 	return 0;
7318       offset += res;
7319       if (offset >= len)
7320 	return (off == -1 && i < count - 1) ? 0 : offset;
7321       if (off != -1)
7322 	off = 0;
7323     }
7324   return offset;
7325 }
7326 
7327 
7328 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7329    specified by EXPR into the buffer PTR of length LEN bytes.
7330    Return the number of bytes placed in the buffer, or zero
7331    upon failure.  */
7332 
7333 static int
7334 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7335 {
7336   tree type = TREE_TYPE (expr);
7337 
7338   /* Wide-char strings are encoded in target byte-order so natively
7339      encoding them is trivial.  */
7340   if (BITS_PER_UNIT != CHAR_BIT
7341       || TREE_CODE (type) != ARRAY_TYPE
7342       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7343       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7344     return 0;
7345 
7346   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7347   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7348     return 0;
7349   if (off == -1)
7350     off = 0;
7351   if (ptr == NULL)
7352     /* Dry run.  */;
7353   else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7354     {
7355       int written = 0;
7356       if (off < TREE_STRING_LENGTH (expr))
7357 	{
7358 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7359 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7360 	}
7361       memset (ptr + written, 0,
7362 	      MIN (total_bytes - written, len - written));
7363     }
7364   else
7365     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7366   return MIN (total_bytes - off, len);
7367 }
7368 
7369 
7370 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7371    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7372    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7373    anything, just do a dry run.  If OFF is not -1 then start
7374    the encoding at byte offset OFF and encode at most LEN bytes.
7375    Return the number of bytes placed in the buffer, or zero upon failure.  */
7376 
7377 int
7378 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7379 {
7380   /* We don't support starting at a negative offset, and -1 is special.  */
7381   if (off < -1)
7382     return 0;
7383 
7384   switch (TREE_CODE (expr))
7385     {
7386     case INTEGER_CST:
7387       return native_encode_int (expr, ptr, len, off);
7388 
7389     case REAL_CST:
7390       return native_encode_real (expr, ptr, len, off);
7391 
7392     case FIXED_CST:
7393       return native_encode_fixed (expr, ptr, len, off);
7394 
7395     case COMPLEX_CST:
7396       return native_encode_complex (expr, ptr, len, off);
7397 
7398     case VECTOR_CST:
7399       return native_encode_vector (expr, ptr, len, off);
7400 
7401     case STRING_CST:
7402       return native_encode_string (expr, ptr, len, off);
7403 
7404     default:
7405       return 0;
7406     }
7407 }
7408 
7409 
7410 /* Subroutine of native_interpret_expr.  Interpret the contents of
7411    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7412    If the buffer cannot be interpreted, return NULL_TREE.  */
7413 
7414 static tree
7415 native_interpret_int (tree type, const unsigned char *ptr, int len)
7416 {
7417   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7418 
7419   if (total_bytes > len
7420       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7421     return NULL_TREE;
7422 
7423   wide_int result = wi::from_buffer (ptr, total_bytes);
7424 
7425   return wide_int_to_tree (type, result);
7426 }
7427 
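/* Editorial sketch, not part of GCC: the inverse of the little-endian
   encoding sketch earlier, reassembling a 32-bit value from its bytes
   much as wi::from_buffer reassembles the wide_int above.  */

static unsigned
demo_decode_u32_le (const unsigned char buf[4])
{
  unsigned v = 0;
  for (int byte = 0; byte < 4; byte++)
    v |= (unsigned) buf[byte] << (byte * 8);
  return v;
}
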
7428 
7429 /* Subroutine of native_interpret_expr.  Interpret the contents of
7430    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7431    If the buffer cannot be interpreted, return NULL_TREE.  */
7432 
7433 static tree
7434 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7435 {
7436   scalar_mode mode = SCALAR_TYPE_MODE (type);
7437   int total_bytes = GET_MODE_SIZE (mode);
7438   double_int result;
7439   FIXED_VALUE_TYPE fixed_value;
7440 
7441   if (total_bytes > len
7442       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7443     return NULL_TREE;
7444 
7445   result = double_int::from_buffer (ptr, total_bytes);
7446   fixed_value = fixed_from_double_int (result, mode);
7447 
7448   return build_fixed (type, fixed_value);
7449 }
7450 
7451 
7452 /* Subroutine of native_interpret_expr.  Interpret the contents of
7453    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7454    If the buffer cannot be interpreted, return NULL_TREE.  */
7455 
7456 static tree
7457 native_interpret_real (tree type, const unsigned char *ptr, int len)
7458 {
7459   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7460   int total_bytes = GET_MODE_SIZE (mode);
7461   unsigned char value;
7462   /* There are always 32 bits in each long, no matter the size of
7463      the host's long.  We handle floating point representations with
7464      up to 192 bits.  */
7465   REAL_VALUE_TYPE r;
7466   long tmp[6];
7467 
7468   if (total_bytes > len || total_bytes > 24)
7469     return NULL_TREE;
7470   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7471 
7472   memset (tmp, 0, sizeof (tmp));
7473   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7474        bitpos += BITS_PER_UNIT)
7475     {
7476       /* Both OFFSET and BYTE index within a long;
7477 	 bitpos indexes the whole float.  */
7478       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7479       if (UNITS_PER_WORD < 4)
7480 	{
7481 	  int word = byte / UNITS_PER_WORD;
7482 	  if (WORDS_BIG_ENDIAN)
7483 	    word = (words - 1) - word;
7484 	  offset = word * UNITS_PER_WORD;
7485 	  if (BYTES_BIG_ENDIAN)
7486 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7487 	  else
7488 	    offset += byte % UNITS_PER_WORD;
7489 	}
7490       else
7491 	{
7492 	  offset = byte;
7493 	  if (BYTES_BIG_ENDIAN)
7494 	    {
7495 	      /* Reverse bytes within each long, or within the entire float
7496 		 if it's smaller than a long (for HFmode).  */
7497 	      offset = MIN (3, total_bytes - 1) - offset;
7498 	      gcc_assert (offset >= 0);
7499 	    }
7500 	}
7501       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7502 
7503       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7504     }
7505 
7506   real_from_target (&r, tmp, mode);
7507   return build_real (type, r);
7508 }
7509 
7510 
7511 /* Subroutine of native_interpret_expr.  Interpret the contents of
7512    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7513    If the buffer cannot be interpreted, return NULL_TREE.  */
7514 
7515 static tree
7516 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7517 {
7518   tree etype, rpart, ipart;
7519   int size;
7520 
7521   etype = TREE_TYPE (type);
7522   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7523   if (size * 2 > len)
7524     return NULL_TREE;
7525   rpart = native_interpret_expr (etype, ptr, size);
7526   if (!rpart)
7527     return NULL_TREE;
7528   ipart = native_interpret_expr (etype, ptr+size, size);
7529   if (!ipart)
7530     return NULL_TREE;
7531   return build_complex (type, rpart, ipart);
7532 }
7533 
7534 
7535 /* Subroutine of native_interpret_expr.  Interpret the contents of
7536    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7537    If the buffer cannot be interpreted, return NULL_TREE.  */
7538 
7539 static tree
7540 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7541 {
7542   tree etype, elem;
7543   unsigned int i, size;
7544   unsigned HOST_WIDE_INT count;
7545 
7546   etype = TREE_TYPE (type);
7547   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7548   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7549       || size * count > len)
7550     return NULL_TREE;
7551 
7552   tree_vector_builder elements (type, count, 1);
7553   for (i = 0; i < count; ++i)
7554     {
7555       elem = native_interpret_expr (etype, ptr+(i*size), size);
7556       if (!elem)
7557 	return NULL_TREE;
7558       elements.quick_push (elem);
7559     }
7560   return elements.build ();
7561 }
7562 
7563 
7564 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7565    the buffer PTR of length LEN as a constant of type TYPE.  For
7566    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7567    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7568    return NULL_TREE.  */
7569 
7570 tree
7571 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7572 {
7573   switch (TREE_CODE (type))
7574     {
7575     case INTEGER_TYPE:
7576     case ENUMERAL_TYPE:
7577     case BOOLEAN_TYPE:
7578     case POINTER_TYPE:
7579     case REFERENCE_TYPE:
7580       return native_interpret_int (type, ptr, len);
7581 
7582     case REAL_TYPE:
7583       return native_interpret_real (type, ptr, len);
7584 
7585     case FIXED_POINT_TYPE:
7586       return native_interpret_fixed (type, ptr, len);
7587 
7588     case COMPLEX_TYPE:
7589       return native_interpret_complex (type, ptr, len);
7590 
7591     case VECTOR_TYPE:
7592       return native_interpret_vector (type, ptr, len);
7593 
7594     default:
7595       return NULL_TREE;
7596     }
7597 }
7598 
7599 /* Returns true if we can interpret the contents of a native encoding
7600    as TYPE.  */
7601 
7602 static bool
7603 can_native_interpret_type_p (tree type)
7604 {
7605   switch (TREE_CODE (type))
7606     {
7607     case INTEGER_TYPE:
7608     case ENUMERAL_TYPE:
7609     case BOOLEAN_TYPE:
7610     case POINTER_TYPE:
7611     case REFERENCE_TYPE:
7612     case FIXED_POINT_TYPE:
7613     case REAL_TYPE:
7614     case COMPLEX_TYPE:
7615     case VECTOR_TYPE:
7616       return true;
7617     default:
7618       return false;
7619     }
7620 }
7621 
7622 
7623 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7624    TYPE at compile-time.  If we're unable to perform the conversion
7625    return NULL_TREE.  */
7626 
7627 static tree
7628 fold_view_convert_expr (tree type, tree expr)
7629 {
7630   /* We support up to 512-bit values (for V8DFmode).  */
7631   unsigned char buffer[64];
7632   int len;
7633 
7634   /* Check that the host and target are sane.  */
7635   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7636     return NULL_TREE;
7637 
7638   len = native_encode_expr (expr, buffer, sizeof (buffer));
7639   if (len == 0)
7640     return NULL_TREE;
7641 
7642   return native_interpret_expr (type, buffer, len);
7643 }
7644 
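/* Editorial analogy, not part of GCC: fold_view_convert_expr performs at
   compile time what memcpy-based type punning does at run time, assumed
   here for 32-bit unsigned and float.  */

static float
demo_view_convert (unsigned bits)
{
  float f;
  __builtin_memcpy (&f, &bits, sizeof f);	/* Reinterpret the bytes.  */
  return f;
}
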
7645 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7646    to avoid confusing the gimplify process.  */
7647 
7648 tree
7649 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7650 {
7651   /* The size of the object is not relevant when talking about its address.  */
7652   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7653     t = TREE_OPERAND (t, 0);
7654 
7655   if (TREE_CODE (t) == INDIRECT_REF)
7656     {
7657       t = TREE_OPERAND (t, 0);
7658 
7659       if (TREE_TYPE (t) != ptrtype)
7660 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7661     }
7662   else if (TREE_CODE (t) == MEM_REF
7663 	   && integer_zerop (TREE_OPERAND (t, 1)))
7664     return TREE_OPERAND (t, 0);
7665   else if (TREE_CODE (t) == MEM_REF
7666 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7667     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7668 			TREE_OPERAND (t, 0),
7669 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7670   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7671     {
7672       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7673 
7674       if (TREE_TYPE (t) != ptrtype)
7675 	t = fold_convert_loc (loc, ptrtype, t);
7676     }
7677   else
7678     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7679 
7680   return t;
7681 }
7682 
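/* Editorial example, not part of GCC: the INDIRECT_REF case above in
   source form; the address of a dereference folds back to the pointer.  */

static int *
demo_addr_of_deref (int *p)
{
  return &*p;	/* Folded to plain P.  */
}
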
7683 /* Build an expression for the address of T.  */
7684 
7685 tree
7686 build_fold_addr_expr_loc (location_t loc, tree t)
7687 {
7688   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7689 
7690   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7691 }
7692 
7693 /* Fold a unary expression of code CODE and type TYPE with operand
7694    OP0.  Return the folded expression if folding is successful.
7695    Otherwise, return NULL_TREE.  */
7696 
7697 tree
7698 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7699 {
7700   tree tem;
7701   tree arg0;
7702   enum tree_code_class kind = TREE_CODE_CLASS (code);
7703 
7704   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7705 	      && TREE_CODE_LENGTH (code) == 1);
7706 
7707   arg0 = op0;
7708   if (arg0)
7709     {
7710       if (CONVERT_EXPR_CODE_P (code)
7711 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7712 	{
7713 	  /* Don't use STRIP_NOPS, because signedness of argument type
7714 	     matters.  */
7715 	  STRIP_SIGN_NOPS (arg0);
7716 	}
7717       else
7718 	{
7719 	  /* Strip any conversions that don't change the mode.  This
7720 	     is safe for every expression, except for a comparison
7721 	     expression because its signedness is derived from its
7722 	     operands.
7723 
7724 	     Note that this is done as an internal manipulation within
7725 	     the constant folder, in order to find the simplest
7726 	     representation of the arguments so that their form can be
7727 	     studied.  In any cases, the appropriate type conversions
7728 	     should be put back in the tree that will get out of the
7729 	     constant folder.  */
7730 	  STRIP_NOPS (arg0);
7731 	}
7732 
7733       if (CONSTANT_CLASS_P (arg0))
7734 	{
7735 	  tree tem = const_unop (code, type, arg0);
7736 	  if (tem)
7737 	    {
7738 	      if (TREE_TYPE (tem) != type)
7739 		tem = fold_convert_loc (loc, type, tem);
7740 	      return tem;
7741 	    }
7742 	}
7743     }
7744 
7745   tem = generic_simplify (loc, code, type, op0);
7746   if (tem)
7747     return tem;
7748 
7749   if (TREE_CODE_CLASS (code) == tcc_unary)
7750     {
7751       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7752 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7753 		       fold_build1_loc (loc, code, type,
7754 				    fold_convert_loc (loc, TREE_TYPE (op0),
7755 						      TREE_OPERAND (arg0, 1))));
7756       else if (TREE_CODE (arg0) == COND_EXPR)
7757 	{
7758 	  tree arg01 = TREE_OPERAND (arg0, 1);
7759 	  tree arg02 = TREE_OPERAND (arg0, 2);
7760 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7761 	    arg01 = fold_build1_loc (loc, code, type,
7762 				 fold_convert_loc (loc,
7763 						   TREE_TYPE (op0), arg01));
7764 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7765 	    arg02 = fold_build1_loc (loc, code, type,
7766 				 fold_convert_loc (loc,
7767 						   TREE_TYPE (op0), arg02));
7768 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7769 			     arg01, arg02);
7770 
7771 	  /* If this was a conversion, and all we did was to move it
7772 	     inside the COND_EXPR, bring it back out.  But leave it if
7773 	     it is a conversion from integer to integer and the
7774 	     result precision is no wider than a word since such a
7775 	     conversion is cheap and may be optimized away by combine,
7776 	     while it couldn't if it were outside the COND_EXPR.  Then return
7777 	     so we don't get into an infinite recursion loop taking the
7778 	     conversion out and then back in.  */
7779 
7780 	  if ((CONVERT_EXPR_CODE_P (code)
7781 	       || code == NON_LVALUE_EXPR)
7782 	      && TREE_CODE (tem) == COND_EXPR
7783 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7784 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7785 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7786 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7787 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7788 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7789 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7790 		     && (INTEGRAL_TYPE_P
7791 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7792 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7793 		  || flag_syntax_only))
7794 	    tem = build1_loc (loc, code, type,
7795 			      build3 (COND_EXPR,
7796 				      TREE_TYPE (TREE_OPERAND
7797 						 (TREE_OPERAND (tem, 1), 0)),
7798 				      TREE_OPERAND (tem, 0),
7799 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7800 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7801 						    0)));
7802 	  return tem;
7803 	}
7804    }
7805 
7806   switch (code)
7807     {
7808     case NON_LVALUE_EXPR:
7809       if (!maybe_lvalue_p (op0))
7810 	return fold_convert_loc (loc, type, op0);
7811       return NULL_TREE;
7812 
7813     CASE_CONVERT:
7814     case FLOAT_EXPR:
7815     case FIX_TRUNC_EXPR:
7816       if (COMPARISON_CLASS_P (op0))
7817 	{
7818 	  /* If we have (type) (a CMP b) and type is an integral type, return
7819 	     new expression involving the new type.  Canonicalize
7820 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7821 	     non-integral type.
7822 	     Do not fold the result as that would not simplify further, also
7823 	     folding again results in recursions.  */
7824 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7825 	    return build2_loc (loc, TREE_CODE (op0), type,
7826 			       TREE_OPERAND (op0, 0),
7827 			       TREE_OPERAND (op0, 1));
7828 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7829 		   && TREE_CODE (type) != VECTOR_TYPE)
7830 	    return build3_loc (loc, COND_EXPR, type, op0,
7831 			       constant_boolean_node (true, type),
7832 			       constant_boolean_node (false, type));
7833 	}
7834 
7835       /* Handle (T *)&A.B.C for A being of type T and B and C
7836 	 living at offset zero.  This occurs frequently in
7837 	 C++ upcasting and then accessing the base.  */
7838       if (TREE_CODE (op0) == ADDR_EXPR
7839 	  && POINTER_TYPE_P (type)
7840 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7841         {
7842 	  poly_int64 bitsize, bitpos;
7843 	  tree offset;
7844 	  machine_mode mode;
7845 	  int unsignedp, reversep, volatilep;
7846 	  tree base
7847 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7848 				   &offset, &mode, &unsignedp, &reversep,
7849 				   &volatilep);
7850 	  /* If the reference was to a (constant) zero offset, we can use
7851 	     the address of the base if it has the same base type
7852 	     as the result type and the pointer type is unqualified.  */
7853 	  if (!offset
7854 	      && known_eq (bitpos, 0)
7855 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7856 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7857 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7858 	    return fold_convert_loc (loc, type,
7859 				     build_fold_addr_expr_loc (loc, base));
7860         }
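      /* Worked example (illustrative): given

	   struct A { struct B { int i; } b; } a;

	 the conversion (struct A *)&a.b reaches this point with a
	 component path at bit position zero whose base object a has
	 the pointed-to type, so it folds to plain &a.  */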
7861 
7862       if (TREE_CODE (op0) == MODIFY_EXPR
7863 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7864 	  /* Detect assigning a bitfield.  */
7865 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7866 	       && DECL_BIT_FIELD
7867 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7868 	{
7869 	  /* Don't leave an assignment inside a conversion
7870 	     unless assigning a bitfield.  */
7871 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7872 	  /* First do the assignment, then return converted constant.  */
7873 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7874 	  TREE_NO_WARNING (tem) = 1;
7875 	  TREE_USED (tem) = 1;
7876 	  return tem;
7877 	}
7878 
7879       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7880 	 constants (if x has signed type, the sign bit cannot be set
7881 	 in c).  This folds extension into the BIT_AND_EXPR.
7882 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7883 	 very likely don't have maximal range for their precision and this
7884 	 transformation effectively doesn't preserve non-maximal ranges.  */
7885       if (TREE_CODE (type) == INTEGER_TYPE
7886 	  && TREE_CODE (op0) == BIT_AND_EXPR
7887 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7888 	{
7889 	  tree and_expr = op0;
7890 	  tree and0 = TREE_OPERAND (and_expr, 0);
7891 	  tree and1 = TREE_OPERAND (and_expr, 1);
7892 	  int change = 0;
7893 
7894 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7895 	      || (TYPE_PRECISION (type)
7896 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7897 	    change = 1;
7898 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7899 		   <= HOST_BITS_PER_WIDE_INT
7900 		   && tree_fits_uhwi_p (and1))
7901 	    {
7902 	      unsigned HOST_WIDE_INT cst;
7903 
7904 	      cst = tree_to_uhwi (and1);
7905 	      cst &= HOST_WIDE_INT_M1U
7906 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7907 	      change = (cst == 0);
7908 	      if (change
7909 		  && !flag_syntax_only
7910 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7911 		      == ZERO_EXTEND))
7912 		{
7913 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7914 		  and0 = fold_convert_loc (loc, uns, and0);
7915 		  and1 = fold_convert_loc (loc, uns, and1);
7916 		}
7917 	    }
7918 	  if (change)
7919 	    {
7920 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
7921 				    TREE_OVERFLOW (and1));
7922 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7923 				      fold_convert_loc (loc, type, and0), tem);
7924 	    }
7925 	}
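      /* Worked example (illustrative): for a narrow signed operand x,
	 a widening such as (unsigned int)(x & 0x7f) takes the second
	 branch above: 0x7f leaves the sign bit of the narrow type
	 clear, so the conversion commutes with the mask and the
	 result becomes (unsigned int) x & 0x7f.  */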
7926 
7927       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7928 	 cast (T1)X will fold away.  We assume that this happens when X itself
7929 	 is a cast.  */
7930       if (POINTER_TYPE_P (type)
7931 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7932 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7933 	{
7934 	  tree arg00 = TREE_OPERAND (arg0, 0);
7935 	  tree arg01 = TREE_OPERAND (arg0, 1);
7936 
7937 	  return fold_build_pointer_plus_loc
7938 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7939 	}
7940 
7941       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7942 	 of the same precision, and X is an integer type not narrower than
7943 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7944       if (INTEGRAL_TYPE_P (type)
7945 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7946 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7947 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7948 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7949 	{
7950 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7951 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7952 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7953 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7954 				fold_convert_loc (loc, type, tem));
7955 	}
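      /* E.g. (illustrative): for unsigned int u on a target with
	 32-bit int, (unsigned int) ~(int) u folds to ~u here; the
	 precisions match and the inner cast is not a widening, so
	 complementing before or after the conversion produces the
	 same bit pattern.  */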
7956 
7957       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7958 	 type of X and Y (integer types only).  */
7959       if (INTEGRAL_TYPE_P (type)
7960 	  && TREE_CODE (op0) == MULT_EXPR
7961 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7962 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7963 	{
7964 	  /* Be careful not to introduce new overflows.  */
7965 	  tree mult_type;
7966           if (TYPE_OVERFLOW_WRAPS (type))
7967 	    mult_type = type;
7968 	  else
7969 	    mult_type = unsigned_type_for (type);
7970 
7971 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7972 	    {
7973 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7974 				 fold_convert_loc (loc, mult_type,
7975 						   TREE_OPERAND (op0, 0)),
7976 				 fold_convert_loc (loc, mult_type,
7977 						   TREE_OPERAND (op0, 1)));
7978 	      return fold_convert_loc (loc, type, tem);
7979 	    }
7980 	}
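      /* Worked example (illustrative, assuming 64-bit long and
	 32-bit int): (int)(x * y) with long x and y is rebuilt as
	 (int)((unsigned int) x * (unsigned int) y); the product is
	 formed in the unsigned narrower type, which keeps the low
	 32 bits without introducing new undefined overflow.  */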
7981 
7982       return NULL_TREE;
7983 
7984     case VIEW_CONVERT_EXPR:
7985       if (TREE_CODE (op0) == MEM_REF)
7986         {
7987 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7988 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7989 	  tem = fold_build2_loc (loc, MEM_REF, type,
7990 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7991 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7992 	  return tem;
7993 	}
7994 
7995       return NULL_TREE;
7996 
7997     case NEGATE_EXPR:
7998       tem = fold_negate_expr (loc, arg0);
7999       if (tem)
8000 	return fold_convert_loc (loc, type, tem);
8001       return NULL_TREE;
8002 
8003     case ABS_EXPR:
8004       /* Convert fabs((double)float) into (double)fabsf(float).  */
8005       if (TREE_CODE (arg0) == NOP_EXPR
8006 	  && TREE_CODE (type) == REAL_TYPE)
8007 	{
8008 	  tree targ0 = strip_float_extensions (arg0);
8009 	  if (targ0 != arg0)
8010 	    return fold_convert_loc (loc, type,
8011 				     fold_build1_loc (loc, ABS_EXPR,
8012 						  TREE_TYPE (targ0),
8013 						  targ0));
8014 	}
8015       return NULL_TREE;
8016 
8017     case BIT_NOT_EXPR:
8018       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8019       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8020 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8021 				    fold_convert_loc (loc, type,
8022 						      TREE_OPERAND (arg0, 0)))))
8023 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8024 				fold_convert_loc (loc, type,
8025 						  TREE_OPERAND (arg0, 1)));
8026       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8027 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8028 			       	     fold_convert_loc (loc, type,
8029 						       TREE_OPERAND (arg0, 1)))))
8030 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8031 			    fold_convert_loc (loc, type,
8032 					      TREE_OPERAND (arg0, 0)), tem);
8033 
8034       return NULL_TREE;
8035 
8036     case TRUTH_NOT_EXPR:
8037       /* Note that the operand of this must be an int
8038 	 and its values must be 0 or 1.
8039 	 ("true" is a fixed value perhaps depending on the language,
8040 	 but we don't handle values other than 1 correctly yet.)  */
8041       tem = fold_truth_not_expr (loc, arg0);
8042       if (!tem)
8043 	return NULL_TREE;
8044       return fold_convert_loc (loc, type, tem);
8045 
8046     case INDIRECT_REF:
8047       /* Fold *&X to X if X is an lvalue.  */
8048       if (TREE_CODE (op0) == ADDR_EXPR)
8049 	{
8050 	  tree op00 = TREE_OPERAND (op0, 0);
8051 	  if ((VAR_P (op00)
8052 	       || TREE_CODE (op00) == PARM_DECL
8053 	       || TREE_CODE (op00) == RESULT_DECL)
8054 	      && !TREE_READONLY (op00))
8055 	    return op00;
8056 	}
8057       return NULL_TREE;
8058 
8059     default:
8060       return NULL_TREE;
8061     } /* switch (code) */
8062 }
8063 
8064 
8065 /* If the operation was a conversion do _not_ mark a resulting constant
8066    with TREE_OVERFLOW if the original constant was not.  These conversions
8067    have implementation defined behavior and retaining the TREE_OVERFLOW
8068    flag here would confuse later passes such as VRP.  */
8069 tree
8070 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8071 				tree type, tree op0)
8072 {
8073   tree res = fold_unary_loc (loc, code, type, op0);
8074   if (res
8075       && TREE_CODE (res) == INTEGER_CST
8076       && TREE_CODE (op0) == INTEGER_CST
8077       && CONVERT_EXPR_CODE_P (code))
8078     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8079 
8080   return res;
8081 }
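/* For example (illustrative): folding (int) 4294967295u yields an
   INTEGER_CST via an implementation-defined conversion; the wrapper
   above copies the overflow flag from the original constant instead
   of letting the conversion set it, so passes such as VRP do not
   treat the result as overflowed.  */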
8082 
8083 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8084    operands OP0 and OP1.  LOC is the location of the resulting expression.
8085    ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8086    Return the folded expression if folding is successful.  Otherwise,
8087    return NULL_TREE.  */
8088 static tree
8089 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8090 		  tree arg0, tree arg1, tree op0, tree op1)
8091 {
8092   tree tem;
8093 
8094   /* We only do these simplifications if we are optimizing.  */
8095   if (!optimize)
8096     return NULL_TREE;
8097 
8098   /* Check for things like (A || B) && (A || C).  We can convert this
8099      to A || (B && C).  Note that either operator can be any of the four
8100      truth and/or operations and the transformation will still be
8101      valid.   Also note that we only care about order for the
8102      ANDIF and ORIF operators.  If B contains side effects, this
8103      might change the truth-value of A.  */
8104   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8105       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8106 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8107 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8108 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8109       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8110     {
8111       tree a00 = TREE_OPERAND (arg0, 0);
8112       tree a01 = TREE_OPERAND (arg0, 1);
8113       tree a10 = TREE_OPERAND (arg1, 0);
8114       tree a11 = TREE_OPERAND (arg1, 1);
8115       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8116 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8117 			 && (code == TRUTH_AND_EXPR
8118 			     || code == TRUTH_OR_EXPR));
8119 
8120       if (operand_equal_p (a00, a10, 0))
8121 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8122 			    fold_build2_loc (loc, code, type, a01, a11));
8123       else if (commutative && operand_equal_p (a00, a11, 0))
8124 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8125 			    fold_build2_loc (loc, code, type, a01, a10));
8126       else if (commutative && operand_equal_p (a01, a10, 0))
8127 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8128 			    fold_build2_loc (loc, code, type, a00, a11));
8129 
8130       /* This case is tricky because we must either have commutative
8131 	 operators or else A10 must not have side-effects.  */
8132 
8133       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8134 	       && operand_equal_p (a01, a11, 0))
8135 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8136 			    fold_build2_loc (loc, code, type, a00, a10),
8137 			    a01);
8138     }
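  /* For instance (illustrative): (a || b) && (a || c) matches the
     a00 == a10 arm above and becomes a || (b && c), so a is tested
     only once; the analogous rewrites cover the other and/or
     combinations, subject to the side-effect checks.  */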
8139 
8140   /* See if we can build a range comparison.  */
8141   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8142     return tem;
8143 
8144   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8145       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8146     {
8147       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8148       if (tem)
8149 	return fold_build2_loc (loc, code, type, tem, arg1);
8150     }
8151 
8152   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8153       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8154     {
8155       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8156       if (tem)
8157 	return fold_build2_loc (loc, code, type, arg0, tem);
8158     }
8159 
8160   /* Check for the possibility of merging component references.  If our
8161      lhs is another similar operation, try to merge its rhs with our
8162      rhs.  Then try to merge our lhs and rhs.  */
8163   if (TREE_CODE (arg0) == code
8164       && (tem = fold_truth_andor_1 (loc, code, type,
8165 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
8166     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8167 
8168   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8169     return tem;
8170 
8171   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8172   if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
8173     logical_op_non_short_circuit
8174       = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
8175   if (logical_op_non_short_circuit
8176       && !flag_sanitize_coverage
8177       && (code == TRUTH_AND_EXPR
8178           || code == TRUTH_ANDIF_EXPR
8179           || code == TRUTH_OR_EXPR
8180           || code == TRUTH_ORIF_EXPR))
8181     {
8182       enum tree_code ncode, icode;
8183 
8184       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8185 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8186       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8187 
8188       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8189 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8190 	 We don't want to pack more than two leaves into a non-IF AND/OR
8191 	 expression.
8192 	 If the tree code of the left-hand operand isn't an AND/OR-IF code and isn't
8193 	 equal to IF-CODE, then we don't want to add the right-hand operand.
8194 	 If the inner right-hand side of the left-hand operand has
8195 	 side-effects, or isn't simple, then we can't add to it,
8196 	 as otherwise we might destroy the if-sequence.  */
8197       if (TREE_CODE (arg0) == icode
8198 	  && simple_operand_p_2 (arg1)
8199 	  /* Needed for sequence points to handle trappings, and
8200 	     side-effects.  */
8201 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8202 	{
8203 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8204 				 arg1);
8205 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8206 				  tem);
8207 	}
8208 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8209 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8210       else if (TREE_CODE (arg1) == icode
8211 	  && simple_operand_p_2 (arg0)
8212 	  /* Needed for sequence points to handle trappings, and
8213 	     side-effects.  */
8214 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8215 	{
8216 	  tem = fold_build2_loc (loc, ncode, type,
8217 				 arg0, TREE_OPERAND (arg1, 0));
8218 	  return fold_build2_loc (loc, icode, type, tem,
8219 				  TREE_OPERAND (arg1, 1));
8220 	}
8221       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8222 	 into (A OR B).
8223 	 For sequence point consistency, we need to check for trapping,
8224 	 and side-effects.  */
8225       else if (code == icode && simple_operand_p_2 (arg0)
8226                && simple_operand_p_2 (arg1))
8227 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8228     }
8229 
8230   return NULL_TREE;
8231 }
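/* For illustration (target-dependent sketch): when
   LOGICAL_OP_NON_SHORT_CIRCUIT is in effect, the tail of the
   function above rewrites a short-circuit form such as
   a < b && c < d, whose operands are simple and cannot trap, into
   the non-branching TRUTH_AND_EXPR form, trading a conditional
   jump for straight-line evaluation.  */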
8232 
8233 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8234    by changing CODE to reduce the magnitude of constants involved in
8235    ARG0 of the comparison.
8236    Returns a canonicalized comparison tree if a simplification was
8237    possible, otherwise returns NULL_TREE.
8238    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8239    valid if signed overflow is undefined.  */
8240 
8241 static tree
8242 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8243 				 tree arg0, tree arg1,
8244 				 bool *strict_overflow_p)
8245 {
8246   enum tree_code code0 = TREE_CODE (arg0);
8247   tree t, cst0 = NULL_TREE;
8248   int sgn0;
8249 
8250   /* Match A +- CST code arg1.  We can change this only if overflow
8251      is undefined.  */
8252   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8253 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8254 	/* In principle pointers also have undefined overflow behavior,
8255 	   but that causes problems elsewhere.  */
8256 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
8257 	&& (code0 == MINUS_EXPR
8258 	    || code0 == PLUS_EXPR)
8259 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8260     return NULL_TREE;
8261 
8262   /* Identify the constant in arg0 and its sign.  */
8263   cst0 = TREE_OPERAND (arg0, 1);
8264   sgn0 = tree_int_cst_sgn (cst0);
8265 
8266   /* Overflowed constants and zero will cause problems.  */
8267   if (integer_zerop (cst0)
8268       || TREE_OVERFLOW (cst0))
8269     return NULL_TREE;
8270 
8271   /* See if we can reduce the magnitude of the constant in
8272      arg0 by changing the comparison code.  */
8273   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8274   if (code == LT_EXPR
8275       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8276     code = LE_EXPR;
8277   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8278   else if (code == GT_EXPR
8279 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8280     code = GE_EXPR;
8281   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8282   else if (code == LE_EXPR
8283 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8284     code = LT_EXPR;
8285   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8286   else if (code == GE_EXPR
8287 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8288     code = GT_EXPR;
8289   else
8290     return NULL_TREE;
8291   *strict_overflow_p = true;
8292 
8293   /* Now build the constant reduced in magnitude.  But not if that
8294      would produce one outside of its type's range.  */
8295   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8296       && ((sgn0 == 1
8297 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8298 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8299 	  || (sgn0 == -1
8300 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8301 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8302     return NULL_TREE;
8303 
8304   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8305 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8306   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8307   t = fold_convert (TREE_TYPE (arg1), t);
8308 
8309   return fold_build2_loc (loc, code, type, t, arg1);
8310 }
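/* Worked example (illustrative): for signed int a with undefined
   overflow, a - 5 < b matches the first pattern above and becomes
   a - 4 <= b; each application moves the constant one step closer
   to zero while the comparison code is adjusted to compensate.  */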
8311 
8312 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8313    overflow further.  Try to decrease the magnitude of constants involved
8314    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8315    and put sole constants at the second argument position.
8316    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8317 
8318 static tree
8319 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8320 			       tree arg0, tree arg1)
8321 {
8322   tree t;
8323   bool strict_overflow_p;
8324   const char * const warnmsg = G_("assuming signed overflow does not occur "
8325 				  "when reducing constant in comparison");
8326 
8327   /* Try canonicalization by simplifying arg0.  */
8328   strict_overflow_p = false;
8329   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8330 				       &strict_overflow_p);
8331   if (t)
8332     {
8333       if (strict_overflow_p)
8334 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8335       return t;
8336     }
8337 
8338   /* Try canonicalization by simplifying arg1 using the swapped
8339      comparison.  */
8340   code = swap_tree_comparison (code);
8341   strict_overflow_p = false;
8342   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8343 				       &strict_overflow_p);
8344   if (t && strict_overflow_p)
8345     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8346   return t;
8347 }
8348 
8349 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8350    space.  This is used to avoid issuing overflow warnings for
8351    expressions like &p->x which cannot wrap.  */
8352 
8353 static bool
8354 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8355 {
8356   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8357     return true;
8358 
8359   if (maybe_lt (bitpos, 0))
8360     return true;
8361 
8362   poly_wide_int wi_offset;
8363   int precision = TYPE_PRECISION (TREE_TYPE (base));
8364   if (offset == NULL_TREE)
8365     wi_offset = wi::zero (precision);
8366   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8367     return true;
8368   else
8369     wi_offset = wi::to_poly_wide (offset);
8370 
8371   bool overflow;
8372   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8373 				  precision);
8374   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8375   if (overflow)
8376     return true;
8377 
8378   poly_uint64 total_hwi, size;
8379   if (!total.to_uhwi (&total_hwi)
8380       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8381 			   &size)
8382       || known_eq (size, 0U))
8383     return true;
8384 
8385   if (known_le (total_hwi, size))
8386     return false;
8387 
8388   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8389      array.  */
8390   if (TREE_CODE (base) == ADDR_EXPR
8391       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8392 			  &size)
8393       && maybe_ne (size, 0U)
8394       && known_le (total_hwi, size))
8395     return false;
8396 
8397   return true;
8398 }
8399 
8400 /* Return a positive integer when the symbol DECL is known to have
8401    a nonzero address, zero when it's known not to (e.g., it's a weak
8402    symbol), and a negative integer when the symbol is not yet in the
8403    symbol table and so whether or not its address is zero is unknown.
8404    For function-local objects, always return a positive integer.  */
8405 static int
8406 maybe_nonzero_address (tree decl)
8407 {
8408   if (DECL_P (decl) && decl_in_symtab_p (decl))
8409     if (struct symtab_node *symbol = symtab_node::get_create (decl))
8410       return symbol->nonzero_address ();
8411 
8412   /* Function local objects are never NULL.  */
8413   if (DECL_P (decl)
8414       && (DECL_CONTEXT (decl)
8415       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8416       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8417     return 1;
8418 
8419   return -1;
8420 }
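/* E.g. (illustrative): for a declaration such as
   __attribute__((weak)) extern int w; the symbol table cannot prove
   &w nonzero, so 0 is returned, whereas an auto variable declared in
   the current function yields 1.  */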
8421 
8422 /* Subroutine of fold_binary.  This routine performs all of the
8423    transformations that are common to the equality/inequality
8424    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8425    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8426    fold_binary should call fold_binary.  Fold a comparison with
8427    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8428    the folded comparison or NULL_TREE.  */
8429 
8430 static tree
8431 fold_comparison (location_t loc, enum tree_code code, tree type,
8432 		 tree op0, tree op1)
8433 {
8434   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8435   tree arg0, arg1, tem;
8436 
8437   arg0 = op0;
8438   arg1 = op1;
8439 
8440   STRIP_SIGN_NOPS (arg0);
8441   STRIP_SIGN_NOPS (arg1);
8442 
8443   /* For comparisons of pointers we can decompose it to a compile time
8444      comparison of the base objects and the offsets into the object.
8445      This requires at least one operand being an ADDR_EXPR or a
8446      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8447   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8448       && (TREE_CODE (arg0) == ADDR_EXPR
8449 	  || TREE_CODE (arg1) == ADDR_EXPR
8450 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8451 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8452     {
8453       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8454       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8455       machine_mode mode;
8456       int volatilep, reversep, unsignedp;
8457       bool indirect_base0 = false, indirect_base1 = false;
8458 
8459       /* Get base and offset for the access.  Strip ADDR_EXPR for
8460 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8461 	 off the base object if possible.  indirect_baseN will be true
8462 	 if baseN is not an address but refers to the object itself.  */
8463       base0 = arg0;
8464       if (TREE_CODE (arg0) == ADDR_EXPR)
8465 	{
8466 	  base0
8467 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
8468 				   &bitsize, &bitpos0, &offset0, &mode,
8469 				   &unsignedp, &reversep, &volatilep);
8470 	  if (TREE_CODE (base0) == INDIRECT_REF)
8471 	    base0 = TREE_OPERAND (base0, 0);
8472 	  else
8473 	    indirect_base0 = true;
8474 	}
8475       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8476 	{
8477 	  base0 = TREE_OPERAND (arg0, 0);
8478 	  STRIP_SIGN_NOPS (base0);
8479 	  if (TREE_CODE (base0) == ADDR_EXPR)
8480 	    {
8481 	      base0
8482 		= get_inner_reference (TREE_OPERAND (base0, 0),
8483 				       &bitsize, &bitpos0, &offset0, &mode,
8484 				       &unsignedp, &reversep, &volatilep);
8485 	      if (TREE_CODE (base0) == INDIRECT_REF)
8486 		base0 = TREE_OPERAND (base0, 0);
8487 	      else
8488 		indirect_base0 = true;
8489 	    }
8490 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8491 	    offset0 = TREE_OPERAND (arg0, 1);
8492 	  else
8493 	    offset0 = size_binop (PLUS_EXPR, offset0,
8494 				  TREE_OPERAND (arg0, 1));
8495 	  if (poly_int_tree_p (offset0))
8496 	    {
8497 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8498 					      TYPE_PRECISION (sizetype));
8499 	      tem <<= LOG2_BITS_PER_UNIT;
8500 	      tem += bitpos0;
8501 	      if (tem.to_shwi (&bitpos0))
8502 		offset0 = NULL_TREE;
8503 	    }
8504 	}
8505 
8506       base1 = arg1;
8507       if (TREE_CODE (arg1) == ADDR_EXPR)
8508 	{
8509 	  base1
8510 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
8511 				   &bitsize, &bitpos1, &offset1, &mode,
8512 				   &unsignedp, &reversep, &volatilep);
8513 	  if (TREE_CODE (base1) == INDIRECT_REF)
8514 	    base1 = TREE_OPERAND (base1, 0);
8515 	  else
8516 	    indirect_base1 = true;
8517 	}
8518       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8519 	{
8520 	  base1 = TREE_OPERAND (arg1, 0);
8521 	  STRIP_SIGN_NOPS (base1);
8522 	  if (TREE_CODE (base1) == ADDR_EXPR)
8523 	    {
8524 	      base1
8525 		= get_inner_reference (TREE_OPERAND (base1, 0),
8526 				       &bitsize, &bitpos1, &offset1, &mode,
8527 				       &unsignedp, &reversep, &volatilep);
8528 	      if (TREE_CODE (base1) == INDIRECT_REF)
8529 		base1 = TREE_OPERAND (base1, 0);
8530 	      else
8531 		indirect_base1 = true;
8532 	    }
8533 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8534 	    offset1 = TREE_OPERAND (arg1, 1);
8535 	  else
8536 	    offset1 = size_binop (PLUS_EXPR, offset1,
8537 				  TREE_OPERAND (arg1, 1));
8538 	  if (poly_int_tree_p (offset1))
8539 	    {
8540 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8541 					      TYPE_PRECISION (sizetype));
8542 	      tem <<= LOG2_BITS_PER_UNIT;
8543 	      tem += bitpos1;
8544 	      if (tem.to_shwi (&bitpos1))
8545 		offset1 = NULL_TREE;
8546 	    }
8547 	}
8548 
8549       /* If we have equivalent bases we might be able to simplify.  */
8550       if (indirect_base0 == indirect_base1
8551 	  && operand_equal_p (base0, base1,
8552 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
8553 	{
8554 	  /* We can fold this expression to a constant if the non-constant
8555 	     offset parts are equal.  */
8556 	  if ((offset0 == offset1
8557 	       || (offset0 && offset1
8558 		   && operand_equal_p (offset0, offset1, 0)))
8559 	      && (equality_code
8560 		  || (indirect_base0
8561 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8562 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8563 	    {
8564 	      if (!equality_code
8565 		  && maybe_ne (bitpos0, bitpos1)
8566 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8567 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8568 		fold_overflow_warning (("assuming pointer wraparound does not "
8569 					"occur when comparing P +- C1 with "
8570 					"P +- C2"),
8571 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8572 
8573 	      switch (code)
8574 		{
8575 		case EQ_EXPR:
8576 		  if (known_eq (bitpos0, bitpos1))
8577 		    return constant_boolean_node (true, type);
8578 		  if (known_ne (bitpos0, bitpos1))
8579 		    return constant_boolean_node (false, type);
8580 		  break;
8581 		case NE_EXPR:
8582 		  if (known_ne (bitpos0, bitpos1))
8583 		    return constant_boolean_node (true, type);
8584 		  if (known_eq (bitpos0, bitpos1))
8585 		    return constant_boolean_node (false, type);
8586 		  break;
8587 		case LT_EXPR:
8588 		  if (known_lt (bitpos0, bitpos1))
8589 		    return constant_boolean_node (true, type);
8590 		  if (known_ge (bitpos0, bitpos1))
8591 		    return constant_boolean_node (false, type);
8592 		  break;
8593 		case LE_EXPR:
8594 		  if (known_le (bitpos0, bitpos1))
8595 		    return constant_boolean_node (true, type);
8596 		  if (known_gt (bitpos0, bitpos1))
8597 		    return constant_boolean_node (false, type);
8598 		  break;
8599 		case GE_EXPR:
8600 		  if (known_ge (bitpos0, bitpos1))
8601 		    return constant_boolean_node (true, type);
8602 		  if (known_lt (bitpos0, bitpos1))
8603 		    return constant_boolean_node (false, type);
8604 		  break;
8605 		case GT_EXPR:
8606 		  if (known_gt (bitpos0, bitpos1))
8607 		    return constant_boolean_node (true, type);
8608 		  if (known_le (bitpos0, bitpos1))
8609 		    return constant_boolean_node (false, type);
8610 		  break;
8611 		default:;
8612 		}
8613 	    }
8614 	  /* We can simplify the comparison to a comparison of the variable
8615 	     offset parts if the constant offset parts are equal.
8616 	     Be careful to use signed sizetype here because otherwise we
8617 	     mess with array offsets in the wrong way.  This is possible
8618 	     because pointer arithmetic is restricted to remain within an
8619 	     object and overflow on pointer differences is undefined as of
8620 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8621 	  else if (known_eq (bitpos0, bitpos1)
8622 		   && (equality_code
8623 		       || (indirect_base0
8624 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8625 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8626 	    {
8627 	      /* By converting to signed sizetype we cover middle-end pointer
8628 	         arithmetic which operates on unsigned pointer types of size
8629 	         type size and ARRAY_REF offsets which are properly sign or
8630 	         zero extended from their type in case it is narrower than
8631 	         sizetype.  */
8632 	      if (offset0 == NULL_TREE)
8633 		offset0 = build_int_cst (ssizetype, 0);
8634 	      else
8635 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
8636 	      if (offset1 == NULL_TREE)
8637 		offset1 = build_int_cst (ssizetype, 0);
8638 	      else
8639 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
8640 
8641 	      if (!equality_code
8642 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8643 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8644 		fold_overflow_warning (("assuming pointer wraparound does not "
8645 					"occur when comparing P +- C1 with "
8646 					"P +- C2"),
8647 				       WARN_STRICT_OVERFLOW_COMPARISON);
8648 
8649 	      return fold_build2_loc (loc, code, type, offset0, offset1);
8650 	    }
8651 	}
8652       /* For equal offsets we can simplify to a comparison of the
8653 	 base addresses.  */
8654       else if (known_eq (bitpos0, bitpos1)
8655 	       && (indirect_base0
8656 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8657 	       && (indirect_base1
8658 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8659 	       && ((offset0 == offset1)
8660 		   || (offset0 && offset1
8661 		       && operand_equal_p (offset0, offset1, 0))))
8662 	{
8663 	  if (indirect_base0)
8664 	    base0 = build_fold_addr_expr_loc (loc, base0);
8665 	  if (indirect_base1)
8666 	    base1 = build_fold_addr_expr_loc (loc, base1);
8667 	  return fold_build2_loc (loc, code, type, base0, base1);
8668 	}
8669       /* Comparison between an ordinary (non-weak) symbol and a null
8670 	 pointer can be eliminated since such symbols must have a non
8671 	 pointer can be eliminated since such symbols must have a
8672 	 non-null address.  In C, relational expressions between pointers
8673 	 below follow the C++ rules with the additional property that
8674 	 every object pointer compares greater than a null pointer.
8675       */
8676       else if (((DECL_P (base0)
8677 		 && maybe_nonzero_address (base0) > 0
8678 		 /* Avoid folding references to struct members at offset 0 to
8679 		    prevent tests like '&ptr->firstmember == 0' from getting
8680 		    eliminated.  When ptr is null, although the -> expression
8681 		    is strictly speaking invalid, GCC retains it as a matter
8682 		    of QoI.  See PR c/44555. */
8683 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8684 		|| CONSTANT_CLASS_P (base0))
8685 	       && indirect_base0
8686 	       /* The caller guarantees that when one of the arguments is
8687 		  constant (i.e., null in this case) it is second.  */
8688 	       && integer_zerop (arg1))
8689 	{
8690 	  switch (code)
8691 	    {
8692 	    case EQ_EXPR:
8693 	    case LE_EXPR:
8694 	    case LT_EXPR:
8695 	      return constant_boolean_node (false, type);
8696 	    case GE_EXPR:
8697 	    case GT_EXPR:
8698 	    case NE_EXPR:
8699 	      return constant_boolean_node (true, type);
8700 	    default:
8701 	      gcc_unreachable ();
8702 	    }
8703 	}
8704     }
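  /* For instance (illustrative, assuming 32-bit int): given int a[4],
     the comparison &a[1] == &a[2] decomposes above into equal bases
     with bit positions 32 and 64, so the EQ_EXPR arm folds it to
     false at compile time.  */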
8705 
8706   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8707      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
8708      the resulting offset is smaller in absolute value than the
8709      original one and has the same sign.  */
8710   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8711       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8712       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8713       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8714 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8715       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8716       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8717 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8718     {
8719       tree const1 = TREE_OPERAND (arg0, 1);
8720       tree const2 = TREE_OPERAND (arg1, 1);
8721       tree variable1 = TREE_OPERAND (arg0, 0);
8722       tree variable2 = TREE_OPERAND (arg1, 0);
8723       tree cst;
8724       const char * const warnmsg = G_("assuming signed overflow does not "
8725 				      "occur when combining constants around "
8726 				      "a comparison");
8727 
8728       /* Put the constant on the side where it doesn't overflow and is
8729 	 of lower absolute value and of the same sign as before.  */
8730       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8731 			     ? MINUS_EXPR : PLUS_EXPR,
8732 			     const2, const1);
8733       if (!TREE_OVERFLOW (cst)
8734 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8735 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8736 	{
8737 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8738 	  return fold_build2_loc (loc, code, type,
8739 				  variable1,
8740 				  fold_build2_loc (loc, TREE_CODE (arg1),
8741 						   TREE_TYPE (arg1),
8742 						   variable2, cst));
8743 	}
8744 
8745       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8746 			     ? MINUS_EXPR : PLUS_EXPR,
8747 			     const1, const2);
8748       if (!TREE_OVERFLOW (cst)
8749 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8750 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8751 	{
8752 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8753 	  return fold_build2_loc (loc, code, type,
8754 				  fold_build2_loc (loc, TREE_CODE (arg0),
8755 						   TREE_TYPE (arg0),
8756 						   variable1, cst),
8757 				  variable2);
8758 	}
8759     }
8760 
8761   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8762   if (tem)
8763     return tem;
8764 
8765   /* If we are comparing an expression that just has comparisons
8766      of two integer values, arithmetic expressions of those comparisons,
8767      and constants, we can simplify it.  There are only three cases
8768      to check: the two values can either be equal, the first can be
8769      greater, or the second can be greater.  Fold the expression for
8770      those three values.  Since each value must be 0 or 1, we have
8771      eight possibilities, each of which corresponds to the constant 0
8772      or 1 or one of the six possible comparisons.
8773 
8774      This handles common cases like (a > b) == 0 but also handles
8775      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8776      occur in macroized code.  */
8777 
8778   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8779     {
8780       tree cval1 = 0, cval2 = 0;
8781 
8782       if (twoval_comparison_p (arg0, &cval1, &cval2)
8783 	  /* Don't handle degenerate cases here; they should already
8784 	     have been handled anyway.  */
8785 	  && cval1 != 0 && cval2 != 0
8786 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8787 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8788 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8789 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8790 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8791 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8792 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8793 	{
8794 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8795 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8796 
8797 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8798 	     was the same as ARG1.  */
8799 
8800 	  tree high_result
8801 		= fold_build2_loc (loc, code, type,
8802 			       eval_subst (loc, arg0, cval1, maxval,
8803 					   cval2, minval),
8804 			       arg1);
8805 	  tree equal_result
8806 		= fold_build2_loc (loc, code, type,
8807 			       eval_subst (loc, arg0, cval1, maxval,
8808 					   cval2, maxval),
8809 			       arg1);
8810 	  tree low_result
8811 		= fold_build2_loc (loc, code, type,
8812 			       eval_subst (loc, arg0, cval1, minval,
8813 					   cval2, maxval),
8814 			       arg1);
8815 
8816 	  /* All three of these results should be 0 or 1.  Confirm they are.
8817 	     Then use those values to select the proper code to use.  */
8818 
8819 	  if (TREE_CODE (high_result) == INTEGER_CST
8820 	      && TREE_CODE (equal_result) == INTEGER_CST
8821 	      && TREE_CODE (low_result) == INTEGER_CST)
8822 	    {
8823 	      /* Make a 3-bit mask with the high-order bit being the
8824 		 value for `>', the next for '=', and the low for '<'.  */
8825 	      switch ((integer_onep (high_result) * 4)
8826 		      + (integer_onep (equal_result) * 2)
8827 		      + integer_onep (low_result))
8828 		{
8829 		case 0:
8830 		  /* Always false.  */
8831 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8832 		case 1:
8833 		  code = LT_EXPR;
8834 		  break;
8835 		case 2:
8836 		  code = EQ_EXPR;
8837 		  break;
8838 		case 3:
8839 		  code = LE_EXPR;
8840 		  break;
8841 		case 4:
8842 		  code = GT_EXPR;
8843 		  break;
8844 		case 5:
8845 		  code = NE_EXPR;
8846 		  break;
8847 		case 6:
8848 		  code = GE_EXPR;
8849 		  break;
8850 		case 7:
8851 		  /* Always true.  */
8852 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8853 		}
8854 
8855 	      return fold_build2_loc (loc, code, type, cval1, cval2);
8856 	    }
8857 	}
8858     }
8859 
8860   return NULL_TREE;
8861 }
8862 
8863 
8864 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8865    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8866    argument EXPR represents the expression "z" of type TYPE.  */
8867 
8868 static tree
8869 fold_mult_zconjz (location_t loc, tree type, tree expr)
8870 {
8871   tree itype = TREE_TYPE (type);
8872   tree rpart, ipart, tem;
8873 
8874   if (TREE_CODE (expr) == COMPLEX_EXPR)
8875     {
8876       rpart = TREE_OPERAND (expr, 0);
8877       ipart = TREE_OPERAND (expr, 1);
8878     }
8879   else if (TREE_CODE (expr) == COMPLEX_CST)
8880     {
8881       rpart = TREE_REALPART (expr);
8882       ipart = TREE_IMAGPART (expr);
8883     }
8884   else
8885     {
8886       expr = save_expr (expr);
8887       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8888       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8889     }
8890 
8891   rpart = save_expr (rpart);
8892   ipart = save_expr (ipart);
8893   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8894 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8895 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8896   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8897 			  build_zero_cst (itype));
8898 }
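/* Worked example (illustrative): for z = 3 + 4i, z * conj(z) is
   rebuilt as (3*3 + 4*4) + 0i = 25 + 0i; the imaginary part
   re*im - im*re cancels identically, which is why only the two
   squares are emitted.  */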
8899 
8900 
8901 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
8902    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8903    true if successful.  */
8904 
8905 static bool
8906 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8907 {
8908   unsigned HOST_WIDE_INT i, nunits;
8909 
8910   if (TREE_CODE (arg) == VECTOR_CST
8911       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
8912     {
8913       for (i = 0; i < nunits; ++i)
8914 	elts[i] = VECTOR_CST_ELT (arg, i);
8915     }
8916   else if (TREE_CODE (arg) == CONSTRUCTOR)
8917     {
8918       constructor_elt *elt;
8919 
8920       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8921 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8922 	  return false;
8923 	else
8924 	  elts[i] = elt->value;
8925     }
8926   else
8927     return false;
8928   for (; i < nelts; i++)
8929     elts[i]
8930       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8931   return true;
8932 }
8933 
8934 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8935    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8936    NULL_TREE otherwise.  */
8937 
8938 static tree
8939 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
8940 {
8941   unsigned int i;
8942   unsigned HOST_WIDE_INT nelts;
8943   bool need_ctor = false;
8944 
8945   if (!sel.length ().is_constant (&nelts))
8946     return NULL_TREE;
8947   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
8948 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
8949 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
8950   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8951       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8952     return NULL_TREE;
8953 
8954   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8955   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8956       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8957     return NULL_TREE;
8958 
8959   tree_vector_builder out_elts (type, nelts, 1);
8960   for (i = 0; i < nelts; i++)
8961     {
8962       HOST_WIDE_INT index;
8963       if (!sel[i].is_constant (&index))
8964 	return NULL_TREE;
8965       if (!CONSTANT_CLASS_P (in_elts[index]))
8966 	need_ctor = true;
8967       out_elts.quick_push (unshare_expr (in_elts[index]));
8968     }
8969 
8970   if (need_ctor)
8971     {
8972       vec<constructor_elt, va_gc> *v;
8973       vec_alloc (v, nelts);
8974       for (i = 0; i < nelts; i++)
8975 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8976       return build_constructor (type, v);
8977     }
8978   else
8979     return out_elts.build ();
8980 }
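/* For illustration: permuting the V4SI constants {10,11,12,13} and
   {20,21,22,23} with the selector {0,4,1,5} yields {10,20,11,21};
   selector values below the vector length index the first input and
   the remaining values index the second, matching the in_elts layout
   above.  */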
8981 
8982 /* Try to fold a pointer difference of type TYPE between two address expressions of
8983    array references AREF0 and AREF1 using location LOC.  Return a
8984    simplified expression for the difference or NULL_TREE.  */
8985 
8986 static tree
8987 fold_addr_of_array_ref_difference (location_t loc, tree type,
8988 				   tree aref0, tree aref1,
8989 				   bool use_pointer_diff)
8990 {
8991   tree base0 = TREE_OPERAND (aref0, 0);
8992   tree base1 = TREE_OPERAND (aref1, 0);
8993   tree base_offset = build_int_cst (type, 0);
8994 
8995   /* If the bases are array references as well, recurse.  If the bases
8996      are pointer indirections compute the difference of the pointers.
8997      If the bases are equal, we are set.  */
8998   if ((TREE_CODE (base0) == ARRAY_REF
8999        && TREE_CODE (base1) == ARRAY_REF
9000        && (base_offset
9001 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9002 						use_pointer_diff)))
9003       || (INDIRECT_REF_P (base0)
9004 	  && INDIRECT_REF_P (base1)
9005 	  && (base_offset
9006 	        = use_pointer_diff
9007 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9008 				     TREE_OPERAND (base0, 0),
9009 				     TREE_OPERAND (base1, 0))
9010 		  : fold_binary_loc (loc, MINUS_EXPR, type,
9011 				     fold_convert (type,
9012 						   TREE_OPERAND (base0, 0)),
9013 				     fold_convert (type,
9014 						   TREE_OPERAND (base1, 0)))))
9015       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9016     {
9017       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9018       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9019       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9020       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9021       return fold_build2_loc (loc, PLUS_EXPR, type,
9022 			      base_offset,
9023 			      fold_build2_loc (loc, MULT_EXPR, type,
9024 					       diff, esz));
9025     }
9026   return NULL_TREE;
9027 }
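/* Worked example (illustrative): for int a[8], the byte difference
   of the addresses &a[i] and &a[j] folds above to (i - j) * 4, the
   index difference scaled by the element size; the front end's later
   division by that size then recovers the C-level result i - j.  */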
9028 
9029 /* If the real or vector real constant CST of type TYPE has an exact
9030    inverse, return it, else return NULL.  */
9031 
9032 tree
9033 exact_inverse (tree type, tree cst)
9034 {
9035   REAL_VALUE_TYPE r;
9036   tree unit_type;
9037   machine_mode mode;
9038 
9039   switch (TREE_CODE (cst))
9040     {
9041     case REAL_CST:
9042       r = TREE_REAL_CST (cst);
9043 
9044       if (exact_real_inverse (TYPE_MODE (type), &r))
9045 	return build_real (type, r);
9046 
9047       return NULL_TREE;
9048 
9049     case VECTOR_CST:
9050       {
9051 	unit_type = TREE_TYPE (type);
9052 	mode = TYPE_MODE (unit_type);
9053 
9054 	tree_vector_builder elts;
9055 	if (!elts.new_unary_operation (type, cst, false))
9056 	  return NULL_TREE;
9057 	unsigned int count = elts.encoded_nelts ();
9058 	for (unsigned int i = 0; i < count; ++i)
9059 	  {
9060 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9061 	    if (!exact_real_inverse (mode, &r))
9062 	      return NULL_TREE;
9063 	    elts.quick_push (build_real (unit_type, r));
9064 	  }
9065 
9066 	return elts.build ();
9067       }
9068 
9069     default:
9070       return NULL_TREE;
9071     }
9072 }
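/* E.g. (illustrative): exact_inverse returns 4.0 for the REAL_CST
   0.25, since 1/0.25 is exactly representable in binary floating
   point, but NULL_TREE for 3.0, because 1/3 is not; this gates
   rewriting a division by a constant into a multiplication.  */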
9073 
9074 /*  Mask out the tz least significant bits of X of type TYPE where
9075     tz is the number of trailing zeroes in Y.  */
9076 static wide_int
9077 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9078 {
9079   int tz = wi::ctz (y);
9080   if (tz > 0)
9081     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9082   return x;
9083 }
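/* Worked example (illustrative): for y = 24 (binary 11000, three
   trailing zeros), the result is x & ~7, i.e. x with its three low
   bits cleared; those bits cannot contribute to an AND against a
   multiple of 8.  */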
9084 
9085 /* Return true when T is an address and is known to be nonzero.
9086    For floating point we further ensure that T is not denormal.
9087    Similar logic is present in nonzero_address in rtlanal.h.
9088 
9089    If the return value is based on the assumption that signed overflow
9090    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9091    change *STRICT_OVERFLOW_P.  */
9092 
9093 static bool
9094 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9095 {
9096   tree type = TREE_TYPE (t);
9097   enum tree_code code;
9098 
9099   /* Doing something useful for floating point would need more work.  */
9100   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9101     return false;
9102 
9103   code = TREE_CODE (t);
9104   switch (TREE_CODE_CLASS (code))
9105     {
9106     case tcc_unary:
9107       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9108 					      strict_overflow_p);
9109     case tcc_binary:
9110     case tcc_comparison:
9111       return tree_binary_nonzero_warnv_p (code, type,
9112 					       TREE_OPERAND (t, 0),
9113 					       TREE_OPERAND (t, 1),
9114 					       strict_overflow_p);
9115     case tcc_constant:
9116     case tcc_declaration:
9117     case tcc_reference:
9118       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9119 
9120     default:
9121       break;
9122     }
9123 
9124   switch (code)
9125     {
9126     case TRUTH_NOT_EXPR:
9127       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9128 					      strict_overflow_p);
9129 
9130     case TRUTH_AND_EXPR:
9131     case TRUTH_OR_EXPR:
9132     case TRUTH_XOR_EXPR:
9133       return tree_binary_nonzero_warnv_p (code, type,
9134 					       TREE_OPERAND (t, 0),
9135 					       TREE_OPERAND (t, 1),
9136 					       strict_overflow_p);
9137 
9138     case COND_EXPR:
9139     case CONSTRUCTOR:
9140     case OBJ_TYPE_REF:
9141     case ASSERT_EXPR:
9142     case ADDR_EXPR:
9143     case WITH_SIZE_EXPR:
9144     case SSA_NAME:
9145       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9146 
9147     case COMPOUND_EXPR:
9148     case MODIFY_EXPR:
9149     case BIND_EXPR:
9150       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9151 					strict_overflow_p);
9152 
9153     case SAVE_EXPR:
9154       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9155 					strict_overflow_p);
9156 
9157     case CALL_EXPR:
9158       {
9159 	tree fndecl = get_callee_fndecl (t);
9160 	if (!fndecl) return false;
9161 	if (flag_delete_null_pointer_checks && !flag_check_new
9162 	    && DECL_IS_OPERATOR_NEW (fndecl)
9163 	    && !TREE_NOTHROW (fndecl))
9164 	  return true;
9165 	if (flag_delete_null_pointer_checks
9166 	    && lookup_attribute ("returns_nonnull",
9167 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9168 	  return true;
9169 	return alloca_call_p (t);
9170       }
9171 
9172     default:
9173       break;
9174     }
9175   return false;
9176 }
9177 
9178 /* Return true when T is an address and is known to be nonzero.
9179    Handle warnings about undefined signed overflow.  */
9180 
9181 bool
9182 tree_expr_nonzero_p (tree t)
9183 {
9184   bool ret, strict_overflow_p;
9185 
9186   strict_overflow_p = false;
9187   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9188   if (strict_overflow_p)
9189     fold_overflow_warning (("assuming signed overflow does not occur when "
9190 			    "determining that expression is always "
9191 			    "non-zero"),
9192 			   WARN_STRICT_OVERFLOW_MISC);
9193   return ret;
9194 }
9195 
9196 /* Return true if T is known not to be equal to an integer W.  */
9197 
9198 bool
9199 expr_not_equal_to (tree t, const wide_int &w)
9200 {
9201   wide_int min, max, nz;
9202   value_range_type rtype;
9203   switch (TREE_CODE (t))
9204     {
9205     case INTEGER_CST:
9206       return wi::to_wide (t) != w;
9207 
9208     case SSA_NAME:
9209       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9210 	return false;
9211       rtype = get_range_info (t, &min, &max);
9212       if (rtype == VR_RANGE)
9213 	{
9214 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9215 	    return true;
9216 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9217 	    return true;
9218 	}
9219       else if (rtype == VR_ANTI_RANGE
9220 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9221 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9222 	return true;
9223       /* If T has some known zero bits and W has any of those bits set,
9224 	 then T is known not to be equal to W.  */
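      /* For instance, if T is known to be a multiple of 4 (so its two
	 low bits are zero) and W is 5, the set bit 0 of W proves
	 T != W.  */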
9225       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9226 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
9227 	return true;
9228       return false;
9229 
9230     default:
9231       return false;
9232     }
9233 }
9234 
9235 /* Fold a binary expression of code CODE and type TYPE with operands
9236    OP0 and OP1.  LOC is the location of the resulting expression.
9237    Return the folded expression if folding is successful.  Otherwise,
9238    return NULL_TREE.  */
9239 
9240 tree
9241 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9242 		 tree op0, tree op1)
9243 {
9244   enum tree_code_class kind = TREE_CODE_CLASS (code);
9245   tree arg0, arg1, tem;
9246   tree t1 = NULL_TREE;
9247   bool strict_overflow_p;
9248   unsigned int prec;
9249 
9250   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9251 	      && TREE_CODE_LENGTH (code) == 2
9252 	      && op0 != NULL_TREE
9253 	      && op1 != NULL_TREE);
9254 
9255   arg0 = op0;
9256   arg1 = op1;
9257 
9258   /* Strip any conversions that don't change the mode.  This is
9259      safe for every expression, except for a comparison expression
9260      because its signedness is derived from its operands.  So, in
9261      the latter case, only strip conversions that don't change the
9262      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9263      preserved.
9264 
9265      Note that this is done as an internal manipulation within the
9266      constant folder, in order to find the simplest representation
9267      of the arguments so that their form can be studied.  In any
9268      cases, the appropriate type conversions should be put back in
9269      the tree that will get out of the constant folder.  */
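  /* E.g. a cast from unsigned int to int preserves the mode and is
     stripped for PLUS_EXPR, but it changes signedness and so is kept
     for comparisons and MIN_EXPR/MAX_EXPR.  */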
9270 
9271   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9272     {
9273       STRIP_SIGN_NOPS (arg0);
9274       STRIP_SIGN_NOPS (arg1);
9275     }
9276   else
9277     {
9278       STRIP_NOPS (arg0);
9279       STRIP_NOPS (arg1);
9280     }
9281 
9282   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9283      constant but we can't do arithmetic on them.  */
9284   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9285     {
9286       tem = const_binop (code, type, arg0, arg1);
9287       if (tem != NULL_TREE)
9288 	{
9289 	  if (TREE_TYPE (tem) != type)
9290 	    tem = fold_convert_loc (loc, type, tem);
9291 	  return tem;
9292 	}
9293     }
9294 
9295   /* If this is a commutative operation, and ARG0 is a constant, move it
9296      to ARG1 to reduce the number of tests below.  */
9297   if (commutative_tree_code (code)
9298       && tree_swap_operands_p (arg0, arg1))
9299     return fold_build2_loc (loc, code, type, op1, op0);
9300 
9301   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9302      to ARG1 to reduce the number of tests below.  */
9303   if (kind == tcc_comparison
9304       && tree_swap_operands_p (arg0, arg1))
9305     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9306 
9307   tem = generic_simplify (loc, code, type, op0, op1);
9308   if (tem)
9309     return tem;
9310 
9311   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9312 
9313      First check for cases where an arithmetic operation is applied to a
9314      compound, conditional, or comparison operation.  Push the arithmetic
9315      operation inside the compound or conditional to see if any folding
9316      can then be done.  Convert comparison to conditional for this purpose.
9317      This also optimizes non-constant cases that used to be done in
9318      expand_expr.
9319 
9320      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9321      where one of the operands is a truth value and the other is either a
9322      truth value or a BIT_AND_EXPR with the constant 1.  In that case, the
9323      code below would make the expression more complex.  Change it to a
9324      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9325      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9326 
9327   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9328        || code == EQ_EXPR || code == NE_EXPR)
9329       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9330       && ((truth_value_p (TREE_CODE (arg0))
9331 	   && (truth_value_p (TREE_CODE (arg1))
9332 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9333 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9334 	  || (truth_value_p (TREE_CODE (arg1))
9335 	      && (truth_value_p (TREE_CODE (arg0))
9336 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9337 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9338     {
9339       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9340 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9341 			 : TRUTH_XOR_EXPR,
9342 			 boolean_type_node,
9343 			 fold_convert_loc (loc, boolean_type_node, arg0),
9344 			 fold_convert_loc (loc, boolean_type_node, arg1));
9345 
9346       if (code == EQ_EXPR)
9347 	tem = invert_truthvalue_loc (loc, tem);
9348 
9349       return fold_convert_loc (loc, type, tem);
9350     }
9351 
9352   if (TREE_CODE_CLASS (code) == tcc_binary
9353       || TREE_CODE_CLASS (code) == tcc_comparison)
9354     {
9355       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9356 	{
9357 	  tem = fold_build2_loc (loc, code, type,
9358 			     fold_convert_loc (loc, TREE_TYPE (op0),
9359 					       TREE_OPERAND (arg0, 1)), op1);
9360 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9361 			     tem);
9362 	}
9363       if (TREE_CODE (arg1) == COMPOUND_EXPR)
9364 	{
9365 	  tem = fold_build2_loc (loc, code, type, op0,
9366 			     fold_convert_loc (loc, TREE_TYPE (op1),
9367 					       TREE_OPERAND (arg1, 1)));
9368 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9369 			     tem);
9370 	}
9371 
9372       if (TREE_CODE (arg0) == COND_EXPR
9373 	  || TREE_CODE (arg0) == VEC_COND_EXPR
9374 	  || COMPARISON_CLASS_P (arg0))
9375 	{
9376 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9377 						     arg0, arg1,
9378 						     /*cond_first_p=*/1);
9379 	  if (tem != NULL_TREE)
9380 	    return tem;
9381 	}
9382 
9383       if (TREE_CODE (arg1) == COND_EXPR
9384 	  || TREE_CODE (arg1) == VEC_COND_EXPR
9385 	  || COMPARISON_CLASS_P (arg1))
9386 	{
9387 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9388 						     arg1, arg0,
9389 					             /*cond_first_p=*/0);
9390 	  if (tem != NULL_TREE)
9391 	    return tem;
9392 	}
9393     }
9394 
9395   switch (code)
9396     {
9397     case MEM_REF:
9398       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9399       if (TREE_CODE (arg0) == ADDR_EXPR
9400 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9401 	{
9402 	  tree iref = TREE_OPERAND (arg0, 0);
9403 	  return fold_build2 (MEM_REF, type,
9404 			      TREE_OPERAND (iref, 0),
9405 			      int_const_binop (PLUS_EXPR, arg1,
9406 					       TREE_OPERAND (iref, 1)));
9407 	}
9408 
9409       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9410       if (TREE_CODE (arg0) == ADDR_EXPR
9411 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9412 	{
9413 	  tree base;
9414 	  poly_int64 coffset;
9415 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9416 						&coffset);
9417 	  if (!base)
9418 	    return NULL_TREE;
9419 	  return fold_build2 (MEM_REF, type,
9420 			      build_fold_addr_expr (base),
9421 			      int_const_binop (PLUS_EXPR, arg1,
9422 					       size_int (coffset)));
9423 	}
9424 
9425       return NULL_TREE;
9426 
9427     case POINTER_PLUS_EXPR:
9428       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9429       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9430 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9431         return fold_convert_loc (loc, type,
9432 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9433 					      fold_convert_loc (loc, sizetype,
9434 								arg1),
9435 					      fold_convert_loc (loc, sizetype,
9436 								arg0)));
9437 
9438       return NULL_TREE;
9439 
9440     case PLUS_EXPR:
9441       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9442 	{
9443 	  /* X + (X / CST) * -CST is X % CST.  */
9444 	  if (TREE_CODE (arg1) == MULT_EXPR
9445 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9446 	      && operand_equal_p (arg0,
9447 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9448 	    {
9449 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9450 	      tree cst1 = TREE_OPERAND (arg1, 1);
9451 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9452 				      cst1, cst0);
9453 	      if (sum && integer_zerop (sum))
9454 		return fold_convert_loc (loc, type,
9455 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9456 						      TREE_TYPE (arg0), arg0,
9457 						      cst0));
9458 	    }
9459 	}
9460 
9461       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9462 	 one.  Make sure the type is not saturating and has the signedness of
9463 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9464 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
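      /* E.g. X*3 + X*5 can become X*8, and X*4 + Y*4 can become
	 (X + Y)*4, subject to the overflow conditions above.  */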
9465       if ((TREE_CODE (arg0) == MULT_EXPR
9466 	   || TREE_CODE (arg1) == MULT_EXPR)
9467 	  && !TYPE_SATURATING (type)
9468 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9469 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9470 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9471         {
9472 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9473 	  if (tem)
9474 	    return tem;
9475 	}
9476 
9477       if (! FLOAT_TYPE_P (type))
9478 	{
9479 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9480 	     (plus (plus (mult) (mult)) (foo)) so that we can
9481 	     take advantage of the factoring cases below.  */
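	  /* E.g. (a*b + c) + d*e becomes (a*b + d*e) + c, bringing the
	     two MULT_EXPRs together for the factoring code.  */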
9482 	  if (ANY_INTEGRAL_TYPE_P (type)
9483 	      && TYPE_OVERFLOW_WRAPS (type)
9484 	      && (((TREE_CODE (arg0) == PLUS_EXPR
9485 		    || TREE_CODE (arg0) == MINUS_EXPR)
9486 		   && TREE_CODE (arg1) == MULT_EXPR)
9487 		  || ((TREE_CODE (arg1) == PLUS_EXPR
9488 		       || TREE_CODE (arg1) == MINUS_EXPR)
9489 		      && TREE_CODE (arg0) == MULT_EXPR)))
9490 	    {
9491 	      tree parg0, parg1, parg, marg;
9492 	      enum tree_code pcode;
9493 
9494 	      if (TREE_CODE (arg1) == MULT_EXPR)
9495 		parg = arg0, marg = arg1;
9496 	      else
9497 		parg = arg1, marg = arg0;
9498 	      pcode = TREE_CODE (parg);
9499 	      parg0 = TREE_OPERAND (parg, 0);
9500 	      parg1 = TREE_OPERAND (parg, 1);
9501 	      STRIP_NOPS (parg0);
9502 	      STRIP_NOPS (parg1);
9503 
9504 	      if (TREE_CODE (parg0) == MULT_EXPR
9505 		  && TREE_CODE (parg1) != MULT_EXPR)
9506 		return fold_build2_loc (loc, pcode, type,
9507 				    fold_build2_loc (loc, PLUS_EXPR, type,
9508 						 fold_convert_loc (loc, type,
9509 								   parg0),
9510 						 fold_convert_loc (loc, type,
9511 								   marg)),
9512 				    fold_convert_loc (loc, type, parg1));
9513 	      if (TREE_CODE (parg0) != MULT_EXPR
9514 		  && TREE_CODE (parg1) == MULT_EXPR)
9515 		return
9516 		  fold_build2_loc (loc, PLUS_EXPR, type,
9517 			       fold_convert_loc (loc, type, parg0),
9518 			       fold_build2_loc (loc, pcode, type,
9519 					    fold_convert_loc (loc, type, marg),
9520 					    fold_convert_loc (loc, type,
9521 							      parg1)));
9522 	    }
9523 	}
9524       else
9525 	{
9526 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9527 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
9528 	     if signed zeros are involved.  */
9529 	  if (!HONOR_SNANS (element_mode (arg0))
9530               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9531 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9532 	    {
9533 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9534 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9535 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9536 	      bool arg0rz = false, arg0iz = false;
9537 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
9538 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
9539 		{
9540 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9541 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9542 		  if (arg0rz && arg1i && real_zerop (arg1i))
9543 		    {
9544 		      tree rp = arg1r ? arg1r
9545 				  : build1 (REALPART_EXPR, rtype, arg1);
9546 		      tree ip = arg0i ? arg0i
9547 				  : build1 (IMAGPART_EXPR, rtype, arg0);
9548 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9549 		    }
9550 		  else if (arg0iz && arg1r && real_zerop (arg1r))
9551 		    {
9552 		      tree rp = arg0r ? arg0r
9553 				  : build1 (REALPART_EXPR, rtype, arg0);
9554 		      tree ip = arg1i ? arg1i
9555 				  : build1 (IMAGPART_EXPR, rtype, arg1);
9556 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9557 		    }
9558 		}
9559 	    }
9560 
9561           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9562              We associate floats only if the user has specified
9563              -fassociative-math.  */
9564           if (flag_associative_math
9565               && TREE_CODE (arg1) == PLUS_EXPR
9566               && TREE_CODE (arg0) != MULT_EXPR)
9567             {
9568               tree tree10 = TREE_OPERAND (arg1, 0);
9569               tree tree11 = TREE_OPERAND (arg1, 1);
9570               if (TREE_CODE (tree11) == MULT_EXPR
9571 		  && TREE_CODE (tree10) == MULT_EXPR)
9572                 {
9573                   tree tree0;
9574                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9575                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9576                 }
9577             }
9578           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9579              We associate floats only if the user has specified
9580              -fassociative-math.  */
9581           if (flag_associative_math
9582               && TREE_CODE (arg0) == PLUS_EXPR
9583               && TREE_CODE (arg1) != MULT_EXPR)
9584             {
9585               tree tree00 = TREE_OPERAND (arg0, 0);
9586               tree tree01 = TREE_OPERAND (arg0, 1);
9587               if (TREE_CODE (tree01) == MULT_EXPR
9588 		  && TREE_CODE (tree00) == MULT_EXPR)
9589                 {
9590                   tree tree0;
9591                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9592                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9593                 }
9594             }
9595 	}
9596 
9597      bit_rotate:
9598       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9599 	 is a rotate of A by C1 bits.  */
9600       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9601 	 is a rotate of A by B bits.
9602 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9603 	 though in this case CODE must be | and not + or ^, otherwise
9604 	 it doesn't return A when B is 0.  */
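      /* E.g. for a 32-bit unsigned A, (A << 3) | (A >> 29) and
	 (A << B) | (A >> (-B & 31)) both become left rotates of A.  */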
9605       {
9606 	enum tree_code code0, code1;
9607 	tree rtype;
9608 	code0 = TREE_CODE (arg0);
9609 	code1 = TREE_CODE (arg1);
9610 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9611 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9612 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
9613 			        TREE_OPERAND (arg1, 0), 0)
9614 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9615 	        TYPE_UNSIGNED (rtype))
9616 	    /* Only create rotates in complete modes.  Other cases are not
9617 	       expanded properly.  */
9618 	    && (element_precision (rtype)
9619 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9620 	  {
9621 	    tree tree01, tree11;
9622 	    tree orig_tree01, orig_tree11;
9623 	    enum tree_code code01, code11;
9624 
9625 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9626 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9627 	    STRIP_NOPS (tree01);
9628 	    STRIP_NOPS (tree11);
9629 	    code01 = TREE_CODE (tree01);
9630 	    code11 = TREE_CODE (tree11);
9631 	    if (code11 != MINUS_EXPR
9632 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9633 	      {
9634 		std::swap (code0, code1);
9635 		std::swap (code01, code11);
9636 		std::swap (tree01, tree11);
9637 		std::swap (orig_tree01, orig_tree11);
9638 	      }
9639 	    if (code01 == INTEGER_CST
9640 		&& code11 == INTEGER_CST
9641 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
9642 		    == element_precision (rtype)))
9643 	      {
9644 		tem = build2_loc (loc, LROTATE_EXPR,
9645 				  rtype, TREE_OPERAND (arg0, 0),
9646 				  code0 == LSHIFT_EXPR
9647 				  ? orig_tree01 : orig_tree11);
9648 		return fold_convert_loc (loc, type, tem);
9649 	      }
9650 	    else if (code11 == MINUS_EXPR)
9651 	      {
9652 		tree tree110, tree111;
9653 		tree110 = TREE_OPERAND (tree11, 0);
9654 		tree111 = TREE_OPERAND (tree11, 1);
9655 		STRIP_NOPS (tree110);
9656 		STRIP_NOPS (tree111);
9657 		if (TREE_CODE (tree110) == INTEGER_CST
9658 		    && compare_tree_int (tree110,
9659 					 element_precision (rtype)) == 0
9660 		    && operand_equal_p (tree01, tree111, 0))
9661 		  {
9662 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9663 					    ? LROTATE_EXPR : RROTATE_EXPR),
9664 				      rtype, TREE_OPERAND (arg0, 0),
9665 				      orig_tree01);
9666 		    return fold_convert_loc (loc, type, tem);
9667 		  }
9668 	      }
9669 	    else if (code == BIT_IOR_EXPR
9670 		     && code11 == BIT_AND_EXPR
9671 		     && pow2p_hwi (element_precision (rtype)))
9672 	      {
9673 		tree tree110, tree111;
9674 		tree110 = TREE_OPERAND (tree11, 0);
9675 		tree111 = TREE_OPERAND (tree11, 1);
9676 		STRIP_NOPS (tree110);
9677 		STRIP_NOPS (tree111);
9678 		if (TREE_CODE (tree110) == NEGATE_EXPR
9679 		    && TREE_CODE (tree111) == INTEGER_CST
9680 		    && compare_tree_int (tree111,
9681 					 element_precision (rtype) - 1) == 0
9682 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9683 		  {
9684 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9685 					    ? LROTATE_EXPR : RROTATE_EXPR),
9686 				      rtype, TREE_OPERAND (arg0, 0),
9687 				      orig_tree01);
9688 		    return fold_convert_loc (loc, type, tem);
9689 		  }
9690 	      }
9691 	  }
9692       }
9693 
9694     associate:
9695       /* In most languages, we can't associate operations on floats through
9696 	 parentheses.  Rather than remember where the parentheses were, we
9697 	 don't associate floats at all, unless the user has specified
9698 	 -fassociative-math.
9699 	 And, we need to make sure type is not saturating.  */
9700 
9701       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9702 	  && !TYPE_SATURATING (type))
9703 	{
9704 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9705 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9706 	  tree atype = type;
9707 	  bool ok = true;
9708 
9709 	  /* Split both trees into variables, constants, and literals.  Then
9710 	     associate each group together, the constants with literals,
9711 	     then the result with variables.  This increases the chances of
9712 	     literals being recombined later and of generating relocatable
9713 	     expressions for the sum of a constant and literal.  */
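	  /* For instance, with unsigned X and Y, (X + 3) + (Y + 5) is
	     split into the variables X, Y and the literals 3, 5, and
	     rebuilt as (X + Y) + 8.  */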
9714 	  var0 = split_tree (arg0, type, code,
9715 			     &minus_var0, &con0, &minus_con0,
9716 			     &lit0, &minus_lit0, 0);
9717 	  var1 = split_tree (arg1, type, code,
9718 			     &minus_var1, &con1, &minus_con1,
9719 			     &lit1, &minus_lit1, code == MINUS_EXPR);
9720 
9721 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
9722 	  if (code == MINUS_EXPR)
9723 	    code = PLUS_EXPR;
9724 
9725 	  /* With undefined overflow prefer doing association in a type
9726 	     which wraps on overflow, if that is one of the operand types.  */
9727 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9728 	      && !TYPE_OVERFLOW_WRAPS (type))
9729 	    {
9730 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9731 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9732 		atype = TREE_TYPE (arg0);
9733 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9734 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9735 		atype = TREE_TYPE (arg1);
9736 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9737 	    }
9738 
9739 	  /* With undefined overflow we can only associate constants with one
9740 	     variable, and constants whose association doesn't overflow.  */
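	  /* E.g. for signed X and Y with undefined overflow,
	     (X + 1) + (Y + 2) is left alone: combining X and Y could
	     introduce an overflow the original evaluation avoided.  */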
9741 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9742 	      && !TYPE_OVERFLOW_WRAPS (atype))
9743 	    {
9744 	      if ((var0 && var1) || (minus_var0 && minus_var1))
9745 		{
9746 		  /* ???  If split_tree would handle NEGATE_EXPR we could
9747 		     simply reject these cases and the allowed cases would
9748 		     be the var0/minus_var1 ones.  */
9749 		  tree tmp0 = var0 ? var0 : minus_var0;
9750 		  tree tmp1 = var1 ? var1 : minus_var1;
9751 		  bool one_neg = false;
9752 
9753 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
9754 		    {
9755 		      tmp0 = TREE_OPERAND (tmp0, 0);
9756 		      one_neg = !one_neg;
9757 		    }
9758 		  if (CONVERT_EXPR_P (tmp0)
9759 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9760 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9761 			  <= TYPE_PRECISION (atype)))
9762 		    tmp0 = TREE_OPERAND (tmp0, 0);
9763 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
9764 		    {
9765 		      tmp1 = TREE_OPERAND (tmp1, 0);
9766 		      one_neg = !one_neg;
9767 		    }
9768 		  if (CONVERT_EXPR_P (tmp1)
9769 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9770 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9771 			  <= TYPE_PRECISION (atype)))
9772 		    tmp1 = TREE_OPERAND (tmp1, 0);
9773 		  /* The only case we can still associate with two variables
9774 		     is if they cancel out.  */
9775 		  if (!one_neg
9776 		      || !operand_equal_p (tmp0, tmp1, 0))
9777 		    ok = false;
9778 		}
9779 	      else if ((var0 && minus_var1
9780 			&& ! operand_equal_p (var0, minus_var1, 0))
9781 		       || (minus_var0 && var1
9782 			   && ! operand_equal_p (minus_var0, var1, 0)))
9783 		ok = false;
9784 	    }
9785 
9786 	  /* Only do something if we found more than two objects.  Otherwise,
9787 	     nothing has changed and we risk infinite recursion.  */
9788 	  if (ok
9789 	      && ((var0 != 0) + (var1 != 0)
9790 		  + (minus_var0 != 0) + (minus_var1 != 0)
9791 		  + (con0 != 0) + (con1 != 0)
9792 		  + (minus_con0 != 0) + (minus_con1 != 0)
9793 		  + (lit0 != 0) + (lit1 != 0)
9794 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9795 	    {
9796 	      var0 = associate_trees (loc, var0, var1, code, atype);
9797 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9798 					    code, atype);
9799 	      con0 = associate_trees (loc, con0, con1, code, atype);
9800 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9801 					    code, atype);
9802 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
9803 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9804 					    code, atype);
9805 
9806 	      if (minus_var0 && var0)
9807 		{
9808 		  var0 = associate_trees (loc, var0, minus_var0,
9809 					  MINUS_EXPR, atype);
9810 		  minus_var0 = 0;
9811 		}
9812 	      if (minus_con0 && con0)
9813 		{
9814 		  con0 = associate_trees (loc, con0, minus_con0,
9815 					  MINUS_EXPR, atype);
9816 		  minus_con0 = 0;
9817 		}
9818 
9819 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9820 		 greater than the positive part.  Otherwise, the multiplicative
9821 		 folding code (i.e extract_muldiv) may be fooled in case
9822 		 unsigned constants are subtracted, like in the following
9823 		 example: ((X*2 + 4) - 8U)/2.  */
9824 	      if (minus_lit0 && lit0)
9825 		{
9826 		  if (TREE_CODE (lit0) == INTEGER_CST
9827 		      && TREE_CODE (minus_lit0) == INTEGER_CST
9828 		      && tree_int_cst_lt (lit0, minus_lit0)
9829 		      /* But avoid ending up with only negated parts.  */
9830 		      && (var0 || con0))
9831 		    {
9832 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9833 						    MINUS_EXPR, atype);
9834 		      lit0 = 0;
9835 		    }
9836 		  else
9837 		    {
9838 		      lit0 = associate_trees (loc, lit0, minus_lit0,
9839 					      MINUS_EXPR, atype);
9840 		      minus_lit0 = 0;
9841 		    }
9842 		}
9843 
9844 	      /* Don't introduce overflows through reassociation.  */
9845 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
9846 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9847 		return NULL_TREE;
9848 
9849 	      /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9850 	      con0 = associate_trees (loc, con0, lit0, code, atype);
9851 	      lit0 = 0;
9852 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9853 					    code, atype);
9854 	      minus_lit0 = 0;
9855 
9856 	      /* Eliminate minus_con0.  */
9857 	      if (minus_con0)
9858 		{
9859 		  if (con0)
9860 		    con0 = associate_trees (loc, con0, minus_con0,
9861 					    MINUS_EXPR, atype);
9862 		  else if (var0)
9863 		    var0 = associate_trees (loc, var0, minus_con0,
9864 					    MINUS_EXPR, atype);
9865 		  else
9866 		    gcc_unreachable ();
9867 		  minus_con0 = 0;
9868 		}
9869 
9870 	      /* Eliminate minus_var0.  */
9871 	      if (minus_var0)
9872 		{
9873 		  if (con0)
9874 		    con0 = associate_trees (loc, con0, minus_var0,
9875 					    MINUS_EXPR, atype);
9876 		  else
9877 		    gcc_unreachable ();
9878 		  minus_var0 = 0;
9879 		}
9880 
9881 	      return
9882 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9883 							      code, atype));
9884 	    }
9885 	}
9886 
9887       return NULL_TREE;
9888 
9889     case POINTER_DIFF_EXPR:
9890     case MINUS_EXPR:
9891       /* Fold &a[i] - &a[j] to i-j.  */
9892       if (TREE_CODE (arg0) == ADDR_EXPR
9893 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9894 	  && TREE_CODE (arg1) == ADDR_EXPR
9895 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9896         {
9897 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
9898 							TREE_OPERAND (arg0, 0),
9899 							TREE_OPERAND (arg1, 0),
9900 							code
9901 							== POINTER_DIFF_EXPR);
9902 	  if (tem)
9903 	    return tem;
9904 	}
9905 
9906       /* Further transformations are not for pointers.  */
9907       if (code == POINTER_DIFF_EXPR)
9908 	return NULL_TREE;
9909 
9910       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
9911       if (TREE_CODE (arg0) == NEGATE_EXPR
9912 	  && negate_expr_p (op1)
9913 	  /* If arg0 is e.g. unsigned int and type is int, then this could
9914 	     introduce UB, because if A is INT_MIN at runtime, the original
9915 	     expression can be well defined while the latter is not.
9916 	     See PR83269.  */
9917 	  && !(ANY_INTEGRAL_TYPE_P (type)
9918 	       && TYPE_OVERFLOW_UNDEFINED (type)
9919 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9920 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9921 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9922 			        fold_convert_loc (loc, type,
9923 						  TREE_OPERAND (arg0, 0)));
9924 
9925       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9926 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
9927 	 signed zeros are involved.  */
9928       if (!HONOR_SNANS (element_mode (arg0))
9929 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9930 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9931         {
9932 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9933 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9934 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9935 	  bool arg0rz = false, arg0iz = false;
9936 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
9937 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
9938 	    {
9939 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9940 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9941 	      if (arg0rz && arg1i && real_zerop (arg1i))
9942 	        {
9943 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9944 					 arg1r ? arg1r
9945 					 : build1 (REALPART_EXPR, rtype, arg1));
9946 		  tree ip = arg0i ? arg0i
9947 		    : build1 (IMAGPART_EXPR, rtype, arg0);
9948 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9949 		}
9950 	      else if (arg0iz && arg1r && real_zerop (arg1r))
9951 	        {
9952 		  tree rp = arg0r ? arg0r
9953 		    : build1 (REALPART_EXPR, rtype, arg0);
9954 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9955 					 arg1i ? arg1i
9956 					 : build1 (IMAGPART_EXPR, rtype, arg1));
9957 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9958 		}
9959 	    }
9960 	}
9961 
9962       /* A - B -> A + (-B) if B is easily negatable.  */
9963       if (negate_expr_p (op1)
9964 	  && ! TYPE_OVERFLOW_SANITIZED (type)
9965 	  && ((FLOAT_TYPE_P (type)
9966                /* Avoid this transformation if B is a positive REAL_CST.  */
9967 	       && (TREE_CODE (op1) != REAL_CST
9968 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9969 	      || INTEGRAL_TYPE_P (type)))
9970 	return fold_build2_loc (loc, PLUS_EXPR, type,
9971 				fold_convert_loc (loc, type, arg0),
9972 				negate_expr (op1));
9973 
9974       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9975 	 one.  Make sure the type is not saturating and has the signedness of
9976 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9977 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9978       if ((TREE_CODE (arg0) == MULT_EXPR
9979 	   || TREE_CODE (arg1) == MULT_EXPR)
9980 	  && !TYPE_SATURATING (type)
9981 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9982 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9983 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9984         {
9985 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9986 	  if (tem)
9987 	    return tem;
9988 	}
9989 
9990       goto associate;
9991 
9992     case MULT_EXPR:
9993       if (! FLOAT_TYPE_P (type))
9994 	{
9995 	  /* Transform x * -C into -x * C if x is easily negatable.  */
9996 	  if (TREE_CODE (op1) == INTEGER_CST
9997 	      && tree_int_cst_sgn (op1) == -1
9998 	      && negate_expr_p (op0)
9999 	      && negate_expr_p (op1)
10000 	      && (tem = negate_expr (op1)) != op1
10001 	      && ! TREE_OVERFLOW (tem))
10002 	    return fold_build2_loc (loc, MULT_EXPR, type,
10003 				    fold_convert_loc (loc, type,
10004 						      negate_expr (op0)), tem);
10005 
10006 	  strict_overflow_p = false;
10007 	  if (TREE_CODE (arg1) == INTEGER_CST
10008 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10009 					&strict_overflow_p)) != 0)
10010 	    {
10011 	      if (strict_overflow_p)
10012 		fold_overflow_warning (("assuming signed overflow does not "
10013 					"occur when simplifying "
10014 					"multiplication"),
10015 				       WARN_STRICT_OVERFLOW_MISC);
10016 	      return fold_convert_loc (loc, type, tem);
10017 	    }
10018 
10019 	  /* Optimize z * conj(z) for integer complex numbers.  */
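	  /* z * conj(z) is real(z)*real(z) + imag(z)*imag(z) with a zero
	     imaginary part, which is what fold_mult_zconjz builds.  */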
10020 	  if (TREE_CODE (arg0) == CONJ_EXPR
10021 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10022 	    return fold_mult_zconjz (loc, type, arg1);
10023 	  if (TREE_CODE (arg1) == CONJ_EXPR
10024 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10025 	    return fold_mult_zconjz (loc, type, arg0);
10026 	}
10027       else
10028 	{
10029 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10030 	     This is not the same for NaNs or if signed zeros are
10031 	     involved.  */
10032 	  if (!HONOR_NANS (arg0)
10033               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10034 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10035 	      && TREE_CODE (arg1) == COMPLEX_CST
10036 	      && real_zerop (TREE_REALPART (arg1)))
10037 	    {
10038 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10039 	      if (real_onep (TREE_IMAGPART (arg1)))
10040 		return
10041 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10042 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10043 							     rtype, arg0)),
10044 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10045 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10046 		return
10047 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10048 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10049 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10050 							     rtype, arg0)));
10051 	    }
10052 
10053 	  /* Optimize z * conj(z) for floating point complex numbers.
10054 	     Guarded by flag_unsafe_math_optimizations as non-finite
10055 	     imaginary components don't produce scalar results.  */
10056 	  if (flag_unsafe_math_optimizations
10057 	      && TREE_CODE (arg0) == CONJ_EXPR
10058 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10059 	    return fold_mult_zconjz (loc, type, arg1);
10060 	  if (flag_unsafe_math_optimizations
10061 	      && TREE_CODE (arg1) == CONJ_EXPR
10062 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10063 	    return fold_mult_zconjz (loc, type, arg0);
10064 	}
10065       goto associate;
10066 
10067     case BIT_IOR_EXPR:
10068       /* Canonicalize (X & C1) | C2.  */
10069       if (TREE_CODE (arg0) == BIT_AND_EXPR
10070 	  && TREE_CODE (arg1) == INTEGER_CST
10071 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10072 	{
10073 	  int width = TYPE_PRECISION (type), w;
10074 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10075 	  wide_int c2 = wi::to_wide (arg1);
10076 
10077 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
10078 	  if ((c1 & c2) == c1)
10079 	    return omit_one_operand_loc (loc, type, arg1,
10080 					 TREE_OPERAND (arg0, 0));
10081 
10082 	  wide_int msk = wi::mask (width, false,
10083 				   TYPE_PRECISION (TREE_TYPE (arg1)));
10084 
10085 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
10086 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
10087 	    {
10088 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10089 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10090 	    }
10091 
10092 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10093 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10094 	     mode which allows further optimizations.  */
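	  /* E.g. (X & 0xF0) | 0x3C becomes (X & 0xC0) | 0x3C: the bits
	     of C1 that C2 already provides are dropped from the mask.  */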
10095 	  c1 &= msk;
10096 	  c2 &= msk;
10097 	  wide_int c3 = wi::bit_and_not (c1, c2);
10098 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10099 	    {
10100 	      wide_int mask = wi::mask (w, false,
10101 					TYPE_PRECISION (type));
10102 	      if (((c1 | c2) & mask) == mask
10103 		  && wi::bit_and_not (c1, mask) == 0)
10104 		{
10105 		  c3 = mask;
10106 		  break;
10107 		}
10108 	    }
10109 
10110 	  if (c3 != c1)
10111 	    {
10112 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10113 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10114 				     wide_int_to_tree (type, c3));
10115 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10116 	    }
10117 	}
10118 
10119       /* See if this can be simplified into a rotate first.  If that
10120 	 is unsuccessful continue in the association code.  */
10121       goto bit_rotate;
10122 
10123     case BIT_XOR_EXPR:
10124       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
10125       if (TREE_CODE (arg0) == BIT_AND_EXPR
10126 	  && INTEGRAL_TYPE_P (type)
10127 	  && integer_onep (TREE_OPERAND (arg0, 1))
10128 	  && integer_onep (arg1))
10129 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10130 				build_zero_cst (TREE_TYPE (arg0)));
10131 
10132       /* See if this can be simplified into a rotate first.  If that
10133 	 is unsuccessful continue in the association code.  */
10134       goto bit_rotate;
10135 
10136     case BIT_AND_EXPR:
10137       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
10138       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10139 	  && INTEGRAL_TYPE_P (type)
10140 	  && integer_onep (TREE_OPERAND (arg0, 1))
10141 	  && integer_onep (arg1))
10142 	{
10143 	  tree tem2;
10144 	  tem = TREE_OPERAND (arg0, 0);
10145 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10146 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10147 				  tem, tem2);
10148 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10149 				  build_zero_cst (TREE_TYPE (tem)));
10150 	}
10151       /* Fold ~X & 1 as (X & 1) == 0.  */
10152       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10153 	  && INTEGRAL_TYPE_P (type)
10154 	  && integer_onep (arg1))
10155 	{
10156 	  tree tem2;
10157 	  tem = TREE_OPERAND (arg0, 0);
10158 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10159 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10160 				  tem, tem2);
10161 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10162 				  build_zero_cst (TREE_TYPE (tem)));
10163 	}
10164       /* Fold !X & 1 as X == 0.  */
10165       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10166 	  && integer_onep (arg1))
10167 	{
10168 	  tem = TREE_OPERAND (arg0, 0);
10169 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
10170 				  build_zero_cst (TREE_TYPE (tem)));
10171 	}
10172 
10173       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10174          multiple of 1 << CST.  */
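      /* E.g. (X * 12) & -4 becomes X * 12, since X * 12 is always a
	 multiple of 4.  */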
10175       if (TREE_CODE (arg1) == INTEGER_CST)
10176 	{
10177 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10178 	  wide_int ncst1 = -cst1;
10179 	  if ((cst1 & ncst1) == ncst1
10180 	      && multiple_of_p (type, arg0,
10181 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10182 	    return fold_convert_loc (loc, type, arg0);
10183 	}
10184 
10185       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10186          bits from CST2.  */
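      /* For instance, (X * 8) & 7 is 0 because X * 8 has three trailing
	 zero bits, and (X * 8) & 15 keeps only bit 3, i.e. becomes
	 (X * 8) & 8.  */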
10187       if (TREE_CODE (arg1) == INTEGER_CST
10188 	  && TREE_CODE (arg0) == MULT_EXPR
10189 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10190 	{
10191 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10192 	  wide_int masked
10193 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10194 
10195 	  if (masked == 0)
10196 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
10197 	                                  arg0, arg1);
10198 	  else if (masked != warg1)
10199 	    {
10200 	      /* Avoid the transform if arg1 is a mask of some
10201 	         mode which allows further optimizations.  */
10202 	      int pop = wi::popcount (warg1);
10203 	      if (!(pop >= BITS_PER_UNIT
10204 		    && pow2p_hwi (pop)
10205 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10206 		return fold_build2_loc (loc, code, type, op0,
10207 					wide_int_to_tree (type, masked));
10208 	    }
10209 	}
10210 
10211       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10212 	 ((A & N) + B) & M -> (A + B) & M
10213 	 Similarly if (N & M) == 0,
10214 	 ((A | N) + B) & M -> (A + B) & M
10215 	 and for - instead of + (or unary - instead of +)
10216 	 and/or ^ instead of |.
10217 	 If B is constant and (B & M) == 0, fold into A & M.  */
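      /* For example, with M == 7 (== (1LL << 3) - 1):
	 ((A & 0xFF) + B) & 7 -> (A + B) & 7, since (0xFF & 7) == 7, and
	 ((A | 8) + B) & 7 -> (A + B) & 7, since (8 & 7) == 0.  */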
10218       if (TREE_CODE (arg1) == INTEGER_CST)
10219 	{
10220 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10221 	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10222 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10223 	      && (TREE_CODE (arg0) == PLUS_EXPR
10224 		  || TREE_CODE (arg0) == MINUS_EXPR
10225 		  || TREE_CODE (arg0) == NEGATE_EXPR)
10226 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10227 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10228 	    {
10229 	      tree pmop[2];
10230 	      int which = 0;
10231 	      wide_int cst0;
10232 
10233 	      /* Now we know that arg0 is (C + D) or (C - D) or
10234 		 -C and arg1 (M) is == (1LL << cst) - 1.
10235 		 Store C into PMOP[0] and D into PMOP[1].  */
10236 	      pmop[0] = TREE_OPERAND (arg0, 0);
10237 	      pmop[1] = NULL;
10238 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
10239 		{
10240 		  pmop[1] = TREE_OPERAND (arg0, 1);
10241 		  which = 1;
10242 		}
10243 
10244 	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10245 		which = -1;
10246 
10247 	      for (; which >= 0; which--)
10248 		switch (TREE_CODE (pmop[which]))
10249 		  {
10250 		  case BIT_AND_EXPR:
10251 		  case BIT_IOR_EXPR:
10252 		  case BIT_XOR_EXPR:
10253 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10254 			!= INTEGER_CST)
10255 		      break;
10256 		    cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10257 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10258 		      {
10259 			if (cst0 != cst1)
10260 			  break;
10261 		      }
10262 		    else if (cst0 != 0)
10263 		      break;
10264 		    /* If C or D is of the form (A & N) where
10265 		       (N & M) == M, or of the form (A | N) or
10266 		       (A ^ N) where (N & M) == 0, replace it with A.  */
10267 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
10268 		    break;
10269 		  case INTEGER_CST:
10270 		    /* If C or D is a N where (N & M) == 0, it can be
10271 		       omitted (assumed 0).  */
10272 		    if ((TREE_CODE (arg0) == PLUS_EXPR
10273 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10274 			&& (cst1 & wi::to_wide (pmop[which])) == 0)
10275 		      pmop[which] = NULL;
10276 		    break;
10277 		  default:
10278 		    break;
10279 		  }
10280 
10281 	      /* Only build anything new if we optimized one or both arguments
10282 		 above.  */
10283 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
10284 		  || (TREE_CODE (arg0) != NEGATE_EXPR
10285 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
10286 		{
10287 		  tree utype = TREE_TYPE (arg0);
10288 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10289 		    {
10290 		      /* Perform the operations in a type that has defined
10291 			 overflow behavior.  */
10292 		      utype = unsigned_type_for (TREE_TYPE (arg0));
10293 		      if (pmop[0] != NULL)
10294 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10295 		      if (pmop[1] != NULL)
10296 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10297 		    }
10298 
10299 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
10300 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10301 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
10302 		    {
10303 		      if (pmop[0] != NULL && pmop[1] != NULL)
10304 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10305 					       pmop[0], pmop[1]);
10306 		      else if (pmop[0] != NULL)
10307 			tem = pmop[0];
10308 		      else if (pmop[1] != NULL)
10309 			tem = pmop[1];
10310 		      else
10311 			return build_int_cst (type, 0);
10312 		    }
10313 		  else if (pmop[0] == NULL)
10314 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10315 		  else
10316 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10317 					   pmop[0], pmop[1]);
10318 		  /* TEM is now the new binary +, - or unary - replacement.  */
10319 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10320 					 fold_convert_loc (loc, utype, arg1));
10321 		  return fold_convert_loc (loc, type, tem);
10322 		}
10323 	    }
10324 	}
10325 
10326       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
10327       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10328 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10329 	{
10330 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10331 
10332 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10333 	  if (mask == -1)
10334 	    return
10335 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10336 	}
10337 
10338       goto associate;
10339 
10340     case RDIV_EXPR:
10341       /* Don't touch a floating-point divide by zero unless the mode
10342 	 of the constant can represent infinity.  */
10343       if (TREE_CODE (arg1) == REAL_CST
10344 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10345 	  && real_zerop (arg1))
10346 	return NULL_TREE;
10347 
10348       /* (-A) / (-B) -> A / B  */
10349       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10350 	return fold_build2_loc (loc, RDIV_EXPR, type,
10351 			    TREE_OPERAND (arg0, 0),
10352 			    negate_expr (arg1));
10353       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10354 	return fold_build2_loc (loc, RDIV_EXPR, type,
10355 			    negate_expr (arg0),
10356 			    TREE_OPERAND (arg1, 0));
10357       return NULL_TREE;
10358 
10359     case TRUNC_DIV_EXPR:
10360       /* Fall through */
10361 
10362     case FLOOR_DIV_EXPR:
10363       /* Simplify A / (B << N) where A and B are positive and B is
10364 	 a power of 2, to A >> (N + log2(B)).  */
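      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).  */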
10365       strict_overflow_p = false;
10366       if (TREE_CODE (arg1) == LSHIFT_EXPR
10367 	  && (TYPE_UNSIGNED (type)
10368 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10369 	{
10370 	  tree sval = TREE_OPERAND (arg1, 0);
10371 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10372 	    {
10373 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10374 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10375 					 wi::exact_log2 (wi::to_wide (sval)));
10376 
10377 	      if (strict_overflow_p)
10378 		fold_overflow_warning (("assuming signed overflow does not "
10379 					"occur when simplifying A / (B << N)"),
10380 				       WARN_STRICT_OVERFLOW_MISC);
10381 
10382 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10383 					sh_cnt, pow2);
10384 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
10385 				      fold_convert_loc (loc, type, arg0), sh_cnt);
10386 	    }
10387 	}
10388 
10389       /* Fall through */
10390 
10391     case ROUND_DIV_EXPR:
10392     case CEIL_DIV_EXPR:
10393     case EXACT_DIV_EXPR:
10394       if (integer_zerop (arg1))
10395 	return NULL_TREE;
10396 
10397       /* Convert -A / -B to A / B when the type is signed and overflow is
10398 	 undefined.  */
10399       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10400 	  && TREE_CODE (op0) == NEGATE_EXPR
10401 	  && negate_expr_p (op1))
10402 	{
10403 	  if (INTEGRAL_TYPE_P (type))
10404 	    fold_overflow_warning (("assuming signed overflow does not occur "
10405 				    "when distributing negation across "
10406 				    "division"),
10407 				   WARN_STRICT_OVERFLOW_MISC);
10408 	  return fold_build2_loc (loc, code, type,
10409 				  fold_convert_loc (loc, type,
10410 						    TREE_OPERAND (arg0, 0)),
10411 				  negate_expr (op1));
10412 	}
10413       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10414 	  && TREE_CODE (arg1) == NEGATE_EXPR
10415 	  && negate_expr_p (op0))
10416 	{
10417 	  if (INTEGRAL_TYPE_P (type))
10418 	    fold_overflow_warning (("assuming signed overflow does not occur "
10419 				    "when distributing negation across "
10420 				    "division"),
10421 				   WARN_STRICT_OVERFLOW_MISC);
10422 	  return fold_build2_loc (loc, code, type,
10423 				  negate_expr (op0),
10424 				  fold_convert_loc (loc, type,
10425 						    TREE_OPERAND (arg1, 0)));
10426 	}
10427 
10428       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10429 	 operation, EXACT_DIV_EXPR.
10430 
10431 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10432 	 At one time others generated faster code, but it is not clear if they
10433 	 do after the last round of changes to the DIV code in expmed.c.  */
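      /* E.g. a CEIL_DIV_EXPR of X * 4 by 4 involves no rounding, so it
	 is rewritten as an EXACT_DIV_EXPR.  */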
10434       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10435 	  && multiple_of_p (type, arg0, arg1))
10436 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10437 				fold_convert (type, arg0),
10438 				fold_convert (type, arg1));
10439 
10440       strict_overflow_p = false;
10441       if (TREE_CODE (arg1) == INTEGER_CST
10442 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10443 				    &strict_overflow_p)) != 0)
10444 	{
10445 	  if (strict_overflow_p)
10446 	    fold_overflow_warning (("assuming signed overflow does not occur "
10447 				    "when simplifying division"),
10448 				   WARN_STRICT_OVERFLOW_MISC);
10449 	  return fold_convert_loc (loc, type, tem);
10450 	}
10451 
10452       return NULL_TREE;
10453 
10454     case CEIL_MOD_EXPR:
10455     case FLOOR_MOD_EXPR:
10456     case ROUND_MOD_EXPR:
10457     case TRUNC_MOD_EXPR:
10458       strict_overflow_p = false;
10459       if (TREE_CODE (arg1) == INTEGER_CST
10460 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10461 				    &strict_overflow_p)) != 0)
10462 	{
10463 	  if (strict_overflow_p)
10464 	    fold_overflow_warning (("assuming signed overflow does not occur "
10465 				    "when simplifying modulus"),
10466 				   WARN_STRICT_OVERFLOW_MISC);
10467 	  return fold_convert_loc (loc, type, tem);
10468 	}
10469 
10470       return NULL_TREE;
10471 
10472     case LROTATE_EXPR:
10473     case RROTATE_EXPR:
10474     case RSHIFT_EXPR:
10475     case LSHIFT_EXPR:
10476       /* Since negative shift count is not well-defined,
10477 	 don't try to compute it in the compiler.  */
10478       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10479 	return NULL_TREE;
10480 
10481       prec = element_precision (type);
10482 
10483       /* If we have a rotate of a bit operation with the rotate count and
10484 	 the second operand of the bit operation both constant,
10485 	 permute the two operations.  */
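      /* E.g. for 32-bit X, (X & 0xFF00) r>> 8 becomes
	 (X r>> 8) & 0xFF, once the rotated constant is folded.  */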
10486       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10487 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10488 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10489 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10490 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10491 	{
10492 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10493 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10494 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
10495 				  fold_build2_loc (loc, code, type,
10496 						   arg00, arg1),
10497 				  fold_build2_loc (loc, code, type,
10498 						   arg01, arg1));
10499 	}
10500 
10501       /* Two consecutive rotates adding up to some integer
10502 	 multiple of the precision of the type can be ignored.  */
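      /* E.g. for 32-bit X, (X r>> 10) r>> 22 is X itself.  */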
10503       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10504 	  && TREE_CODE (arg0) == RROTATE_EXPR
10505 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10506 	  && wi::umod_trunc (wi::to_wide (arg1)
10507 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
10508 			     prec) == 0)
10509 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10510 
10511       return NULL_TREE;
10512 
10513     case MIN_EXPR:
10514     case MAX_EXPR:
10515       goto associate;
10516 
10517     case TRUTH_ANDIF_EXPR:
10518       /* Note that the operands of this must be ints
10519 	 and their values must be 0 or 1.
10520 	 ("true" is a fixed value perhaps depending on the language.)  */
10521       /* If first arg is constant zero, return it.  */
10522       if (integer_zerop (arg0))
10523 	return fold_convert_loc (loc, type, arg0);
10524       /* FALLTHRU */
10525     case TRUTH_AND_EXPR:
10526       /* If either arg is constant true, drop it.  */
10527       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10528 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10529       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10530 	  /* Preserve sequence points.  */
10531 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10532 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10533       /* If second arg is constant zero, result is zero, but first arg
10534 	 must be evaluated.  */
10535       if (integer_zerop (arg1))
10536 	return omit_one_operand_loc (loc, type, arg1, arg0);
10537       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10538 	 case will be handled here.  */
10539       if (integer_zerop (arg0))
10540 	return omit_one_operand_loc (loc, type, arg0, arg1);
10541 
10542       /* !X && X is always false.  */
10543       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10544 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10545 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10546       /* X && !X is always false.  */
10547       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10548 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10549 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10550 
10551       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10552 	 means A >= Y && A != MAX, but in this case we know that
10553 	 A < X <= MAX.  */
10554 
10555       if (!TREE_SIDE_EFFECTS (arg0)
10556 	  && !TREE_SIDE_EFFECTS (arg1))
10557 	{
10558 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10559 	  if (tem && !operand_equal_p (tem, arg0, 0))
10560 	    return fold_build2_loc (loc, code, type, tem, arg1);
10561 
10562 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10563 	  if (tem && !operand_equal_p (tem, arg1, 0))
10564 	    return fold_build2_loc (loc, code, type, arg0, tem);
10565 	}
10566 
10567       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10568           != NULL_TREE)
10569         return tem;
10570 
10571       return NULL_TREE;
10572 
10573     case TRUTH_ORIF_EXPR:
10574       /* Note that the operands of this must be ints
10575 	 and their values must be 0 or 1.
10576 	 ("true" is a fixed value perhaps depending on the language.)  */
10577       /* If first arg is constant true, return it.  */
10578       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10579 	return fold_convert_loc (loc, type, arg0);
10580       /* FALLTHRU */
10581     case TRUTH_OR_EXPR:
10582       /* If either arg is constant zero, drop it.  */
10583       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10584 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10585       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10586 	  /* Preserve sequence points.  */
10587 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10588 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10589       /* If second arg is constant true, result is true, but we must
10590 	 evaluate first arg.  */
10591       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10592 	return omit_one_operand_loc (loc, type, arg1, arg0);
10593       /* Likewise for first arg, but note this only occurs here for
10594 	 TRUTH_OR_EXPR.  */
10595       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10596 	return omit_one_operand_loc (loc, type, arg0, arg1);
10597 
10598       /* !X || X is always true.  */
10599       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10600 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10601 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10602       /* X || !X is always true.  */
10603       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10604 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10605 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10606 
10607       /* (X && !Y) || (!X && Y) is X ^ Y.  */
10608       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10609 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10610         {
10611 	  tree a0, a1, l0, l1, n0, n1;
10612 
10613 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10614 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10615 
10616 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10617 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10618 
10619 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10620 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10621 
10622 	  if ((operand_equal_p (n0, a0, 0)
10623 	       && operand_equal_p (n1, a1, 0))
10624 	      || (operand_equal_p (n0, a1, 0)
10625 		  && operand_equal_p (n1, a0, 0)))
10626 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10627 	}
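      /* E.g. "(x && !y) || (!x && y)" folds to "x ^ y" by the check
	 above (illustrative note).  */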
10628 
10629       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10630           != NULL_TREE)
10631         return tem;
10632 
10633       return NULL_TREE;
10634 
10635     case TRUTH_XOR_EXPR:
10636       /* If the second arg is constant zero, drop it.  */
10637       if (integer_zerop (arg1))
10638 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10639       /* If the second arg is constant true, this is a logical inversion.  */
10640       if (integer_onep (arg1))
10641 	{
10642 	  tem = invert_truthvalue_loc (loc, arg0);
10643 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10644 	}
10645       /* Identical arguments cancel to zero.  */
10646       if (operand_equal_p (arg0, arg1, 0))
10647 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10648 
10649       /* !X ^ X is always true.  */
10650       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10651 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10652 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10653 
10654       /* X ^ !X is always true.  */
10655       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10656 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10657 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10658 
10659       return NULL_TREE;
10660 
10661     case EQ_EXPR:
10662     case NE_EXPR:
10663       STRIP_NOPS (arg0);
10664       STRIP_NOPS (arg1);
10665 
10666       tem = fold_comparison (loc, code, type, op0, op1);
10667       if (tem != NULL_TREE)
10668 	return tem;
10669 
10670       /* bool_var != 1 becomes !bool_var. */
10671       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10672           && code == NE_EXPR)
10673         return fold_convert_loc (loc, type,
10674 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10675 						  TREE_TYPE (arg0), arg0));
10676 
10677       /* bool_var == 0 becomes !bool_var. */
10678       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10679           && code == EQ_EXPR)
10680         return fold_convert_loc (loc, type,
10681 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10682 						  TREE_TYPE (arg0), arg0));
10683 
10684       /* !exp != 0 becomes !exp.  */
10685       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10686 	  && code == NE_EXPR)
10687         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10688 
10689       /* If this is an EQ or NE comparison with zero and ARG0 is
10690 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10691 	 two operations, but the latter can be done in one less insn
10692 	 on machines that have only two-operand insns or on which a
10693 	 constant cannot be the first operand.  */
10694       if (TREE_CODE (arg0) == BIT_AND_EXPR
10695 	  && integer_zerop (arg1))
10696 	{
10697 	  tree arg00 = TREE_OPERAND (arg0, 0);
10698 	  tree arg01 = TREE_OPERAND (arg0, 1);
10699 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10700 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10701 	    {
10702 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10703 				      arg01, TREE_OPERAND (arg00, 1));
10704 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10705 				 build_int_cst (TREE_TYPE (arg0), 1));
10706 	      return fold_build2_loc (loc, code, type,
10707 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10708 				  arg1);
10709 	    }
10710 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10711 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10712 	    {
10713 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10714 				      arg00, TREE_OPERAND (arg01, 1));
10715 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10716 				 build_int_cst (TREE_TYPE (arg0), 1));
10717 	      return fold_build2_loc (loc, code, type,
10718 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10719 				  arg1);
10720 	    }
10721 	}
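      /* Worked example (illustrative): for "((1 << foo) & bar) != 0"
	 with foo == 3 and bar == 0x28, the rewritten form
	 "((bar >> foo) & 1) != 0" tests the same bit: 0x28 >> 3 == 5
	 and 5 & 1 == 1, just as (1 << 3) & 0x28 == 8 is nonzero.  */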
10722 
10723       /* If this is an NE or EQ comparison of zero against the result of a
10724 	 signed MOD operation whose second operand is a power of 2, make
10725 	 the MOD operation unsigned since it is simpler and equivalent.  */
10726       if (integer_zerop (arg1)
10727 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10728 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10729 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10730 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10731 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10732 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10733 	{
10734 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10735 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10736 				     fold_convert_loc (loc, newtype,
10737 						       TREE_OPERAND (arg0, 0)),
10738 				     fold_convert_loc (loc, newtype,
10739 						       TREE_OPERAND (arg0, 1)));
10740 
10741 	  return fold_build2_loc (loc, code, type, newmod,
10742 			      fold_convert_loc (loc, newtype, arg1));
10743 	}
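      /* E.g. "(x % 8) == 0" for signed x becomes
	 "((unsigned) x % 8u) == 0" (a sketch); whether the remainder
	 is zero does not depend on its sign, and the unsigned MOD can
	 be expanded as a simple mask.  */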
10744 
10745       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10746 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10747 	 a single bit.  */
10748       if (TREE_CODE (arg0) == BIT_AND_EXPR
10749 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10750 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10751 	     == INTEGER_CST
10752 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10753 	  && integer_zerop (arg1))
10754 	{
10755 	  tree itype = TREE_TYPE (arg0);
10756 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10757 	  prec = TYPE_PRECISION (itype);
10758 
10759 	  /* Check for a valid shift count.  */
10760 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
10761 	    {
10762 	      tree arg01 = TREE_OPERAND (arg0, 1);
10763 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10764 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10765 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10766 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10767 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10768 		{
10769 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10770 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10771 		  return fold_build2_loc (loc, code, type, tem,
10772 					  fold_convert_loc (loc, itype, arg1));
10773 		}
10774 	      /* Otherwise, for signed (arithmetic) shifts,
10775 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10776 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10777 	      else if (!TYPE_UNSIGNED (itype))
10778 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10779 				    arg000, build_int_cst (itype, 0));
10780 	      /* Otherwise, for unsigned (logical) shifts,
10781 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10782 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10783 	      else
10784 		return omit_one_operand_loc (loc, type,
10785 					 code == EQ_EXPR ? integer_one_node
10786 							 : integer_zero_node,
10787 					 arg000);
10788 	    }
10789 	}
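      /* Illustrative example: for an 8-bit unsigned X,
	 "((X >> 2) & 8) != 0" becomes "(X & (8 << 2)) != 0", i.e.
	 "(X & 0x20) != 0", since log2 (8) + 2 == 5 is still below the
	 8-bit precision, so C2 << C1 does not overflow.  */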
10790 
10791       /* If this is a comparison of a field, we may be able to simplify it.  */
10792       if ((TREE_CODE (arg0) == COMPONENT_REF
10793 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10794 	  /* Handle the constant case even without -O
10795 	     to make sure the warnings are given.  */
10796 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10797 	{
10798 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10799 	  if (t1)
10800 	    return t1;
10801 	}
10802 
10803       /* Optimize comparisons of strlen vs zero to a compare of the
10804 	 first character of the string vs zero.  To wit,
10805 		strlen(ptr) == 0   =>  *ptr == 0
10806 		strlen(ptr) != 0   =>  *ptr != 0
10807 	 Other cases should reduce to one of these two (or a constant)
10808 	 due to the return value of strlen being unsigned.  */
10809       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
10810 	{
10811 	  tree fndecl = get_callee_fndecl (arg0);
10812 
10813 	  if (fndecl
10814 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10815 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10816 	      && call_expr_nargs (arg0) == 1
10817 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
10818 		  == POINTER_TYPE))
10819 	    {
10820 	      tree ptrtype
10821 		= build_pointer_type (build_qualified_type (char_type_node,
10822 							    TYPE_QUAL_CONST));
10823 	      tree ptr = fold_convert_loc (loc, ptrtype,
10824 					   CALL_EXPR_ARG (arg0, 0));
10825 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
10826 	      return fold_build2_loc (loc, code, type, iref,
10827 				      build_int_cst (TREE_TYPE (iref), 0));
10828 	    }
10829 	}
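      /* E.g. "strlen (s) != 0" folds to "*(const char *) s != 0",
	 replacing the call by a single byte load (illustrative
	 note).  */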
10830 
10831       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10832 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10833       if (TREE_CODE (arg0) == RSHIFT_EXPR
10834 	  && integer_zerop (arg1)
10835 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10836 	{
10837 	  tree arg00 = TREE_OPERAND (arg0, 0);
10838 	  tree arg01 = TREE_OPERAND (arg0, 1);
10839 	  tree itype = TREE_TYPE (arg00);
10840 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
10841 	    {
10842 	      if (TYPE_UNSIGNED (itype))
10843 		{
10844 		  itype = signed_type_for (itype);
10845 		  arg00 = fold_convert_loc (loc, itype, arg00);
10846 		}
10847 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10848 				  type, arg00, build_zero_cst (itype));
10849 	    }
10850 	}
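      /* E.g. (a sketch): for 32-bit int x, "(x >> 31) != 0" tests
	 exactly the sign bit and folds to "x < 0"; an unsigned x is
	 first converted to the corresponding signed type.  */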
10851 
10852       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10853 	 (X & C) == 0 when C is a single bit.  */
10854       if (TREE_CODE (arg0) == BIT_AND_EXPR
10855 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10856 	  && integer_zerop (arg1)
10857 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10858 	{
10859 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10860 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10861 				 TREE_OPERAND (arg0, 1));
10862 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10863 				  type, tem,
10864 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10865 						    arg1));
10866 	}
10867 
10868       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10869 	 constant C is a power of two, i.e. a single bit.  */
10870       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10871 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10872 	  && integer_zerop (arg1)
10873 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10874 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10875 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10876 	{
10877 	  tree arg00 = TREE_OPERAND (arg0, 0);
10878 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10879 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10880 	}
10881 
10882       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10883 	 when C is a power of two, i.e. a single bit.  */
10884       if (TREE_CODE (arg0) == BIT_AND_EXPR
10885 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10886 	  && integer_zerop (arg1)
10887 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10888 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10889 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10890 	{
10891 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10892 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10893 			     arg000, TREE_OPERAND (arg0, 1));
10894 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10895 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10896 	}
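      /* Both of the folds above reduce to a plain bit test; e.g.
	 (illustrative) "((x ^ 8) & 8) != 0" says bit 3 of x differs
	 from 1, i.e. is clear, so it becomes "(x & 8) == 0".  */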
10897 
10898       if (integer_zerop (arg1)
10899 	  && tree_expr_nonzero_p (arg0))
10900         {
10901 	  tree res = constant_boolean_node (code==NE_EXPR, type);
10902 	  return omit_one_operand_loc (loc, type, res, arg0);
10903 	}
10904 
10905       /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
10906       if (TREE_CODE (arg0) == BIT_AND_EXPR
10907 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10908 	{
10909 	  tree arg00 = TREE_OPERAND (arg0, 0);
10910 	  tree arg01 = TREE_OPERAND (arg0, 1);
10911 	  tree arg10 = TREE_OPERAND (arg1, 0);
10912 	  tree arg11 = TREE_OPERAND (arg1, 1);
10913 	  tree itype = TREE_TYPE (arg0);
10914 
10915 	  if (operand_equal_p (arg01, arg11, 0))
10916 	    {
10917 	      tem = fold_convert_loc (loc, itype, arg10);
10918 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10919 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10920 	      return fold_build2_loc (loc, code, type, tem,
10921 				      build_zero_cst (itype));
10922 	    }
10923 	  if (operand_equal_p (arg01, arg10, 0))
10924 	    {
10925 	      tem = fold_convert_loc (loc, itype, arg11);
10926 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10927 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10928 	      return fold_build2_loc (loc, code, type, tem,
10929 				      build_zero_cst (itype));
10930 	    }
10931 	  if (operand_equal_p (arg00, arg11, 0))
10932 	    {
10933 	      tem = fold_convert_loc (loc, itype, arg10);
10934 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10935 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10936 	      return fold_build2_loc (loc, code, type, tem,
10937 				      build_zero_cst (itype));
10938 	    }
10939 	  if (operand_equal_p (arg00, arg10, 0))
10940 	    {
10941 	      tem = fold_convert_loc (loc, itype, arg11);
10942 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10943 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10944 	      return fold_build2_loc (loc, code, type, tem,
10945 				      build_zero_cst (itype));
10946 	    }
10947 	}
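      /* E.g. "(x & 0xff) == (y & 0xff)" becomes
	 "((x ^ y) & 0xff) == 0" (illustrative): the masked parts
	 agree exactly when their XOR has no bits inside the mask.  */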
10948 
10949       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10950 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10951 	{
10952 	  tree arg00 = TREE_OPERAND (arg0, 0);
10953 	  tree arg01 = TREE_OPERAND (arg0, 1);
10954 	  tree arg10 = TREE_OPERAND (arg1, 0);
10955 	  tree arg11 = TREE_OPERAND (arg1, 1);
10956 	  tree itype = TREE_TYPE (arg0);
10957 
10958 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10959 	     operand_equal_p guarantees no side-effects so we don't need
10960 	     to use omit_one_operand on Z.  */
10961 	  if (operand_equal_p (arg01, arg11, 0))
10962 	    return fold_build2_loc (loc, code, type, arg00,
10963 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10964 						      arg10));
10965 	  if (operand_equal_p (arg01, arg10, 0))
10966 	    return fold_build2_loc (loc, code, type, arg00,
10967 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10968 						      arg11));
10969 	  if (operand_equal_p (arg00, arg11, 0))
10970 	    return fold_build2_loc (loc, code, type, arg01,
10971 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10972 						      arg10));
10973 	  if (operand_equal_p (arg00, arg10, 0))
10974 	    return fold_build2_loc (loc, code, type, arg01,
10975 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10976 						      arg11));
10977 
10978 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
10979 	  if (TREE_CODE (arg01) == INTEGER_CST
10980 	      && TREE_CODE (arg11) == INTEGER_CST)
10981 	    {
10982 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10983 				     fold_convert_loc (loc, itype, arg11));
10984 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10985 	      return fold_build2_loc (loc, code, type, tem,
10986 				      fold_convert_loc (loc, itype, arg10));
10987 	    }
10988 	}
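      /* E.g. "(x ^ z) == (y ^ z)" simplifies to "x == y", and
	 "(x ^ 5) == (y ^ 3)" becomes "(x ^ (5 ^ 3)) == y", i.e.
	 "(x ^ 6) == y" (illustrative examples).  */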
10989 
10990       /* Attempt to simplify equality/inequality comparisons of complex
10991 	 values.  Only lower the comparison if the result is known or
10992 	 can be simplified to a single scalar comparison.  */
10993       if ((TREE_CODE (arg0) == COMPLEX_EXPR
10994 	   || TREE_CODE (arg0) == COMPLEX_CST)
10995 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
10996 	      || TREE_CODE (arg1) == COMPLEX_CST))
10997 	{
10998 	  tree real0, imag0, real1, imag1;
10999 	  tree rcond, icond;
11000 
11001 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
11002 	    {
11003 	      real0 = TREE_OPERAND (arg0, 0);
11004 	      imag0 = TREE_OPERAND (arg0, 1);
11005 	    }
11006 	  else
11007 	    {
11008 	      real0 = TREE_REALPART (arg0);
11009 	      imag0 = TREE_IMAGPART (arg0);
11010 	    }
11011 
11012 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
11013 	    {
11014 	      real1 = TREE_OPERAND (arg1, 0);
11015 	      imag1 = TREE_OPERAND (arg1, 1);
11016 	    }
11017 	  else
11018 	    {
11019 	      real1 = TREE_REALPART (arg1);
11020 	      imag1 = TREE_IMAGPART (arg1);
11021 	    }
11022 
11023 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
11024 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11025 	    {
11026 	      if (integer_zerop (rcond))
11027 		{
11028 		  if (code == EQ_EXPR)
11029 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11030 					      imag0, imag1);
11031 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11032 		}
11033 	      else
11034 		{
11035 		  if (code == NE_EXPR)
11036 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11037 					      imag0, imag1);
11038 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11039 		}
11040 	    }
11041 
11042 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11043 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11044 	    {
11045 	      if (integer_zerop (icond))
11046 		{
11047 		  if (code == EQ_EXPR)
11048 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11049 					      real0, real1);
11050 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11051 		}
11052 	      else
11053 		{
11054 		  if (code == NE_EXPR)
11055 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11056 					      real0, real1);
11057 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11058 		}
11059 	    }
11060 	}
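      /* E.g. (a sketch): when the imaginary parts fold to a known
	 equal constant, "__complex__ (a, 0.0) == __complex__ (b, 0.0)"
	 lowers to the single scalar comparison "a == b".  */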
11061 
11062       return NULL_TREE;
11063 
11064     case LT_EXPR:
11065     case GT_EXPR:
11066     case LE_EXPR:
11067     case GE_EXPR:
11068       tem = fold_comparison (loc, code, type, op0, op1);
11069       if (tem != NULL_TREE)
11070 	return tem;
11071 
11072       /* Transform comparisons of the form X +- C CMP X.  */
11073       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11074 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11075 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11076 	  && !HONOR_SNANS (arg0))
11077 	{
11078 	  tree arg01 = TREE_OPERAND (arg0, 1);
11079 	  enum tree_code code0 = TREE_CODE (arg0);
11080 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11081 
11082 	  /* (X - c) > X becomes false.  */
11083 	  if (code == GT_EXPR
11084 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11085 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11086 	    return constant_boolean_node (0, type);
11087 
11088 	  /* Likewise (X + c) < X becomes false.  */
11089 	  if (code == LT_EXPR
11090 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11091 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11092 	    return constant_boolean_node (0, type);
11093 
11094 	  /* Convert (X - c) <= X to true.  */
11095 	  if (!HONOR_NANS (arg1)
11096 	      && code == LE_EXPR
11097 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11098 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11099 	    return constant_boolean_node (1, type);
11100 
11101 	  /* Convert (X + c) >= X to true.  */
11102 	  if (!HONOR_NANS (arg1)
11103 	      && code == GE_EXPR
11104 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11105 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11106 	    return constant_boolean_node (1, type);
11107 	}
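      /* E.g. "(x - 1.0) > x" is false for every x, including
	 infinities and quiet NaNs, so it needs no HONOR_NANS check;
	 "(x - 1.0) <= x" is folded to true only when NaNs cannot
	 occur, since a NaN operand makes it false (illustrative
	 note).  */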
11108 
11109       /* If we are comparing an ABS_EXPR with a constant, we can
11110 	 convert all the cases into explicit comparisons, but they may
11111 	 well not be faster than doing the ABS and one comparison.
11112 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11113 	 and a comparison, and is probably faster.  */
11114       if (code == LE_EXPR
11115 	  && TREE_CODE (arg1) == INTEGER_CST
11116 	  && TREE_CODE (arg0) == ABS_EXPR
11117 	  && ! TREE_SIDE_EFFECTS (arg0)
11118 	  && (tem = negate_expr (arg1)) != 0
11119 	  && TREE_CODE (tem) == INTEGER_CST
11120 	  && !TREE_OVERFLOW (tem))
11121 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11122 			    build2 (GE_EXPR, type,
11123 				    TREE_OPERAND (arg0, 0), tem),
11124 			    build2 (LE_EXPR, type,
11125 				    TREE_OPERAND (arg0, 0), arg1));
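      /* E.g. "abs (x) <= 7" becomes "x >= -7 && x <= 7"
	 (illustrative; the negation of C must not overflow, which
	 the TREE_OVERFLOW check above guarantees).  */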
11126 
11127       /* Convert ABS_EXPR<x> >= 0 to true.  */
11128       strict_overflow_p = false;
11129       if (code == GE_EXPR
11130 	  && (integer_zerop (arg1)
11131 	      || (! HONOR_NANS (arg0)
11132 		  && real_zerop (arg1)))
11133 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11134 	{
11135 	  if (strict_overflow_p)
11136 	    fold_overflow_warning (("assuming signed overflow does not occur "
11137 				    "when simplifying comparison of "
11138 				    "absolute value and zero"),
11139 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11140 	  return omit_one_operand_loc (loc, type,
11141 				       constant_boolean_node (true, type),
11142 				       arg0);
11143 	}
11144 
11145       /* Convert ABS_EXPR<x> < 0 to false.  */
11146       strict_overflow_p = false;
11147       if (code == LT_EXPR
11148 	  && (integer_zerop (arg1) || real_zerop (arg1))
11149 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11150 	{
11151 	  if (strict_overflow_p)
11152 	    fold_overflow_warning (("assuming signed overflow does not occur "
11153 				    "when simplifying comparison of "
11154 				    "absolute value and zero"),
11155 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11156 	  return omit_one_operand_loc (loc, type,
11157 				       constant_boolean_node (false, type),
11158 				       arg0);
11159 	}
11160 
11161       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11162 	 and similarly for >= into !=.  */
11163       if ((code == LT_EXPR || code == GE_EXPR)
11164 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11165 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11166 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11167 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11168 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11169 				   TREE_OPERAND (arg1, 1)),
11170 			   build_zero_cst (TREE_TYPE (arg0)));
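      /* E.g. (a sketch): for unsigned x, "x < (1 << y)" holds
	 exactly when no bit of x at position >= y is set, i.e. when
	 "(x >> y) == 0".  */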
11171 
11172       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
11173 	 otherwise Y might be >= # of bits in X's type and thus e.g.
11174 	 (unsigned char) (1 << Y) for Y 15 might be 0.
11175 	 If the cast is widening, then 1 << Y should have unsigned type,
11176 	 otherwise if Y is number of bits in the signed shift type minus 1,
11177 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
11178 	 31 might be 0xffffffff80000000.  */
11179       if ((code == LT_EXPR || code == GE_EXPR)
11180 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11181 	  && CONVERT_EXPR_P (arg1)
11182 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11183 	  && (element_precision (TREE_TYPE (arg1))
11184 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11185 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11186 	      || (element_precision (TREE_TYPE (arg1))
11187 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11188 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11189 	{
11190 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11191 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11192 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11193 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11194 			     build_zero_cst (TREE_TYPE (arg0)));
11195 	}
11196 
11197       return NULL_TREE;
11198 
11199     case UNORDERED_EXPR:
11200     case ORDERED_EXPR:
11201     case UNLT_EXPR:
11202     case UNLE_EXPR:
11203     case UNGT_EXPR:
11204     case UNGE_EXPR:
11205     case UNEQ_EXPR:
11206     case LTGT_EXPR:
11207       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11208       {
11209 	tree targ0 = strip_float_extensions (arg0);
11210 	tree targ1 = strip_float_extensions (arg1);
11211 	tree newtype = TREE_TYPE (targ0);
11212 
11213 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11214 	  newtype = TREE_TYPE (targ1);
11215 
11216 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11217 	  return fold_build2_loc (loc, code, type,
11218 			      fold_convert_loc (loc, newtype, targ0),
11219 			      fold_convert_loc (loc, newtype, targ1));
11220       }
11221 
11222       return NULL_TREE;
11223 
11224     case COMPOUND_EXPR:
11225       /* When pedantic, a compound expression can be neither an lvalue
11226 	 nor an integer constant expression.  */
11227       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11228 	return NULL_TREE;
11229       /* Don't let (0, 0) be a null pointer constant.  */
11230       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11231 				 : fold_convert_loc (loc, type, arg1);
11232       return pedantic_non_lvalue_loc (loc, tem);
11233 
11234     case ASSERT_EXPR:
11235       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11236       gcc_unreachable ();
11237 
11238     default:
11239       return NULL_TREE;
11240     } /* switch (code) */
11241 }
11242 
11243 /* Used by contains_label_p and contains_label_1.  */
11244 
11245 struct contains_label_data
11246 {
11247   hash_set<tree> *pset;
11248   bool inside_switch_p;
11249 };
11250 
11251 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
11252    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11253    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
11254 
11255 static tree
11256 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11257 {
11258   contains_label_data *d = (contains_label_data *) data;
11259   switch (TREE_CODE (*tp))
11260     {
11261     case LABEL_EXPR:
11262       return *tp;
11263 
11264     case CASE_LABEL_EXPR:
11265       if (!d->inside_switch_p)
11266 	return *tp;
11267       return NULL_TREE;
11268 
11269     case SWITCH_EXPR:
11270       if (!d->inside_switch_p)
11271 	{
11272 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11273 	    return *tp;
11274 	  d->inside_switch_p = true;
11275 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11276 	    return *tp;
11277 	  d->inside_switch_p = false;
11278 	  *walk_subtrees = 0;
11279 	}
11280       return NULL_TREE;
11281 
11282     case GOTO_EXPR:
11283       *walk_subtrees = 0;
11284       return NULL_TREE;
11285 
11286     default:
11287       return NULL_TREE;
11288     }
11289 }
11290 
11291 /* Return whether the sub-tree ST contains a label which is accessible from
11292    outside the sub-tree.  */
11293 
11294 static bool
11295 contains_label_p (tree st)
11296 {
11297   hash_set<tree> pset;
11298   contains_label_data data = { &pset, false };
11299   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11300 }
11301 
11302 /* Fold a ternary expression of code CODE and type TYPE with operands
11303    OP0, OP1, and OP2.  Return the folded expression if folding is
11304    successful.  Otherwise, return NULL_TREE.  */
11305 
11306 tree
11307 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11308 		  tree op0, tree op1, tree op2)
11309 {
11310   tree tem;
11311   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11312   enum tree_code_class kind = TREE_CODE_CLASS (code);
11313 
11314   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11315 	      && TREE_CODE_LENGTH (code) == 3);
11316 
11317   /* If this is a commutative operation, and OP0 is a constant, move it
11318      to OP1 to reduce the number of tests below.  */
11319   if (commutative_ternary_tree_code (code)
11320       && tree_swap_operands_p (op0, op1))
11321     return fold_build3_loc (loc, code, type, op1, op0, op2);
11322 
11323   tem = generic_simplify (loc, code, type, op0, op1, op2);
11324   if (tem)
11325     return tem;
11326 
11327   /* Strip any conversions that don't change the mode.  This is safe
11328      for every expression, except for a comparison expression because
11329      its signedness is derived from its operands.  So, in the latter
11330      case, only strip conversions that don't change the signedness.
11331 
11332      Note that this is done as an internal manipulation within the
11333      constant folder, in order to find the simplest representation of
11334      the arguments so that their form can be studied.  In any case,
11335      the appropriate type conversions should be put back into the tree
11336      that comes out of the constant folder.  */
11337   if (op0)
11338     {
11339       arg0 = op0;
11340       STRIP_NOPS (arg0);
11341     }
11342 
11343   if (op1)
11344     {
11345       arg1 = op1;
11346       STRIP_NOPS (arg1);
11347     }
11348 
11349   if (op2)
11350     {
11351       arg2 = op2;
11352       STRIP_NOPS (arg2);
11353     }
11354 
11355   switch (code)
11356     {
11357     case COMPONENT_REF:
11358       if (TREE_CODE (arg0) == CONSTRUCTOR
11359 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11360 	{
11361 	  unsigned HOST_WIDE_INT idx;
11362 	  tree field, value;
11363 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11364 	    if (field == arg1)
11365 	      return value;
11366 	}
11367       return NULL_TREE;
11368 
11369     case COND_EXPR:
11370     case VEC_COND_EXPR:
11371       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11372 	 so all simple results must be passed through pedantic_non_lvalue.  */
11373       if (TREE_CODE (arg0) == INTEGER_CST)
11374 	{
11375 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11376 	  tem = integer_zerop (arg0) ? op2 : op1;
11377 	  /* Only optimize constant conditions when the selected branch
11378 	     has the same type as the COND_EXPR.  This avoids optimizing
11379              away "c ? x : throw", where the throw has a void type.
11380              Avoid throwing away an operand which contains a label.  */
11381           if ((!TREE_SIDE_EFFECTS (unused_op)
11382                || !contains_label_p (unused_op))
11383               && (! VOID_TYPE_P (TREE_TYPE (tem))
11384                   || VOID_TYPE_P (type)))
11385 	    return pedantic_non_lvalue_loc (loc, tem);
11386 	  return NULL_TREE;
11387 	}
11388       else if (TREE_CODE (arg0) == VECTOR_CST)
11389 	{
11390 	  unsigned HOST_WIDE_INT nelts;
11391 	  if ((TREE_CODE (arg1) == VECTOR_CST
11392 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11393 	      && (TREE_CODE (arg2) == VECTOR_CST
11394 		  || TREE_CODE (arg2) == CONSTRUCTOR)
11395 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11396 	    {
11397 	      vec_perm_builder sel (nelts, nelts, 1);
11398 	      for (unsigned int i = 0; i < nelts; i++)
11399 		{
11400 		  tree val = VECTOR_CST_ELT (arg0, i);
11401 		  if (integer_all_onesp (val))
11402 		    sel.quick_push (i);
11403 		  else if (integer_zerop (val))
11404 		    sel.quick_push (nelts + i);
11405 		  else /* Currently unreachable.  */
11406 		    return NULL_TREE;
11407 		}
11408 	      vec_perm_indices indices (sel, 2, nelts);
11409 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
11410 	      if (t != NULL_TREE)
11411 		return t;
11412 	    }
11413 	}
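      /* E.g. "1 ? a : b" folds to "a", and a VEC_COND_EXPR whose
	 mask is the constant { -1, -1, 0, 0 } folds to a permutation
	 taking two elements from op1 and two from op2 (illustrative
	 sketch).  */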
11414 
11415       /* If we have A op B ? A : C, we may be able to convert this to a
11416 	 simpler expression, depending on the operation and the values
11417 	 of B and C.  Signed zeros prevent all of these transformations,
11418 	 for reasons given above each one.
11419 
11420          Also try swapping the arguments and inverting the conditional.  */
11421       if (COMPARISON_CLASS_P (arg0)
11422 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11423 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11424 	{
11425 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11426 	  if (tem)
11427 	    return tem;
11428 	}
11429 
11430       if (COMPARISON_CLASS_P (arg0)
11431 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11432 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11433 	{
11434 	  location_t loc0 = expr_location_or (arg0, loc);
11435 	  tem = fold_invert_truthvalue (loc0, arg0);
11436 	  if (tem && COMPARISON_CLASS_P (tem))
11437 	    {
11438 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11439 	      if (tem)
11440 		return tem;
11441 	    }
11442 	}
11443 
11444       /* If the second operand is simpler than the third, swap them
11445 	 since that produces better jump optimization results.  */
11446       if (truth_value_p (TREE_CODE (arg0))
11447 	  && tree_swap_operands_p (op1, op2))
11448 	{
11449 	  location_t loc0 = expr_location_or (arg0, loc);
11450 	  /* See if this can be inverted.  If it can't, possibly because
11451 	     it was a floating-point inequality comparison, don't do
11452 	     anything.  */
11453 	  tem = fold_invert_truthvalue (loc0, arg0);
11454 	  if (tem)
11455 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11456 	}
11457 
11458       /* Convert A ? 1 : 0 to simply A.  */
11459       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11460 				 : (integer_onep (op1)
11461 				    && !VECTOR_TYPE_P (type)))
11462 	  && integer_zerop (op2)
11463 	  /* If we try to convert OP0 to our type, the
11464 	     call to fold will try to move the conversion inside
11465 	     a COND, which will recurse.  In that case, the COND_EXPR
11466 	     is probably the best choice, so leave it alone.  */
11467 	  && type == TREE_TYPE (arg0))
11468 	return pedantic_non_lvalue_loc (loc, arg0);
11469 
11470       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11471 	 over COND_EXPR in cases such as floating point comparisons.  */
11472       if (integer_zerop (op1)
11473 	  && code == COND_EXPR
11474 	  && integer_onep (op2)
11475 	  && !VECTOR_TYPE_P (type)
11476 	  && truth_value_p (TREE_CODE (arg0)))
11477 	return pedantic_non_lvalue_loc (loc,
11478 				    fold_convert_loc (loc, type,
11479 					      invert_truthvalue_loc (loc,
11480 								     arg0)));
11481 
11482       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11483       if (TREE_CODE (arg0) == LT_EXPR
11484 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11485 	  && integer_zerop (op2)
11486 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11487 	{
11488 	  /* sign_bit_p looks through both zero and sign extensions,
11489 	     but for this optimization only sign extensions are
11490 	     usable.  */
11491 	  tree tem2 = TREE_OPERAND (arg0, 0);
11492 	  while (tem != tem2)
11493 	    {
11494 	      if (TREE_CODE (tem2) != NOP_EXPR
11495 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11496 		{
11497 		  tem = NULL_TREE;
11498 		  break;
11499 		}
11500 	      tem2 = TREE_OPERAND (tem2, 0);
11501 	    }
11502 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11503 	     If <sign bit of A> has wider type than A, bits outside
11504 	     of A's precision in <sign bit of A> need to be checked.
11505 	     If they are all 0, this optimization needs to be done
11506 	     in unsigned A's type; if they are all 1, in signed A's
11507 	     type; otherwise this can't be done.  */
11508 	  if (tem
11509 	      && TYPE_PRECISION (TREE_TYPE (tem))
11510 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11511 	      && TYPE_PRECISION (TREE_TYPE (tem))
11512 		 < TYPE_PRECISION (type))
11513 	    {
11514 	      int inner_width, outer_width;
11515 	      tree tem_type;
11516 
11517 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11518 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11519 	      if (outer_width > TYPE_PRECISION (type))
11520 		outer_width = TYPE_PRECISION (type);
11521 
11522 	      wide_int mask = wi::shifted_mask
11523 		(inner_width, outer_width - inner_width, false,
11524 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11525 
11526 	      wide_int common = mask & wi::to_wide (arg1);
11527 	      if (common == mask)
11528 		{
11529 		  tem_type = signed_type_for (TREE_TYPE (tem));
11530 		  tem = fold_convert_loc (loc, tem_type, tem);
11531 		}
11532 	      else if (common == 0)
11533 		{
11534 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11535 		  tem = fold_convert_loc (loc, tem_type, tem);
11536 		}
11537 	      else
11538 		tem = NULL;
11539 	    }
11540 
11541 	  if (tem)
11542 	    return
11543 	      fold_convert_loc (loc, type,
11544 				fold_build2_loc (loc, BIT_AND_EXPR,
11545 					     TREE_TYPE (tem), tem,
11546 					     fold_convert_loc (loc,
11547 							       TREE_TYPE (tem),
11548 							       arg1)));
11549 	}
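      /* E.g. (illustrative): for 32-bit int a,
	 "a < 0 ? INT_MIN : 0" folds to "a & INT_MIN", the sign bit
	 of a in the result type.  */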
11550 
11551       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11552 	 already handled above.  */
11553       if (TREE_CODE (arg0) == BIT_AND_EXPR
11554 	  && integer_onep (TREE_OPERAND (arg0, 1))
11555 	  && integer_zerop (op2)
11556 	  && integer_pow2p (arg1))
11557 	{
11558 	  tree tem = TREE_OPERAND (arg0, 0);
11559 	  STRIP_NOPS (tem);
11560 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11561 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11562               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11563 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11564 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11565 				    fold_convert_loc (loc, type,
11566 						      TREE_OPERAND (tem, 0)),
11567 				    op1);
11568 	}
11569 
11570       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11571 	 is probably obsolete because the first operand should be a
11572 	 truth value (that's why we have the two cases above), but let's
11573 	 leave it in until we can confirm this for all front-ends.  */
11574       if (integer_zerop (op2)
11575 	  && TREE_CODE (arg0) == NE_EXPR
11576 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11577 	  && integer_pow2p (arg1)
11578 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11579 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11580 			      arg1, OEP_ONLY_CONST)
11581 	  /* operand_equal_p compares just the value, not the precision,
11582 	     so e.g. arg1 could be 8-bit -128 and be a power of two, while
11583 	     the BIT_AND_EXPR second operand is 32-bit -128, which is not
11584 	     a power of two (or vice versa).  */
11585 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11586 	return pedantic_non_lvalue_loc (loc,
11587 					fold_convert_loc (loc, type,
11588 							  TREE_OPERAND (arg0,
11589 									0)));
11590 
11591       /* Disable the transformations below for vectors, since
11592 	 fold_binary_op_with_conditional_arg may undo them immediately,
11593 	 yielding an infinite loop.  */
11594       if (code == VEC_COND_EXPR)
11595 	return NULL_TREE;
11596 
11597       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11598       if (integer_zerop (op2)
11599 	  && truth_value_p (TREE_CODE (arg0))
11600 	  && truth_value_p (TREE_CODE (arg1))
11601 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11602 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11603 							   : TRUTH_ANDIF_EXPR,
11604 				type, fold_convert_loc (loc, type, arg0), op1);
11605 
11606       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11607       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11608 	  && truth_value_p (TREE_CODE (arg0))
11609 	  && truth_value_p (TREE_CODE (arg1))
11610 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11611 	{
11612 	  location_t loc0 = expr_location_or (arg0, loc);
11613 	  /* Only perform transformation if ARG0 is easily inverted.  */
11614 	  tem = fold_invert_truthvalue (loc0, arg0);
11615 	  if (tem)
11616 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11617 					 ? BIT_IOR_EXPR
11618 					 : TRUTH_ORIF_EXPR,
11619 				    type, fold_convert_loc (loc, type, tem),
11620 				    op1);
11621 	}
11622 
11623       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11624       if (integer_zerop (arg1)
11625 	  && truth_value_p (TREE_CODE (arg0))
11626 	  && truth_value_p (TREE_CODE (op2))
11627 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11628 	{
11629 	  location_t loc0 = expr_location_or (arg0, loc);
11630 	  /* Only perform transformation if ARG0 is easily inverted.  */
11631 	  tem = fold_invert_truthvalue (loc0, arg0);
11632 	  if (tem)
11633 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11634 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11635 				    type, fold_convert_loc (loc, type, tem),
11636 				    op2);
11637 	}
11638 
11639       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11640       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11641 	  && truth_value_p (TREE_CODE (arg0))
11642 	  && truth_value_p (TREE_CODE (op2))
11643 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11644 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11645 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11646 				type, fold_convert_loc (loc, type, arg0), op2);
11647 
11648       return NULL_TREE;
11649 
11650     case CALL_EXPR:
11651       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11652 	 of fold_ternary on them.  */
11653       gcc_unreachable ();
11654 
11655     case BIT_FIELD_REF:
11656       if (TREE_CODE (arg0) == VECTOR_CST
11657 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11658 	      || (VECTOR_TYPE_P (type)
11659 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11660 	  && tree_fits_uhwi_p (op1)
11661 	  && tree_fits_uhwi_p (op2))
11662 	{
11663 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11664 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11665 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11666 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11667 
11668 	  if (n != 0
11669 	      && (idx % width) == 0
11670 	      && (n % width) == 0
11671 	      && known_le ((idx + n) / width,
11672 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11673 	    {
11674 	      idx = idx / width;
11675 	      n = n / width;
11676 
11677 	      if (TREE_CODE (arg0) == VECTOR_CST)
11678 		{
11679 		  if (n == 1)
11680 		    {
11681 		      tem = VECTOR_CST_ELT (arg0, idx);
11682 		      if (VECTOR_TYPE_P (type))
11683 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11684 		      return tem;
11685 		    }
11686 
11687 		  tree_vector_builder vals (type, n, 1);
11688 		  for (unsigned i = 0; i < n; ++i)
11689 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11690 		  return vals.build ();
11691 		}
11692 	    }
11693 	}
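      /* E.g. (a sketch): BIT_FIELD_REF <v, 32, 64> on a constant
	 vector of 32-bit elements has idx / width == 2 and
	 n / width == 1, so it returns element 2 of the VECTOR_CST
	 directly.  */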
11694 
11695       /* On constants we can use native encode/interpret to constant
11696          fold (nearly) all BIT_FIELD_REFs.  */
11697       if (CONSTANT_CLASS_P (arg0)
11698 	  && can_native_interpret_type_p (type)
11699 	  && BITS_PER_UNIT == 8
11700 	  && tree_fits_uhwi_p (op1)
11701 	  && tree_fits_uhwi_p (op2))
11702 	{
11703 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11704 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11705 	  /* Limit us to a reasonable amount of work.  To relax the
11706 	     other limitations we need bit-shifting of the buffer
11707 	     and rounding up the size.  */
11708 	  if (bitpos % BITS_PER_UNIT == 0
11709 	      && bitsize % BITS_PER_UNIT == 0
11710 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11711 	    {
11712 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11713 	      unsigned HOST_WIDE_INT len
11714 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11715 				      bitpos / BITS_PER_UNIT);
11716 	      if (len > 0
11717 		  && len * BITS_PER_UNIT >= bitsize)
11718 		{
11719 		  tree v = native_interpret_expr (type, b,
11720 						  bitsize / BITS_PER_UNIT);
11721 		  if (v)
11722 		    return v;
11723 		}
11724 	    }
11725 	}
11726 
11727       return NULL_TREE;
11728 
11729     case FMA_EXPR:
11730       /* For integers we can decompose the FMA if possible.  */
11731       if (TREE_CODE (arg0) == INTEGER_CST
11732 	  && TREE_CODE (arg1) == INTEGER_CST)
11733 	return fold_build2_loc (loc, PLUS_EXPR, type,
11734 				const_binop (MULT_EXPR, arg0, arg1), arg2);
11735       if (integer_zerop (arg2))
11736 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11737 
11738       return fold_fma (loc, type, arg0, arg1, arg2);
11739 
11740     case VEC_PERM_EXPR:
11741       if (TREE_CODE (arg2) == VECTOR_CST)
11742 	{
11743 	  /* Build a vector of integers from the tree mask.  */
11744 	  vec_perm_builder builder;
11745 	  if (!tree_to_vec_perm_builder (&builder, arg2))
11746 	    return NULL_TREE;
11747 
11748 	  /* Create a vec_perm_indices for the integer vector.  */
11749 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11750 	  bool single_arg = (op0 == op1);
11751 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11752 
11753 	  /* Check for cases that fold to OP0 or OP1 in their original
11754 	     element order.  */
11755 	  if (sel.series_p (0, 1, 0, 1))
11756 	    return op0;
11757 	  if (sel.series_p (0, 1, nelts, 1))
11758 	    return op1;
11759 
11760 	  if (!single_arg)
11761 	    {
11762 	      if (sel.all_from_input_p (0))
11763 		op1 = op0;
11764 	      else if (sel.all_from_input_p (1))
11765 		{
11766 		  op0 = op1;
11767 		  sel.rotate_inputs (1);
11768 		}
11769 	    }
11770 
11771 	  if ((TREE_CODE (op0) == VECTOR_CST
11772 	       || TREE_CODE (op0) == CONSTRUCTOR)
11773 	      && (TREE_CODE (op1) == VECTOR_CST
11774 		  || TREE_CODE (op1) == CONSTRUCTOR))
11775 	    {
11776 	      tree t = fold_vec_perm (type, op0, op1, sel);
11777 	      if (t != NULL_TREE)
11778 		return t;
11779 	    }
11780 
11781 	  bool changed = (op0 == op1 && !single_arg);
11782 
11783 	  /* Generate a canonical form of the selector.  */
11784 	  if (arg2 == op2 && sel.encoding () != builder)
11785 	    {
11786 	      /* Some targets are deficient and fail to expand a single
11787 		 argument permutation while still allowing an equivalent
11788 		 2-argument version.  */
11789 	      if (sel.ninputs () == 2
11790 		  || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11791 		op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11792 	      else
11793 		{
11794 		  vec_perm_indices sel2 (builder, 2, nelts);
11795 		  if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11796 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11797 		  else
11798 		    /* Not directly supported with either encoding,
11799 		       so use the preferred form.  */
11800 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11801 		}
11802 	      changed = true;
11803 	    }
11804 
11805 	  if (changed)
11806 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11807 	}
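      /* E.g. a VEC_PERM_EXPR of v0 and v1 with selector
	 { 0, 1, 2, 3 } is the identity on the first input and folds
	 to v0; { 4, 5, 6, 7 } likewise folds to v1 (illustrative,
	 for 4-element vectors).  */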
11808       return NULL_TREE;
11809 
11810     case BIT_INSERT_EXPR:
11811       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
11812       if (TREE_CODE (arg0) == INTEGER_CST
11813 	  && TREE_CODE (arg1) == INTEGER_CST)
11814 	{
11815 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11816 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11817 	  wide_int tem = (wi::to_wide (arg0)
11818 			  & wi::shifted_mask (bitpos, bitsize, true,
11819 					      TYPE_PRECISION (type)));
11820 	  wide_int tem2
11821 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11822 				    bitsize), bitpos);
11823 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11824 	}
11825       else if (TREE_CODE (arg0) == VECTOR_CST
11826 	       && CONSTANT_CLASS_P (arg1)
11827 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11828 				      TREE_TYPE (arg1)))
11829 	{
11830 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11831 	  unsigned HOST_WIDE_INT elsize
11832 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11833 	  if (bitpos % elsize == 0)
11834 	    {
11835 	      unsigned k = bitpos / elsize;
11836 	      unsigned HOST_WIDE_INT nelts;
11837 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11838 		return arg0;
11839 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11840 		{
11841 		  tree_vector_builder elts (type, nelts, 1);
11842 		  elts.quick_grow (nelts);
11843 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11844 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11845 		  return elts.build ();
11846 		}
11847 	    }
11848 	}
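      /* Worked example (illustrative): inserting the 8-bit constant
	 0xAB at bit position 8 of the 32-bit constant 0x12345678
	 masks out the old bits and ORs in the shifted new ones,
	 giving 0x1234AB78.  */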
11849       return NULL_TREE;
11850 
11851     default:
11852       return NULL_TREE;
11853     } /* switch (code) */
11854 }
11855 
11856 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11857    of an array (or vector).  */
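/* E.g. (an illustrative sketch): for "int a[] = { [2] = 5, 7 }" an
   ACCESS_INDEX of 3 matches the element that implicitly follows the
   explicit [2] initializer, so 7 is returned.  */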
11858 
11859 tree
11860 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11861 {
11862   tree index_type = NULL_TREE;
11863   offset_int low_bound = 0;
11864 
11865   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11866     {
11867       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11868       if (domain_type && TYPE_MIN_VALUE (domain_type))
11869 	{
11870 	  /* Static constructors for variably sized objects make no sense.  */
11871 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11872 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11873 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11874 	}
11875     }
11876 
11877   if (index_type)
11878     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11879 			    TYPE_SIGN (index_type));
11880 
11881   offset_int index = low_bound - 1;
11882   if (index_type)
11883     index = wi::ext (index, TYPE_PRECISION (index_type),
11884 		     TYPE_SIGN (index_type));
11885 
11886   offset_int max_index;
11887   unsigned HOST_WIDE_INT cnt;
11888   tree cfield, cval;
11889 
11890   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11891     {
11892       /* An array constructor might explicitly set the index, or specify
11893 	 a range, or leave the index NULL, meaning that it is the next
11894 	 index after the previous one.  */
11895       if (cfield)
11896 	{
11897 	  if (TREE_CODE (cfield) == INTEGER_CST)
11898 	    max_index = index = wi::to_offset (cfield);
11899 	  else
11900 	    {
11901 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11902 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11903 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11904 	    }
11905 	}
11906       else
11907 	{
11908 	  index += 1;
11909 	  if (index_type)
11910 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11911 			     TYPE_SIGN (index_type));
11912 	  max_index = index;
11913 	}
11914 
11915     /* Do we have match?  */
11916     /* Do we have a match?  */
11917 	&& wi::cmpu (access_index, max_index) <= 0)
11918       return cval;
11919   }
11920   return NULL_TREE;
11921 }
11922 
11923 /* Perform constant folding and related simplification of EXPR.
11924    The related simplifications include x*1 => x, x*0 => 0, etc.,
11925    and application of the associative law.
11926    NOP_EXPR conversions may be removed freely (as long as we
11927    are careful not to change the type of the overall expression).
11928    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11929    but we can constant-fold them if they have constant operands.  */
11930 
11931 #ifdef ENABLE_FOLD_CHECKING
11932 # define fold(x) fold_1 (x)
11933 static tree fold_1 (tree);
11934 static
11935 #endif
11936 tree
11937 fold (tree expr)
11938 {
11939   const tree t = expr;
11940   enum tree_code code = TREE_CODE (t);
11941   enum tree_code_class kind = TREE_CODE_CLASS (code);
11942   tree tem;
11943   location_t loc = EXPR_LOCATION (expr);
11944 
11945   /* Return right away if a constant.  */
11946   if (kind == tcc_constant)
11947     return t;
11948 
11949   /* CALL_EXPR-like objects with variable numbers of operands are
11950      treated specially.  */
11951   if (kind == tcc_vl_exp)
11952     {
11953       if (code == CALL_EXPR)
11954 	{
11955 	  tem = fold_call_expr (loc, expr, false);
11956 	  return tem ? tem : expr;
11957 	}
11958       return expr;
11959     }
11960 
11961   if (IS_EXPR_CODE_CLASS (kind))
11962     {
11963       tree type = TREE_TYPE (t);
11964       tree op0, op1, op2;
11965 
11966       switch (TREE_CODE_LENGTH (code))
11967 	{
11968 	case 1:
11969 	  op0 = TREE_OPERAND (t, 0);
11970 	  tem = fold_unary_loc (loc, code, type, op0);
11971 	  return tem ? tem : expr;
11972 	case 2:
11973 	  op0 = TREE_OPERAND (t, 0);
11974 	  op1 = TREE_OPERAND (t, 1);
11975 	  tem = fold_binary_loc (loc, code, type, op0, op1);
11976 	  return tem ? tem : expr;
11977 	case 3:
11978 	  op0 = TREE_OPERAND (t, 0);
11979 	  op1 = TREE_OPERAND (t, 1);
11980 	  op2 = TREE_OPERAND (t, 2);
11981 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11982 	  return tem ? tem : expr;
11983 	default:
11984 	  break;
11985 	}
11986     }
11987 
11988   switch (code)
11989     {
11990     case ARRAY_REF:
11991       {
11992 	tree op0 = TREE_OPERAND (t, 0);
11993 	tree op1 = TREE_OPERAND (t, 1);
11994 
11995 	if (TREE_CODE (op1) == INTEGER_CST
11996 	    && TREE_CODE (op0) == CONSTRUCTOR
11997 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11998 	  {
11999 	    tree val = get_array_ctor_element_at_index (op0,
12000 							wi::to_offset (op1));
12001 	    if (val)
12002 	      return val;
12003 	  }
12004 
12005 	return t;
12006       }
12007 
12008       /* Return a VECTOR_CST if possible.  */
12009     case CONSTRUCTOR:
12010       {
12011 	tree type = TREE_TYPE (t);
12012 	if (TREE_CODE (type) != VECTOR_TYPE)
12013 	  return t;
12014 
12015 	unsigned i;
12016 	tree val;
12017 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12018 	  if (! CONSTANT_CLASS_P (val))
12019 	    return t;
12020 
12021 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12022       }
12023 
12024     case CONST_DECL:
12025       return fold (DECL_INITIAL (t));
12026 
12027     default:
12028       return t;
12029     } /* switch (code) */
12030 }
12031 
12032 #ifdef ENABLE_FOLD_CHECKING
12033 #undef fold
12034 
12035 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12036 				hash_table<nofree_ptr_hash<const tree_node> > *);
12037 static void fold_check_failed (const_tree, const_tree);
12038 void print_fold_checksum (const_tree);
12039 
12040 /* When --enable-checking=fold, compute a digest of expr before
12041    and after the actual fold call to verify that fold did not
12042    accidentally change the original expr.  */
12043 
12044 tree
12045 fold (tree expr)
12046 {
12047   tree ret;
12048   struct md5_ctx ctx;
12049   unsigned char checksum_before[16], checksum_after[16];
12050   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12051 
12052   md5_init_ctx (&ctx);
12053   fold_checksum_tree (expr, &ctx, &ht);
12054   md5_finish_ctx (&ctx, checksum_before);
12055   ht.empty ();
12056 
12057   ret = fold_1 (expr);
12058 
12059   md5_init_ctx (&ctx);
12060   fold_checksum_tree (expr, &ctx, &ht);
12061   md5_finish_ctx (&ctx, checksum_after);
12062 
12063   if (memcmp (checksum_before, checksum_after, 16))
12064     fold_check_failed (expr, ret);
12065 
12066   return ret;
12067 }
12068 
12069 void
12070 print_fold_checksum (const_tree expr)
12071 {
12072   struct md5_ctx ctx;
12073   unsigned char checksum[16], cnt;
12074   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12075 
12076   md5_init_ctx (&ctx);
12077   fold_checksum_tree (expr, &ctx, &ht);
12078   md5_finish_ctx (&ctx, checksum);
12079   for (cnt = 0; cnt < 16; ++cnt)
12080     fprintf (stderr, "%02x", checksum[cnt]);
12081   putc ('\n', stderr);
12082 }
12083 
12084 static void
12085 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12086 {
12087   internal_error ("fold check: original tree changed by fold");
12088 }
12089 
12090 static void
12091 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12092 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12093 {
12094   const tree_node **slot;
12095   enum tree_code code;
12096   union tree_node buf;
12097   int i, len;
12098 
12099  recursive_label:
12100   if (expr == NULL)
12101     return;
12102   slot = ht->find_slot (expr, INSERT);
12103   if (*slot != NULL)
12104     return;
12105   *slot = expr;
12106   code = TREE_CODE (expr);
12107   if (TREE_CODE_CLASS (code) == tcc_declaration
12108       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12109     {
12110       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12111       memcpy ((char *) &buf, expr, tree_size (expr));
12112       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12113       buf.decl_with_vis.symtab_node = NULL;
12114       expr = (tree) &buf;
12115     }
12116   else if (TREE_CODE_CLASS (code) == tcc_type
12117 	   && (TYPE_POINTER_TO (expr)
12118 	       || TYPE_REFERENCE_TO (expr)
12119 	       || TYPE_CACHED_VALUES_P (expr)
12120 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12121 	       || TYPE_NEXT_VARIANT (expr)
12122 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12123     {
12124       /* Allow these fields to be modified.  */
12125       tree tmp;
12126       memcpy ((char *) &buf, expr, tree_size (expr));
12127       expr = tmp = (tree) &buf;
12128       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12129       TYPE_POINTER_TO (tmp) = NULL;
12130       TYPE_REFERENCE_TO (tmp) = NULL;
12131       TYPE_NEXT_VARIANT (tmp) = NULL;
12132       TYPE_ALIAS_SET (tmp) = -1;
12133       if (TYPE_CACHED_VALUES_P (tmp))
12134 	{
12135 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12136 	  TYPE_CACHED_VALUES (tmp) = NULL;
12137 	}
12138     }
12139   md5_process_bytes (expr, tree_size (expr), ctx);
12140   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12141     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12142   if (TREE_CODE_CLASS (code) != tcc_type
12143       && TREE_CODE_CLASS (code) != tcc_declaration
12144       && code != TREE_LIST
12145       && code != SSA_NAME
12146       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12147     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12148   switch (TREE_CODE_CLASS (code))
12149     {
12150     case tcc_constant:
12151       switch (code)
12152 	{
12153 	case STRING_CST:
12154 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12155 			     TREE_STRING_LENGTH (expr), ctx);
12156 	  break;
12157 	case COMPLEX_CST:
12158 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12159 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12160 	  break;
12161 	case VECTOR_CST:
12162 	  len = vector_cst_encoded_nelts (expr);
12163 	  for (i = 0; i < len; ++i)
12164 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12165 	  break;
12166 	default:
12167 	  break;
12168 	}
12169       break;
12170     case tcc_exceptional:
12171       switch (code)
12172 	{
12173 	case TREE_LIST:
12174 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12175 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12176 	  expr = TREE_CHAIN (expr);
12177 	  goto recursive_label;
12178 	  break;
12179 	case TREE_VEC:
12180 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12181 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12182 	  break;
12183 	default:
12184 	  break;
12185 	}
12186       break;
12187     case tcc_expression:
12188     case tcc_reference:
12189     case tcc_comparison:
12190     case tcc_unary:
12191     case tcc_binary:
12192     case tcc_statement:
12193     case tcc_vl_exp:
12194       len = TREE_OPERAND_LENGTH (expr);
12195       for (i = 0; i < len; ++i)
12196 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12197       break;
12198     case tcc_declaration:
12199       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12200       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12201       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12202 	{
12203 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12204 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12205 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12206 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12207 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12208 	}
12209 
12210       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12211 	{
12212 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12213 	    {
12214 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12215 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12216 	    }
12217 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12218 	}
12219       break;
12220     case tcc_type:
12221       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12222         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12223       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12224       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12225       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12226       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12227       if (INTEGRAL_TYPE_P (expr)
12228           || SCALAR_FLOAT_TYPE_P (expr))
12229 	{
12230 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12231 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12232 	}
12233       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12234       if (TREE_CODE (expr) == RECORD_TYPE
12235 	  || TREE_CODE (expr) == UNION_TYPE
12236 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12237 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12238       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12239       break;
12240     default:
12241       break;
12242     }
12243 }
12244 
12245 /* Helper function for outputting the checksum of a tree T.  When
12246    debugging with gdb, you can "define mynext" to be "next" followed
12247    by "call debug_fold_checksum (op0)", then just trace down till the
12248    outputs differ.  */
12249 
12250 DEBUG_FUNCTION void
12251 debug_fold_checksum (const_tree t)
12252 {
12253   int i;
12254   unsigned char checksum[16];
12255   struct md5_ctx ctx;
12256   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12257 
12258   md5_init_ctx (&ctx);
12259   fold_checksum_tree (t, &ctx, &ht);
12260   md5_finish_ctx (&ctx, checksum);
12261   ht.empty ();
12262 
12263   for (i = 0; i < 16; i++)
12264     fprintf (stderr, "%d ", checksum[i]);
12265 
12266   fprintf (stderr, "\n");
12267 }
12268 
12269 #endif
12270 
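/* A minimal sketch, kept disabled, of the digest-before/digest-after
   pattern the checking wrappers above use, reduced to a plain byte
   buffer; the helper name is hypothetical.  */
#if 0
static bool
example_buffer_unchanged (const void *buf, size_t len)
{
  struct md5_ctx ctx;
  unsigned char before[16], after[16];

  md5_init_ctx (&ctx);
  md5_process_bytes (buf, len, &ctx);
  md5_finish_ctx (&ctx, before);

  /* ... the code under test would run here ... */

  md5_init_ctx (&ctx);
  md5_process_bytes (buf, len, &ctx);
  md5_finish_ctx (&ctx, after);

  return memcmp (before, after, 16) == 0;
}
#endif
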
12271 /* Fold a unary tree expression with code CODE of type TYPE with an
12272    operand OP0.  LOC is the location of the resulting expression.
12273    Return a folded expression if successful.  Otherwise, return a tree
12274    expression with code CODE of type TYPE with an operand OP0.  */
12275 
12276 tree
12277 fold_build1_loc (location_t loc,
12278 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12279 {
12280   tree tem;
12281 #ifdef ENABLE_FOLD_CHECKING
12282   unsigned char checksum_before[16], checksum_after[16];
12283   struct md5_ctx ctx;
12284   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12285 
12286   md5_init_ctx (&ctx);
12287   fold_checksum_tree (op0, &ctx, &ht);
12288   md5_finish_ctx (&ctx, checksum_before);
12289   ht.empty ();
12290 #endif
12291 
12292   tem = fold_unary_loc (loc, code, type, op0);
12293   if (!tem)
12294     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12295 
12296 #ifdef ENABLE_FOLD_CHECKING
12297   md5_init_ctx (&ctx);
12298   fold_checksum_tree (op0, &ctx, &ht);
12299   md5_finish_ctx (&ctx, checksum_after);
12300 
12301   if (memcmp (checksum_before, checksum_after, 16))
12302     fold_check_failed (op0, tem);
12303 #endif
12304   return tem;
12305 }
12306 
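/* A minimal usage sketch, kept disabled; the example_* helper is
   hypothetical.  Negating the constant 5 succeeds in fold_unary_loc,
   so no NEGATE_EXPR node is ever built.  */
#if 0
static tree
example_fold_build1 (location_t loc)
{
  tree five = build_int_cst (integer_type_node, 5);
  /* Returns the INTEGER_CST -5 directly.  */
  return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, five);
}
#endif
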
12307 /* Fold a binary tree expression with code CODE of type TYPE with
12308    operands OP0 and OP1.  LOC is the location of the resulting
12309    expression.  Return a folded expression if successful.  Otherwise,
12310    return a tree expression with code CODE of type TYPE with operands
12311    OP0 and OP1.  */
12312 
12313 tree
12314 fold_build2_loc (location_t loc,
12315 		      enum tree_code code, tree type, tree op0, tree op1
12316 		      MEM_STAT_DECL)
12317 {
12318   tree tem;
12319 #ifdef ENABLE_FOLD_CHECKING
12320   unsigned char checksum_before_op0[16],
12321                 checksum_before_op1[16],
12322 		checksum_after_op0[16],
12323 		checksum_after_op1[16];
12324   struct md5_ctx ctx;
12325   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12326 
12327   md5_init_ctx (&ctx);
12328   fold_checksum_tree (op0, &ctx, &ht);
12329   md5_finish_ctx (&ctx, checksum_before_op0);
12330   ht.empty ();
12331 
12332   md5_init_ctx (&ctx);
12333   fold_checksum_tree (op1, &ctx, &ht);
12334   md5_finish_ctx (&ctx, checksum_before_op1);
12335   ht.empty ();
12336 #endif
12337 
12338   tem = fold_binary_loc (loc, code, type, op0, op1);
12339   if (!tem)
12340     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12341 
12342 #ifdef ENABLE_FOLD_CHECKING
12343   md5_init_ctx (&ctx);
12344   fold_checksum_tree (op0, &ctx, &ht);
12345   md5_finish_ctx (&ctx, checksum_after_op0);
12346   ht.empty ();
12347 
12348   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12349     fold_check_failed (op0, tem);
12350 
12351   md5_init_ctx (&ctx);
12352   fold_checksum_tree (op1, &ctx, &ht);
12353   md5_finish_ctx (&ctx, checksum_after_op1);
12354 
12355   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12356     fold_check_failed (op1, tem);
12357 #endif
12358   return tem;
12359 }
12360 
12361 /* Fold a ternary tree expression with code CODE of type TYPE with
12362    operands OP0, OP1, and OP2.  Return a folded expression if
12363    successful.  Otherwise, return a tree expression with code CODE of
12364    type TYPE with operands OP0, OP1, and OP2.  */
12365 
12366 tree
12367 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12368 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12369 {
12370   tree tem;
12371 #ifdef ENABLE_FOLD_CHECKING
12372   unsigned char checksum_before_op0[16],
12373                 checksum_before_op1[16],
12374                 checksum_before_op2[16],
12375 		checksum_after_op0[16],
12376 		checksum_after_op1[16],
12377 		checksum_after_op2[16];
12378   struct md5_ctx ctx;
12379   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12380 
12381   md5_init_ctx (&ctx);
12382   fold_checksum_tree (op0, &ctx, &ht);
12383   md5_finish_ctx (&ctx, checksum_before_op0);
12384   ht.empty ();
12385 
12386   md5_init_ctx (&ctx);
12387   fold_checksum_tree (op1, &ctx, &ht);
12388   md5_finish_ctx (&ctx, checksum_before_op1);
12389   ht.empty ();
12390 
12391   md5_init_ctx (&ctx);
12392   fold_checksum_tree (op2, &ctx, &ht);
12393   md5_finish_ctx (&ctx, checksum_before_op2);
12394   ht.empty ();
12395 #endif
12396 
12397   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12398   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12399   if (!tem)
12400     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12401 
12402 #ifdef ENABLE_FOLD_CHECKING
12403   md5_init_ctx (&ctx);
12404   fold_checksum_tree (op0, &ctx, &ht);
12405   md5_finish_ctx (&ctx, checksum_after_op0);
12406   ht.empty ();
12407 
12408   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12409     fold_check_failed (op0, tem);
12410 
12411   md5_init_ctx (&ctx);
12412   fold_checksum_tree (op1, &ctx, &ht);
12413   md5_finish_ctx (&ctx, checksum_after_op1);
12414   ht.empty ();
12415 
12416   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12417     fold_check_failed (op1, tem);
12418 
12419   md5_init_ctx (&ctx);
12420   fold_checksum_tree (op2, &ctx, &ht);
12421   md5_finish_ctx (&ctx, checksum_after_op2);
12422 
12423   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12424     fold_check_failed (op2, tem);
12425 #endif
12426   return tem;
12427 }
12428 
12429 /* Fold a CALL_EXPR of type TYPE with function FN, NARGS arguments
12430    in ARGARRAY, and a null static chain.
12431    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12432    of type TYPE from the given operands as constructed by build_call_array.  */
12433 
12434 tree
12435 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12436 			   int nargs, tree *argarray)
12437 {
12438   tree tem;
12439 #ifdef ENABLE_FOLD_CHECKING
12440   unsigned char checksum_before_fn[16],
12441                 checksum_before_arglist[16],
12442 		checksum_after_fn[16],
12443 		checksum_after_arglist[16];
12444   struct md5_ctx ctx;
12445   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12446   int i;
12447 
12448   md5_init_ctx (&ctx);
12449   fold_checksum_tree (fn, &ctx, &ht);
12450   md5_finish_ctx (&ctx, checksum_before_fn);
12451   ht.empty ();
12452 
12453   md5_init_ctx (&ctx);
12454   for (i = 0; i < nargs; i++)
12455     fold_checksum_tree (argarray[i], &ctx, &ht);
12456   md5_finish_ctx (&ctx, checksum_before_arglist);
12457   ht.empty ();
12458 #endif
12459 
12460   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12461   if (!tem)
12462     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12463 
12464 #ifdef ENABLE_FOLD_CHECKING
12465   md5_init_ctx (&ctx);
12466   fold_checksum_tree (fn, &ctx, &ht);
12467   md5_finish_ctx (&ctx, checksum_after_fn);
12468   ht.empty ();
12469 
12470   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12471     fold_check_failed (fn, tem);
12472 
12473   md5_init_ctx (&ctx);
12474   for (i = 0; i < nargs; i++)
12475     fold_checksum_tree (argarray[i], &ctx, &ht);
12476   md5_finish_ctx (&ctx, checksum_after_arglist);
12477 
12478   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12479     fold_check_failed (NULL_TREE, tem);
12480 #endif
12481   return tem;
12482 }
12483 
12484 /* Perform constant folding and related simplification of initializer
12485    expression EXPR.  These behave identically to "fold_buildN" but ignore
12486    potential run-time traps and exceptions that fold must preserve.  */
12487 
12488 #define START_FOLD_INIT \
12489   int saved_signaling_nans = flag_signaling_nans;\
12490   int saved_trapping_math = flag_trapping_math;\
12491   int saved_rounding_math = flag_rounding_math;\
12492   int saved_trapv = flag_trapv;\
12493   int saved_folding_initializer = folding_initializer;\
12494   flag_signaling_nans = 0;\
12495   flag_trapping_math = 0;\
12496   flag_rounding_math = 0;\
12497   flag_trapv = 0;\
12498   folding_initializer = 1;
12499 
12500 #define END_FOLD_INIT \
12501   flag_signaling_nans = saved_signaling_nans;\
12502   flag_trapping_math = saved_trapping_math;\
12503   flag_rounding_math = saved_rounding_math;\
12504   flag_trapv = saved_trapv;\
12505   folding_initializer = saved_folding_initializer;
12506 
12507 tree
12508 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12509 			     tree type, tree op)
12510 {
12511   tree result;
12512   START_FOLD_INIT;
12513 
12514   result = fold_build1_loc (loc, code, type, op);
12515 
12516   END_FOLD_INIT;
12517   return result;
12518 }
12519 
12520 tree
12521 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12522 			     tree type, tree op0, tree op1)
12523 {
12524   tree result;
12525   START_FOLD_INIT;
12526 
12527   result = fold_build2_loc (loc, code, type, op0, op1);
12528 
12529   END_FOLD_INIT;
12530   return result;
12531 }
12532 
12533 tree
12534 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12535 				       int nargs, tree *argarray)
12536 {
12537   tree result;
12538   START_FOLD_INIT;
12539 
12540   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12541 
12542   END_FOLD_INIT;
12543   return result;
12544 }
12545 
12546 #undef START_FOLD_INIT
12547 #undef END_FOLD_INIT
12548 
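/* A minimal sketch, kept disabled, of the initializer variants above;
   the example_* helper and its REAL_CST parameters are hypothetical.
   With -frounding-math, plain fold_build2_loc refuses to fold 1.0/3.0
   because the result depends on the run-time rounding mode; the
   initializer variant clears flag_rounding_math around the fold.  */
#if 0
static tree
example_fold_initializer (tree one, tree three)  /* REAL_CSTs 1.0 and 3.0 */
{
  return fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
				      double_type_node, one, three);
}
#endif
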
12549 /* Determine if the first argument is a multiple of the second argument.
12550    Return 0 if it is not, or if we cannot easily determine it to be.
12551 
12552    An example of the sort of thing we care about (at this point; this routine
12553    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12554    fold cases do now) is discovering that
12555 
12556      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12557 
12558    is a multiple of
12559 
12560      SAVE_EXPR (J * 8)
12561 
12562    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12563 
12564    This code also handles discovering that
12565 
12566      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12567 
12568    is a multiple of 8 so we don't have to worry about dealing with a
12569    possible remainder.
12570 
12571    Note that we *look* inside a SAVE_EXPR only to determine how it was
12572    calculated; it is not safe for fold to do much of anything else with the
12573    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12574    at run time.  For example, the latter example above *cannot* be implemented
12575    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12576    evaluation time of the original SAVE_EXPR is not necessarily the same at
12577    the time the new expression is evaluated.  The only optimization of this
12578    sort that would be valid is changing
12579 
12580      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12581 
12582    divided by 8 to
12583 
12584      SAVE_EXPR (I) * SAVE_EXPR (J)
12585 
12586    (where the same SAVE_EXPR (J) is used in the original and the
12587    transformed version).  */
12588 
12589 int
12590 multiple_of_p (tree type, const_tree top, const_tree bottom)
12591 {
12592   gimple *stmt;
12593   tree t1, op1, op2;
12594 
12595   if (operand_equal_p (top, bottom, 0))
12596     return 1;
12597 
12598   if (TREE_CODE (type) != INTEGER_TYPE)
12599     return 0;
12600 
12601   switch (TREE_CODE (top))
12602     {
12603     case BIT_AND_EXPR:
12604       /* Bitwise and provides a power of two multiple.  If the mask is
12605 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12606       if (!integer_pow2p (bottom))
12607 	return 0;
12608       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12609 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12610 
12611     case MULT_EXPR:
12612       if (TREE_CODE (bottom) == INTEGER_CST)
12613 	{
12614 	  op1 = TREE_OPERAND (top, 0);
12615 	  op2 = TREE_OPERAND (top, 1);
12616 	  if (TREE_CODE (op1) == INTEGER_CST)
12617 	    std::swap (op1, op2);
12618 	  if (TREE_CODE (op2) == INTEGER_CST)
12619 	    {
12620 	      if (multiple_of_p (type, op2, bottom))
12621 		return 1;
12622 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
12623 	      if (multiple_of_p (type, bottom, op2))
12624 		{
12625 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12626 						 wi::to_widest (op2));
12627 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12628 		    {
12629 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12630 		      return multiple_of_p (type, op1, op2);
12631 		    }
12632 		}
12633 	      return multiple_of_p (type, op1, bottom);
12634 	    }
12635 	}
12636       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12637 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12638 
12639     case MINUS_EXPR:
12640       /* We cannot prove precisely whether op0 - op1 is a multiple of
12641 	 bottom, so be conservative here and check that both op0 and op1
12642 	 are multiples of bottom.  Note we check the second operand first
12643 	 since it's usually simpler.  */
12644       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12645 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12646 
12647     case PLUS_EXPR:
12648       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12649 	 as op0 - 3 if the expression has unsigned type.  For example,
12650 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12651       op1 = TREE_OPERAND (top, 1);
12652       if (TYPE_UNSIGNED (type)
12653 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12654 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12655       return (multiple_of_p (type, op1, bottom)
12656 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12657 
12658     case LSHIFT_EXPR:
12659       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12660 	{
12661 	  op1 = TREE_OPERAND (top, 1);
12662 	  /* const_binop may not detect overflow correctly,
12663 	     so check for it explicitly here.  */
12664 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12665 			 wi::to_wide (op1))
12666 	      && (t1 = fold_convert (type,
12667 				     const_binop (LSHIFT_EXPR, size_one_node,
12668 						  op1))) != 0
12669 	      && !TREE_OVERFLOW (t1))
12670 	    return multiple_of_p (type, t1, bottom);
12671 	}
12672       return 0;
12673 
12674     case NOP_EXPR:
12675       /* Can't handle conversions from non-integral or wider integral types.  */
12676       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12677 	  || (TYPE_PRECISION (type)
12678 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12679 	return 0;
12680 
12681       /* fall through */
12682 
12683     case SAVE_EXPR:
12684       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12685 
12686     case COND_EXPR:
12687       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12688 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12689 
12690     case INTEGER_CST:
12691       if (TREE_CODE (bottom) != INTEGER_CST
12692 	  || integer_zerop (bottom)
12693 	  || (TYPE_UNSIGNED (type)
12694 	      && (tree_int_cst_sgn (top) < 0
12695 		  || tree_int_cst_sgn (bottom) < 0)))
12696 	return 0;
12697       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12698 				SIGNED);
12699 
12700     case SSA_NAME:
12701       if (TREE_CODE (bottom) == INTEGER_CST
12702 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12703 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12704 	{
12705 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12706 
12707 	  /* Check for special cases to see if top is defined as a multiple
12708 	     of bottom:
12709 
12710 	       top = X & ~(bottom - 1) ; bottom is a power of 2
12711 
12712 	     or
12713 
12714 	       Y = X % bottom
12715 	       top = X - Y.  */
12716 	  if (code == BIT_AND_EXPR
12717 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12718 	      && TREE_CODE (op2) == INTEGER_CST
12719 	      && integer_pow2p (bottom)
12720 	      && wi::multiple_of_p (wi::to_widest (op2),
12721 				    wi::to_widest (bottom), UNSIGNED))
12722 	    return 1;
12723 
12724 	  op1 = gimple_assign_rhs1 (stmt);
12725 	  if (code == MINUS_EXPR
12726 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12727 	      && TREE_CODE (op2) == SSA_NAME
12728 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12729 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12730 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12731 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12732 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12733 	    return 1;
12734 	}
12735 
12736       /* fall through */
12737 
12738     default:
12739       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12740 	return multiple_p (wi::to_poly_widest (top),
12741 			   wi::to_poly_widest (bottom));
12742 
12743       return 0;
12744     }
12745 }
12746 
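/* A minimal sketch, kept disabled, of a query multiple_of_p answers;
   the example_* helper is hypothetical and I is assumed to be a
   non-constant expression of type sizetype.  */
#if 0
static int
example_multiple_of_p (tree i)
{
  /* TOP is the MULT_EXPR i * 8.  The MULT_EXPR case sees that the
     INTEGER_CST operand 8 is itself a multiple of 4, so it returns 1
     without inspecting I at all.  */
  tree top = fold_build2 (MULT_EXPR, sizetype, i, size_int (8));
  return multiple_of_p (sizetype, top, size_int (4));
}
#endif
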
12747 #define tree_expr_nonnegative_warnv_p(X, Y) \
12748   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12749 
12750 #define RECURSE(X) \
12751   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12752 
12753 /* Return true if CODE or TYPE is known to be non-negative. */
12754 
12755 static bool
12756 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12757 {
12758   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12759       && truth_value_p (code))
12760     /* Truth values evaluate to 0 or 1, which are nonnegative unless we
12761        have a signed:1 type (where the values are -1 and 0).  */
12762     return true;
12763   return false;
12764 }
12765 
12766 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12767    value is based on the assumption that signed overflow is undefined,
12768    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12769    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12770 
12771 bool
12772 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12773 				bool *strict_overflow_p, int depth)
12774 {
12775   if (TYPE_UNSIGNED (type))
12776     return true;
12777 
12778   switch (code)
12779     {
12780     case ABS_EXPR:
12781       /* We can't return 1 if flag_wrapv is set because
12782 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12783       if (!ANY_INTEGRAL_TYPE_P (type))
12784 	return true;
12785       if (TYPE_OVERFLOW_UNDEFINED (type))
12786 	{
12787 	  *strict_overflow_p = true;
12788 	  return true;
12789 	}
12790       break;
12791 
12792     case NON_LVALUE_EXPR:
12793     case FLOAT_EXPR:
12794     case FIX_TRUNC_EXPR:
12795       return RECURSE (op0);
12796 
12797     CASE_CONVERT:
12798       {
12799 	tree inner_type = TREE_TYPE (op0);
12800 	tree outer_type = type;
12801 
12802 	if (TREE_CODE (outer_type) == REAL_TYPE)
12803 	  {
12804 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12805 	      return RECURSE (op0);
12806 	    if (INTEGRAL_TYPE_P (inner_type))
12807 	      {
12808 		if (TYPE_UNSIGNED (inner_type))
12809 		  return true;
12810 		return RECURSE (op0);
12811 	      }
12812 	  }
12813 	else if (INTEGRAL_TYPE_P (outer_type))
12814 	  {
12815 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12816 	      return RECURSE (op0);
12817 	    if (INTEGRAL_TYPE_P (inner_type))
12818 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12819 		      && TYPE_UNSIGNED (inner_type);
12820 	  }
12821       }
12822       break;
12823 
12824     default:
12825       return tree_simple_nonnegative_warnv_p (code, type);
12826     }
12827 
12828   /* We don't know sign of `t', so be conservative and return false.  */
12829   return false;
12830 }
12831 
12832 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12833    value is based on the assumption that signed overflow is undefined,
12834    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12835    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12836 
12837 bool
12838 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12839 				 tree op1, bool *strict_overflow_p,
12840 				 int depth)
12841 {
12842   if (TYPE_UNSIGNED (type))
12843     return true;
12844 
12845   switch (code)
12846     {
12847     case POINTER_PLUS_EXPR:
12848     case PLUS_EXPR:
12849       if (FLOAT_TYPE_P (type))
12850 	return RECURSE (op0) && RECURSE (op1);
12851 
12852       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12853 	 both unsigned and at least 2 bits shorter than the result.  */
12854       if (TREE_CODE (type) == INTEGER_TYPE
12855 	  && TREE_CODE (op0) == NOP_EXPR
12856 	  && TREE_CODE (op1) == NOP_EXPR)
12857 	{
12858 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12859 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12860 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12861 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12862 	    {
12863 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12864 				       TYPE_PRECISION (inner2)) + 1;
12865 	      return prec < TYPE_PRECISION (type);
12866 	    }
12867 	}
12868       break;
12869 
12870     case MULT_EXPR:
12871       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12872 	{
12873 	  /* x * x is always non-negative for floating point x, or when
12874 	     signed overflow is undefined.  */
12875 	  if (operand_equal_p (op0, op1, 0)
12876 	      || (RECURSE (op0) && RECURSE (op1)))
12877 	    {
12878 	      if (ANY_INTEGRAL_TYPE_P (type)
12879 		  && TYPE_OVERFLOW_UNDEFINED (type))
12880 		*strict_overflow_p = true;
12881 	      return true;
12882 	    }
12883 	}
12884 
12885       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12886 	 both unsigned and together have fewer bits than the result.  */
12887       if (TREE_CODE (type) == INTEGER_TYPE
12888 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12889 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12890 	{
12891 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12892 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12893 	    : TREE_TYPE (op0);
12894 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12895 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12896 	    : TREE_TYPE (op1);
12897 
12898 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12899 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12900 
12901 	  if (TREE_CODE (op0) == INTEGER_CST)
12902 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12903 
12904 	  if (TREE_CODE (op1) == INTEGER_CST)
12905 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12906 
12907 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12908 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12909 	    {
12910 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12911 		? tree_int_cst_min_precision (op0, UNSIGNED)
12912 		: TYPE_PRECISION (inner0);
12913 
12914 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12915 		? tree_int_cst_min_precision (op1, UNSIGNED)
12916 		: TYPE_PRECISION (inner1);
12917 
12918 	      return precision0 + precision1 < TYPE_PRECISION (type);
12919 	    }
12920 	}
12921       return false;
12922 
12923     case BIT_AND_EXPR:
12924     case MAX_EXPR:
12925       return RECURSE (op0) || RECURSE (op1);
12926 
12927     case BIT_IOR_EXPR:
12928     case BIT_XOR_EXPR:
12929     case MIN_EXPR:
12930     case RDIV_EXPR:
12931     case TRUNC_DIV_EXPR:
12932     case CEIL_DIV_EXPR:
12933     case FLOOR_DIV_EXPR:
12934     case ROUND_DIV_EXPR:
12935       return RECURSE (op0) && RECURSE (op1);
12936 
12937     case TRUNC_MOD_EXPR:
12938       return RECURSE (op0);
12939 
12940     case FLOOR_MOD_EXPR:
12941       return RECURSE (op1);
12942 
12943     case CEIL_MOD_EXPR:
12944     case ROUND_MOD_EXPR:
12945     default:
12946       return tree_simple_nonnegative_warnv_p (code, type);
12947     }
12948 
12949   /* We don't know sign of `t', so be conservative and return false.  */
12950   return false;
12951 }
12952 
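/* A minimal sketch, kept disabled, of the PLUS_EXPR zero-extension rule
   above; the example_* helper is hypothetical.  OP0 and OP1 are assumed
   to be NOP_EXPRs zero-extending unsigned char values to int.  */
#if 0
static bool
example_plus_nonnegative (tree op0, tree op1)
{
  /* MAX (8, 8) + 1 == 9 < 32, so the sum cannot reach the sign bit
     of the 32-bit result and is known non-negative.  */
  bool ovf = false;
  return tree_binary_nonnegative_warnv_p (PLUS_EXPR, integer_type_node,
					  op0, op1, &ovf, 0);
}
#endif
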
12953 /* Return true if T is known to be non-negative.  If the return
12954    value is based on the assumption that signed overflow is undefined,
12955    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12956    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12957 
12958 bool
12959 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12960 {
12961   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12962     return true;
12963 
12964   switch (TREE_CODE (t))
12965     {
12966     case INTEGER_CST:
12967       return tree_int_cst_sgn (t) >= 0;
12968 
12969     case REAL_CST:
12970       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12971 
12972     case FIXED_CST:
12973       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12974 
12975     case COND_EXPR:
12976       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12977 
12978     case SSA_NAME:
12979       /* Limit the depth of recursion to avoid quadratic behavior.
12980 	 This is expected to catch almost all occurrences in practice.
12981 	 If this code misses important cases that unbounded recursion
12982 	 would not, passes that need this information could be revised
12983 	 to provide it through dataflow propagation.  */
12984       return (!name_registered_for_update_p (t)
12985 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12986 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12987 						  strict_overflow_p, depth));
12988 
12989     default:
12990       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12991     }
12992 }
12993 
12994 /* Return true if T is known to be non-negative.  If the return
12995    value is based on the assumption that signed overflow is undefined,
12996    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12997    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12998 
12999 bool
13000 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13001 			       bool *strict_overflow_p, int depth)
13002 {
13003   switch (fn)
13004     {
13005     CASE_CFN_ACOS:
13006     CASE_CFN_ACOSH:
13007     CASE_CFN_CABS:
13008     CASE_CFN_COSH:
13009     CASE_CFN_ERFC:
13010     CASE_CFN_EXP:
13011     CASE_CFN_EXP10:
13012     CASE_CFN_EXP2:
13013     CASE_CFN_FABS:
13014     CASE_CFN_FDIM:
13015     CASE_CFN_HYPOT:
13016     CASE_CFN_POW10:
13017     CASE_CFN_FFS:
13018     CASE_CFN_PARITY:
13019     CASE_CFN_POPCOUNT:
13020     CASE_CFN_CLZ:
13021     CASE_CFN_CLRSB:
13022     case CFN_BUILT_IN_BSWAP32:
13023     case CFN_BUILT_IN_BSWAP64:
13024       /* Always true.  */
13025       return true;
13026 
13027     CASE_CFN_SQRT:
13028     CASE_CFN_SQRT_FN:
13029       /* sqrt(-0.0) is -0.0.  */
13030       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13031 	return true;
13032       return RECURSE (arg0);
13033 
13034     CASE_CFN_ASINH:
13035     CASE_CFN_ATAN:
13036     CASE_CFN_ATANH:
13037     CASE_CFN_CBRT:
13038     CASE_CFN_CEIL:
13039     CASE_CFN_CEIL_FN:
13040     CASE_CFN_ERF:
13041     CASE_CFN_EXPM1:
13042     CASE_CFN_FLOOR:
13043     CASE_CFN_FLOOR_FN:
13044     CASE_CFN_FMOD:
13045     CASE_CFN_FREXP:
13046     CASE_CFN_ICEIL:
13047     CASE_CFN_IFLOOR:
13048     CASE_CFN_IRINT:
13049     CASE_CFN_IROUND:
13050     CASE_CFN_LCEIL:
13051     CASE_CFN_LDEXP:
13052     CASE_CFN_LFLOOR:
13053     CASE_CFN_LLCEIL:
13054     CASE_CFN_LLFLOOR:
13055     CASE_CFN_LLRINT:
13056     CASE_CFN_LLROUND:
13057     CASE_CFN_LRINT:
13058     CASE_CFN_LROUND:
13059     CASE_CFN_MODF:
13060     CASE_CFN_NEARBYINT:
13061     CASE_CFN_NEARBYINT_FN:
13062     CASE_CFN_RINT:
13063     CASE_CFN_RINT_FN:
13064     CASE_CFN_ROUND:
13065     CASE_CFN_ROUND_FN:
13066     CASE_CFN_SCALB:
13067     CASE_CFN_SCALBLN:
13068     CASE_CFN_SCALBN:
13069     CASE_CFN_SIGNBIT:
13070     CASE_CFN_SIGNIFICAND:
13071     CASE_CFN_SINH:
13072     CASE_CFN_TANH:
13073     CASE_CFN_TRUNC:
13074     CASE_CFN_TRUNC_FN:
13075       /* True if the 1st argument is nonnegative.  */
13076       return RECURSE (arg0);
13077 
13078     CASE_CFN_FMAX:
13079     CASE_CFN_FMAX_FN:
13080       /* True if the 1st OR 2nd arguments are nonnegative.  */
13081       return RECURSE (arg0) || RECURSE (arg1);
13082 
13083     CASE_CFN_FMIN:
13084     CASE_CFN_FMIN_FN:
13085       /* True if the 1st AND 2nd arguments are nonnegative.  */
13086       return RECURSE (arg0) && RECURSE (arg1);
13087 
13088     CASE_CFN_COPYSIGN:
13089     CASE_CFN_COPYSIGN_FN:
13090       /* True if the 2nd argument is nonnegative.  */
13091       return RECURSE (arg1);
13092 
13093     CASE_CFN_POWI:
13094       /* True if the 1st argument is nonnegative or the second
13095 	 argument is an even integer.  */
13096       if (TREE_CODE (arg1) == INTEGER_CST
13097 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13098 	return true;
13099       return RECURSE (arg0);
13100 
13101     CASE_CFN_POW:
13102       /* True if the 1st argument is nonnegative or the second
13103 	 argument is an even integer valued real.  */
13104       if (TREE_CODE (arg1) == REAL_CST)
13105 	{
13106 	  REAL_VALUE_TYPE c;
13107 	  HOST_WIDE_INT n;
13108 
13109 	  c = TREE_REAL_CST (arg1);
13110 	  n = real_to_integer (&c);
13111 	  if ((n & 1) == 0)
13112 	    {
13113 	      REAL_VALUE_TYPE cint;
13114 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13115 	      if (real_identical (&c, &cint))
13116 		return true;
13117 	    }
13118 	}
13119       return RECURSE (arg0);
13120 
13121     default:
13122       break;
13123     }
13124   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13125 }
13126 
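/* A minimal sketch, kept disabled, of the CASE_CFN_POW rule above;
   the example_* helper is hypothetical.  */
#if 0
static bool
example_pow_nonnegative (tree x)
{
  /* The exponent 2.0 is an even integer-valued REAL_CST, so the result
     of pow (x, 2.0) is non-negative regardless of the sign of X.  */
  tree two = build_real (double_type_node, dconst2);
  bool ovf = false;
  return tree_call_nonnegative_warnv_p (double_type_node, CFN_BUILT_IN_POW,
					x, two, &ovf, 0);
}
#endif
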
13127 /* Return true if T is known to be non-negative.  If the return
13128    value is based on the assumption that signed overflow is undefined,
13129    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13130    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13131 
13132 static bool
13133 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13134 {
13135   enum tree_code code = TREE_CODE (t);
13136   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13137     return true;
13138 
13139   switch (code)
13140     {
13141     case TARGET_EXPR:
13142       {
13143 	tree temp = TARGET_EXPR_SLOT (t);
13144 	t = TARGET_EXPR_INITIAL (t);
13145 
13146 	/* If the initializer is non-void, then it's a normal expression
13147 	   that will be assigned to the slot.  */
13148 	if (!VOID_TYPE_P (t))
13149 	  return RECURSE (t);
13150 
13151 	/* Otherwise, the initializer sets the slot in some way.  One common
13152 	   way is an assignment statement at the end of the initializer.  */
13153 	while (1)
13154 	  {
13155 	    if (TREE_CODE (t) == BIND_EXPR)
13156 	      t = expr_last (BIND_EXPR_BODY (t));
13157 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13158 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13159 	      t = expr_last (TREE_OPERAND (t, 0));
13160 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13161 	      t = expr_last (t);
13162 	    else
13163 	      break;
13164 	  }
13165 	if (TREE_CODE (t) == MODIFY_EXPR
13166 	    && TREE_OPERAND (t, 0) == temp)
13167 	  return RECURSE (TREE_OPERAND (t, 1));
13168 
13169 	return false;
13170       }
13171 
13172     case CALL_EXPR:
13173       {
13174 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
13175 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
13176 
13177 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13178 					      get_call_combined_fn (t),
13179 					      arg0,
13180 					      arg1,
13181 					      strict_overflow_p, depth);
13182       }
13183     case COMPOUND_EXPR:
13184     case MODIFY_EXPR:
13185       return RECURSE (TREE_OPERAND (t, 1));
13186 
13187     case BIND_EXPR:
13188       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13189 
13190     case SAVE_EXPR:
13191       return RECURSE (TREE_OPERAND (t, 0));
13192 
13193     default:
13194       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13195     }
13196 }
13197 
13198 #undef RECURSE
13199 #undef tree_expr_nonnegative_warnv_p
13200 
13201 /* Return true if T is known to be non-negative.  If the return
13202    value is based on the assumption that signed overflow is undefined,
13203    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13204    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13205 
13206 bool
13207 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13208 {
13209   enum tree_code code;
13210   if (t == error_mark_node)
13211     return false;
13212 
13213   code = TREE_CODE (t);
13214   switch (TREE_CODE_CLASS (code))
13215     {
13216     case tcc_binary:
13217     case tcc_comparison:
13218       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13219 					      TREE_TYPE (t),
13220 					      TREE_OPERAND (t, 0),
13221 					      TREE_OPERAND (t, 1),
13222 					      strict_overflow_p, depth);
13223 
13224     case tcc_unary:
13225       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13226 					     TREE_TYPE (t),
13227 					     TREE_OPERAND (t, 0),
13228 					     strict_overflow_p, depth);
13229 
13230     case tcc_constant:
13231     case tcc_declaration:
13232     case tcc_reference:
13233       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13234 
13235     default:
13236       break;
13237     }
13238 
13239   switch (code)
13240     {
13241     case TRUTH_AND_EXPR:
13242     case TRUTH_OR_EXPR:
13243     case TRUTH_XOR_EXPR:
13244       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13245 					      TREE_TYPE (t),
13246 					      TREE_OPERAND (t, 0),
13247 					      TREE_OPERAND (t, 1),
13248 					      strict_overflow_p, depth);
13249     case TRUTH_NOT_EXPR:
13250       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13251 					     TREE_TYPE (t),
13252 					     TREE_OPERAND (t, 0),
13253 					     strict_overflow_p, depth);
13254 
13255     case COND_EXPR:
13256     case CONSTRUCTOR:
13257     case OBJ_TYPE_REF:
13258     case ASSERT_EXPR:
13259     case ADDR_EXPR:
13260     case WITH_SIZE_EXPR:
13261     case SSA_NAME:
13262       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13263 
13264     default:
13265       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13266     }
13267 }
13268 
13269 /* Return true if `t' is known to be non-negative.  Handle warnings
13270    about undefined signed overflow.  */
13271 
13272 bool
13273 tree_expr_nonnegative_p (tree t)
13274 {
13275   bool ret, strict_overflow_p;
13276 
13277   strict_overflow_p = false;
13278   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13279   if (strict_overflow_p)
13280     fold_overflow_warning (("assuming signed overflow does not occur when "
13281 			    "determining that expression is always "
13282 			    "non-negative"),
13283 			   WARN_STRICT_OVERFLOW_MISC);
13284   return ret;
13285 }
13286 
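/* A minimal sketch, kept disabled, of this entry point; the example_*
   helper is hypothetical.  For signed int, ABS_EXPR is non-negative only
   by assuming signed overflow is undefined (ABS of INT_MIN wraps), so
   this call also triggers the -Wstrict-overflow note above.  */
#if 0
static bool
example_abs_nonnegative (tree x)
{
  tree a = fold_build1 (ABS_EXPR, integer_type_node, x);
  return tree_expr_nonnegative_p (a);
}
#endif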
13287 
13288 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13289    For floating point we further ensure that T is not denormal.
13290    Similar logic is present in nonzero_address in rtlanal.h.
13291 
13292    If the return value is based on the assumption that signed overflow
13293    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13294    change *STRICT_OVERFLOW_P.  */
13295 
13296 bool
13297 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13298 				 bool *strict_overflow_p)
13299 {
13300   switch (code)
13301     {
13302     case ABS_EXPR:
13303       return tree_expr_nonzero_warnv_p (op0,
13304 					strict_overflow_p);
13305 
13306     case NOP_EXPR:
13307       {
13308 	tree inner_type = TREE_TYPE (op0);
13309 	tree outer_type = type;
13310 
13311 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13312 		&& tree_expr_nonzero_warnv_p (op0,
13313 					      strict_overflow_p));
13314       }
13315       break;
13316 
13317     case NON_LVALUE_EXPR:
13318       return tree_expr_nonzero_warnv_p (op0,
13319 					strict_overflow_p);
13320 
13321     default:
13322       break;
13323   }
13324 
13325   return false;
13326 }
13327 
13328 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13329    For floating point we further ensure that T is not denormal.
13330    Similar logic is present in nonzero_address in rtlanal.h.
13331 
13332    If the return value is based on the assumption that signed overflow
13333    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13334    change *STRICT_OVERFLOW_P.  */
13335 
13336 bool
13337 tree_binary_nonzero_warnv_p (enum tree_code code,
13338 			     tree type,
13339 			     tree op0,
13340 			     tree op1, bool *strict_overflow_p)
13341 {
13342   bool sub_strict_overflow_p;
13343   switch (code)
13344     {
13345     case POINTER_PLUS_EXPR:
13346     case PLUS_EXPR:
13347       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13348 	{
13349 	  /* In the presence of negative values it is hard
13350 	     to say anything.  */
13351 	  sub_strict_overflow_p = false;
13352 	  if (!tree_expr_nonnegative_warnv_p (op0,
13353 					      &sub_strict_overflow_p)
13354 	      || !tree_expr_nonnegative_warnv_p (op1,
13355 						 &sub_strict_overflow_p))
13356 	    return false;
13357 	  /* One of the operands must be positive and the other non-negative.  */
13358 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13359 	     overflows, on a twos-complement machine the sum of two
13360 	     nonnegative numbers can never be zero.  */
13361 	  return (tree_expr_nonzero_warnv_p (op0,
13362 					     strict_overflow_p)
13363 		  || tree_expr_nonzero_warnv_p (op1,
13364 						strict_overflow_p));
13365 	}
13366       break;
13367 
13368     case MULT_EXPR:
13369       if (TYPE_OVERFLOW_UNDEFINED (type))
13370 	{
13371 	  if (tree_expr_nonzero_warnv_p (op0,
13372 					 strict_overflow_p)
13373 	      && tree_expr_nonzero_warnv_p (op1,
13374 					    strict_overflow_p))
13375 	    {
13376 	      *strict_overflow_p = true;
13377 	      return true;
13378 	    }
13379 	}
13380       break;
13381 
13382     case MIN_EXPR:
13383       sub_strict_overflow_p = false;
13384       if (tree_expr_nonzero_warnv_p (op0,
13385 				     &sub_strict_overflow_p)
13386 	  && tree_expr_nonzero_warnv_p (op1,
13387 					&sub_strict_overflow_p))
13388 	{
13389 	  if (sub_strict_overflow_p)
13390 	    *strict_overflow_p = true;
13391 	}
13392       break;
13393 
13394     case MAX_EXPR:
13395       sub_strict_overflow_p = false;
13396       if (tree_expr_nonzero_warnv_p (op0,
13397 				     &sub_strict_overflow_p))
13398 	{
13399 	  if (sub_strict_overflow_p)
13400 	    *strict_overflow_p = true;
13401 
13402 	  /* When both operands are nonzero, then MAX must be too.  */
13403 	  if (tree_expr_nonzero_warnv_p (op1,
13404 					 strict_overflow_p))
13405 	    return true;
13406 
13407 	  /* MAX where operand 0 is positive is positive.  */
13408 	  return tree_expr_nonnegative_warnv_p (op0,
13409 					       strict_overflow_p);
13410 	}
13411       /* MAX where operand 1 is positive is positive.  */
13412       else if (tree_expr_nonzero_warnv_p (op1,
13413 					  &sub_strict_overflow_p)
13414 	       && tree_expr_nonnegative_warnv_p (op1,
13415 						 &sub_strict_overflow_p))
13416 	{
13417 	  if (sub_strict_overflow_p)
13418 	    *strict_overflow_p = true;
13419 	  return true;
13420 	}
13421       break;
13422 
13423     case BIT_IOR_EXPR:
13424       return (tree_expr_nonzero_warnv_p (op1,
13425 					 strict_overflow_p)
13426 	      || tree_expr_nonzero_warnv_p (op0,
13427 					    strict_overflow_p));
13428 
13429     default:
13430       break;
13431   }
13432 
13433   return false;
13434 }
13435 
13436 /* Return true when T is an address and is known to be nonzero.
13437    For floating point we further ensure that T is not denormal.
13438    Similar logic is present in nonzero_address in rtlanal.h.
13439 
13440    If the return value is based on the assumption that signed overflow
13441    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13442    change *STRICT_OVERFLOW_P.  */
13443 
13444 bool
13445 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13446 {
13447   bool sub_strict_overflow_p;
13448   switch (TREE_CODE (t))
13449     {
13450     case INTEGER_CST:
13451       return !integer_zerop (t);
13452 
13453     case ADDR_EXPR:
13454       {
13455 	tree base = TREE_OPERAND (t, 0);
13456 
13457 	if (!DECL_P (base))
13458 	  base = get_base_address (base);
13459 
13460 	if (base && TREE_CODE (base) == TARGET_EXPR)
13461 	  base = TARGET_EXPR_SLOT (base);
13462 
13463 	if (!base)
13464 	  return false;
13465 
13466 	/* For objects in the symbol table, check if we know they are non-zero.
13467 	   Don't do anything for variables and functions before the symtab is
13468 	   built; it is quite possible that they will be declared weak later.  */
13469 	int nonzero_addr = maybe_nonzero_address (base);
13470 	if (nonzero_addr >= 0)
13471 	  return nonzero_addr;
13472 
13473 	/* Constants are never weak.  */
13474 	if (CONSTANT_CLASS_P (base))
13475 	  return true;
13476 
13477 	return false;
13478       }
13479 
13480     case COND_EXPR:
13481       sub_strict_overflow_p = false;
13482       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13483 				     &sub_strict_overflow_p)
13484 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13485 					&sub_strict_overflow_p))
13486 	{
13487 	  if (sub_strict_overflow_p)
13488 	    *strict_overflow_p = true;
13489 	  return true;
13490 	}
13491       break;
13492 
13493     case SSA_NAME:
13494       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13495 	break;
13496       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13497 
13498     default:
13499       break;
13500     }
13501   return false;
13502 }
13503 
13504 #define integer_valued_real_p(X) \
13505   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13506 
13507 #define RECURSE(X) \
13508   ((integer_valued_real_p) (X, depth + 1))
13509 
13510 /* Return true if the floating point result of (CODE OP0) has an
13511    integer value.  We also allow +Inf, -Inf and NaN to be considered
13512    integer values. Return false for signaling NaN.
13513 
13514    DEPTH is the current nesting depth of the query.  */
13515 
13516 bool
13517 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13518 {
13519   switch (code)
13520     {
13521     case FLOAT_EXPR:
13522       return true;
13523 
13524     case ABS_EXPR:
13525       return RECURSE (op0);
13526 
13527     CASE_CONVERT:
13528       {
13529 	tree type = TREE_TYPE (op0);
13530 	if (TREE_CODE (type) == INTEGER_TYPE)
13531 	  return true;
13532 	if (TREE_CODE (type) == REAL_TYPE)
13533 	  return RECURSE (op0);
13534 	break;
13535       }
13536 
13537     default:
13538       break;
13539     }
13540   return false;
13541 }
13542 
13543 /* Return true if the floating point result of (CODE OP0 OP1) has an
13544    integer value.  We also allow +Inf, -Inf and NaN to be considered
13545    integer values. Return false for signaling NaN.
13546 
13547    DEPTH is the current nesting depth of the query.  */
13548 
13549 bool
13550 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13551 {
13552   switch (code)
13553     {
13554     case PLUS_EXPR:
13555     case MINUS_EXPR:
13556     case MULT_EXPR:
13557     case MIN_EXPR:
13558     case MAX_EXPR:
13559       return RECURSE (op0) && RECURSE (op1);
13560 
13561     default:
13562       break;
13563     }
13564   return false;
13565 }
13566 
13567 /* Return true if the floating point result of calling FN with arguments
13568    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to
13569    be considered integer values.  Return false for signaling NaN.  If FN
13570    takes fewer than 2 arguments, the remaining ARGn are null.
13571 
13572    DEPTH is the current nesting depth of the query.  */
13573 
13574 bool
13575 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13576 {
13577   switch (fn)
13578     {
13579     CASE_CFN_CEIL:
13580     CASE_CFN_CEIL_FN:
13581     CASE_CFN_FLOOR:
13582     CASE_CFN_FLOOR_FN:
13583     CASE_CFN_NEARBYINT:
13584     CASE_CFN_NEARBYINT_FN:
13585     CASE_CFN_RINT:
13586     CASE_CFN_RINT_FN:
13587     CASE_CFN_ROUND:
13588     CASE_CFN_ROUND_FN:
13589     CASE_CFN_TRUNC:
13590     CASE_CFN_TRUNC_FN:
13591       return true;
13592 
13593     CASE_CFN_FMIN:
13594     CASE_CFN_FMIN_FN:
13595     CASE_CFN_FMAX:
13596     CASE_CFN_FMAX_FN:
13597       return RECURSE (arg0) && RECURSE (arg1);
13598 
13599     default:
13600       break;
13601     }
13602   return false;
13603 }
13604 
13605 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13606    has an integer value.  We also allow +Inf, -Inf and NaN to be
13607    considered integer values. Return false for signaling NaN.
13608 
13609    DEPTH is the current nesting depth of the query.  */
13610 
13611 bool
13612 integer_valued_real_single_p (tree t, int depth)
13613 {
13614   switch (TREE_CODE (t))
13615     {
13616     case REAL_CST:
13617       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13618 
13619     case COND_EXPR:
13620       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13621 
13622     case SSA_NAME:
13623       /* Limit the depth of recursion to avoid quadratic behavior.
13624 	 This is expected to catch almost all occurrences in practice.
13625 	 If this code misses important cases that unbounded recursion
13626 	 would not, passes that need this information could be revised
13627 	 to provide it through dataflow propagation.  */
13628       return (!name_registered_for_update_p (t)
13629 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13630 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13631 						    depth));
13632 
13633     default:
13634       break;
13635     }
13636   return false;
13637 }
13638 
13639 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13640    has an integer value.  We also allow +Inf, -Inf and NaN to be
13641    considered integer values. Return false for signaling NaN.
13642 
13643    DEPTH is the current nesting depth of the query.  */
13644 
13645 static bool
13646 integer_valued_real_invalid_p (tree t, int depth)
13647 {
13648   switch (TREE_CODE (t))
13649     {
13650     case COMPOUND_EXPR:
13651     case MODIFY_EXPR:
13652     case BIND_EXPR:
13653       return RECURSE (TREE_OPERAND (t, 1));
13654 
13655     case SAVE_EXPR:
13656       return RECURSE (TREE_OPERAND (t, 0));
13657 
13658     default:
13659       break;
13660     }
13661   return false;
13662 }
13663 
13664 #undef RECURSE
13665 #undef integer_valued_real_p
13666 
13667 /* Return true if the floating point expression T has an integer value.
13668    We also allow +Inf, -Inf and NaN to be considered integer values.
13669    Return false for signaling NaN.
13670 
13671    DEPTH is the current nesting depth of the query.  */
13672 
13673 bool
13674 integer_valued_real_p (tree t, int depth)
13675 {
13676   if (t == error_mark_node)
13677     return false;
13678 
13679   tree_code code = TREE_CODE (t);
13680   switch (TREE_CODE_CLASS (code))
13681     {
13682     case tcc_binary:
13683     case tcc_comparison:
13684       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13685 					   TREE_OPERAND (t, 1), depth);
13686 
13687     case tcc_unary:
13688       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13689 
13690     case tcc_constant:
13691     case tcc_declaration:
13692     case tcc_reference:
13693       return integer_valued_real_single_p (t, depth);
13694 
13695     default:
13696       break;
13697     }
13698 
13699   switch (code)
13700     {
13701     case COND_EXPR:
13702     case SSA_NAME:
13703       return integer_valued_real_single_p (t, depth);
13704 
13705     case CALL_EXPR:
13706       {
13707 	tree arg0 = (call_expr_nargs (t) > 0
13708 		     ? CALL_EXPR_ARG (t, 0)
13709 		     : NULL_TREE);
13710 	tree arg1 = (call_expr_nargs (t) > 1
13711 		     ? CALL_EXPR_ARG (t, 1)
13712 		     : NULL_TREE);
13713 	return integer_valued_real_call_p (get_call_combined_fn (t),
13714 					   arg0, arg1, depth);
13715       }
13716 
13717     default:
13718       return integer_valued_real_invalid_p (t, depth);
13719     }
13720 }
13721 
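/* A minimal sketch (hypothetical, assuming the usual global trees are
   initialized) of the dispatch above on a plain constant:

     tree two = build_real (double_type_node, dconst2);
     gcc_checking_assert (integer_valued_real_p (two, 0));

   The REAL_CST 2.0 reaches integer_valued_real_single_p through the
   tcc_constant class, where real_isinteger accepts it.  */
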
13722 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13723    attempt to fold the expression to a constant without modifying TYPE,
13724    OP0 or OP1.
13725 
13726    If the expression could be simplified to a constant, then return
13727    the constant.  If the expression would not be simplified to a
13728    constant, then return NULL_TREE.  */
13729 
13730 tree
13731 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13732 {
13733   tree tem = fold_binary (code, type, op0, op1);
13734   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13735 }
13736 
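/* A hypothetical use (names assumed, not from a real caller): this
   helper either folds all the way to a constant or refuses; it never
   returns a partially simplified tree.

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					  two, three);

   FIVE is the INTEGER_CST 5; had either operand been a VAR_DECL, the
   result would have been NULL_TREE rather than a PLUS_EXPR.  */
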
13737 /* Given the components of a unary expression CODE, TYPE and OP0,
13738    attempt to fold the expression to a constant without modifying
13739    TYPE or OP0.
13740 
13741    If the expression could be simplified to a constant, then return
13742    the constant.  If the expression would not be simplified to a
13743    constant, then return NULL_TREE.  */
13744 
13745 tree
13746 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13747 {
13748   tree tem = fold_unary (code, type, op0);
13749   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13750 }
13751 
13752 /* If EXP represents referencing an element in a constant string
13753    (either via pointer arithmetic or array indexing), return the
13754    tree representing the value accessed, otherwise return NULL.  */
13755 
13756 tree
13757 fold_read_from_constant_string (tree exp)
13758 {
13759   if ((TREE_CODE (exp) == INDIRECT_REF
13760        || TREE_CODE (exp) == ARRAY_REF)
13761       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13762     {
13763       tree exp1 = TREE_OPERAND (exp, 0);
13764       tree index;
13765       tree string;
13766       location_t loc = EXPR_LOCATION (exp);
13767 
13768       if (TREE_CODE (exp) == INDIRECT_REF)
13769 	string = string_constant (exp1, &index);
13770       else
13771 	{
13772 	  tree low_bound = array_ref_low_bound (exp);
13773 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13774 
13775 	  /* Optimize the special-case of a zero lower bound.
13776 
13777 	     We convert the low_bound to sizetype to avoid some problems
13778 	     with constant folding.  (E.g. suppose the lower bound is 1,
13779 	     and its mode is QI.  Without the conversion, (ARRAY
13780 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13781 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13782 	  if (! integer_zerop (low_bound))
13783 	    index = size_diffop_loc (loc, index,
13784 				 fold_convert_loc (loc, sizetype, low_bound));
13785 
13786 	  string = exp1;
13787 	}
13788 
13789       scalar_int_mode char_mode;
13790       if (string
13791 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13792 	  && TREE_CODE (string) == STRING_CST
13793 	  && TREE_CODE (index) == INTEGER_CST
13794 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13795 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13796 			  &char_mode)
13797 	  && GET_MODE_SIZE (char_mode) == 1)
13798 	return build_int_cst_type (TREE_TYPE (exp),
13799 				   (TREE_STRING_POINTER (string)
13800 				    [TREE_INT_CST_LOW (index)]));
13801     }
13802   return NULL;
13803 }
13804 
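/* As a worked example of the transformation above: given the C fragment

     static const char s[] = "abc";
     ... s[1] ...

   the ARRAY_REF <s, 1> passes every check (STRING_CST array, constant
   index below TREE_STRING_LENGTH, single-byte char_mode), so the read
   folds to the INTEGER_CST 'b'.  An out-of-range index or a
   wide-character string falls through and returns NULL.  */
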
13805 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13806    an integer constant, real, or fixed-point constant.
13807 
13808    TYPE is the type of the result.  */
13809 
13810 static tree
13811 fold_negate_const (tree arg0, tree type)
13812 {
13813   tree t = NULL_TREE;
13814 
13815   switch (TREE_CODE (arg0))
13816     {
13817     case REAL_CST:
13818       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13819       break;
13820 
13821     case FIXED_CST:
13822       {
13823         FIXED_VALUE_TYPE f;
13824         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13825 					    &(TREE_FIXED_CST (arg0)), NULL,
13826 					    TYPE_SATURATING (type));
13827 	t = build_fixed (type, f);
13828 	/* Propagate overflow flags.  */
13829 	if (overflow_p | TREE_OVERFLOW (arg0))
13830 	  TREE_OVERFLOW (t) = 1;
13831 	break;
13832       }
13833 
13834     default:
13835       if (poly_int_tree_p (arg0))
13836 	{
13837 	  bool overflow;
13838 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13839 	  t = force_fit_type (type, res, 1,
13840 			      (overflow && ! TYPE_UNSIGNED (type))
13841 			      || TREE_OVERFLOW (arg0));
13842 	  break;
13843 	}
13844 
13845       gcc_unreachable ();
13846     }
13847 
13848   return t;
13849 }
13850 
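/* Note on the overflow propagation above, as a worked example: for
   32-bit int, negating the INTEGER_CST -2147483648 wraps back to
   itself, so wi::neg reports overflow and force_fit_type marks the
   result with TREE_OVERFLOW; the "overflow && ! TYPE_UNSIGNED (type)"
   test suppresses the flag only for unsigned types, where wrapping is
   well defined.  */
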
13851 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13852    an integer constant or real constant.
13853 
13854    TYPE is the type of the result.  */
13855 
13856 tree
13857 fold_abs_const (tree arg0, tree type)
13858 {
13859   tree t = NULL_TREE;
13860 
13861   switch (TREE_CODE (arg0))
13862     {
13863     case INTEGER_CST:
13864       {
13865         /* If the value is unsigned or non-negative, then the absolute value
13866 	   is the same as the ordinary value.  */
13867 	if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13868 	  t = arg0;
13869 
13870 	/* If the value is negative, then the absolute value is
13871 	   its negation.  */
13872 	else
13873 	  {
13874 	    bool overflow;
13875 	    wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13876 	    t = force_fit_type (type, val, -1,
13877 				overflow | TREE_OVERFLOW (arg0));
13878 	  }
13879       }
13880       break;
13881 
13882     case REAL_CST:
13883       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13884 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13885       else
13886 	t = arg0;
13887       break;
13888 
13889     default:
13890       gcc_unreachable ();
13891     }
13892 
13893   return t;
13894 }
13895 
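/* A minimal sketch (hypothetical caller) of fold_abs_const:

     tree m5 = build_int_cst (integer_type_node, -5);
     tree abs5 = fold_abs_const (m5, integer_type_node);

   ABS5 is the INTEGER_CST 5.  As with negation, abs (INT_MIN) wraps,
   so the returned constant carries TREE_OVERFLOW.  */
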
13896 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13897    constant.  TYPE is the type of the result.  */
13898 
13899 static tree
13900 fold_not_const (const_tree arg0, tree type)
13901 {
13902   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13903 
13904   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13905 }
13906 
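/* For example (illustrative values): applied to the unsigned char
   constant 0x0f, the wide-int complement refitted to 8 bits yields the
   INTEGER_CST 0xf0, with any TREE_OVERFLOW flag on the operand carried
   over unchanged.  */
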
13907 /* Given CODE, a relational operator, the target type, TYPE and two
13908    constant operands OP0 and OP1, return the result of the
13909    relational operation.  If the result is not a compile time
13910    constant, then return NULL_TREE.  */
13911 
13912 static tree
13913 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13914 {
13915   int result, invert;
13916 
13917   /* From here on, the only cases we handle are when the result is
13918      known to be a constant.  */
13919 
13920   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13921     {
13922       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13923       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13924 
13925       /* Handle the cases where either operand is a NaN.  */
13926       if (real_isnan (c0) || real_isnan (c1))
13927 	{
13928 	  switch (code)
13929 	    {
13930 	    case EQ_EXPR:
13931 	    case ORDERED_EXPR:
13932 	      result = 0;
13933 	      break;
13934 
13935 	    case NE_EXPR:
13936 	    case UNORDERED_EXPR:
13937 	    case UNLT_EXPR:
13938 	    case UNLE_EXPR:
13939 	    case UNGT_EXPR:
13940 	    case UNGE_EXPR:
13941 	    case UNEQ_EXPR:
13942               result = 1;
13943 	      break;
13944 
13945 	    case LT_EXPR:
13946 	    case LE_EXPR:
13947 	    case GT_EXPR:
13948 	    case GE_EXPR:
13949 	    case LTGT_EXPR:
13950 	      if (flag_trapping_math)
13951 		return NULL_TREE;
13952 	      result = 0;
13953 	      break;
13954 
13955 	    default:
13956 	      gcc_unreachable ();
13957 	    }
13958 
13959 	  return constant_boolean_node (result, type);
13960 	}
13961 
13962       return constant_boolean_node (real_compare (code, c0, c1), type);
13963     }
13964 
13965   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13966     {
13967       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13968       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13969       return constant_boolean_node (fixed_compare (code, c0, c1), type);
13970     }
13971 
13972   /* Handle equality/inequality of complex constants.  */
13973   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13974     {
13975       tree rcond = fold_relational_const (code, type,
13976 					  TREE_REALPART (op0),
13977 					  TREE_REALPART (op1));
13978       tree icond = fold_relational_const (code, type,
13979 					  TREE_IMAGPART (op0),
13980 					  TREE_IMAGPART (op1));
13981       if (code == EQ_EXPR)
13982 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13983       else if (code == NE_EXPR)
13984 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13985       else
13986 	return NULL_TREE;
13987     }
13988 
13989   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13990     {
13991       if (!VECTOR_TYPE_P (type))
13992 	{
13993 	  /* Have vector comparison with scalar boolean result.  */
13994 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13995 		      && known_eq (VECTOR_CST_NELTS (op0),
13996 				   VECTOR_CST_NELTS (op1)));
13997 	  unsigned HOST_WIDE_INT nunits;
13998 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
13999 	    return NULL_TREE;
14000 	  for (unsigned i = 0; i < nunits; i++)
14001 	    {
14002 	      tree elem0 = VECTOR_CST_ELT (op0, i);
14003 	      tree elem1 = VECTOR_CST_ELT (op1, i);
14004 	      tree tmp = fold_relational_const (code, type, elem0, elem1);
14005 	      if (tmp == NULL_TREE)
14006 		return NULL_TREE;
14007 	      if (integer_zerop (tmp))
14008 		return constant_boolean_node (false, type);
14009 	    }
14010 	  return constant_boolean_node (true, type);
14011 	}
14012       tree_vector_builder elts;
14013       if (!elts.new_binary_operation (type, op0, op1, false))
14014 	return NULL_TREE;
14015       unsigned int count = elts.encoded_nelts ();
14016       for (unsigned i = 0; i < count; i++)
14017 	{
14018 	  tree elem_type = TREE_TYPE (type);
14019 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14020 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14021 
14022 	  tree tem = fold_relational_const (code, elem_type,
14023 					    elem0, elem1);
14024 
14025 	  if (tem == NULL_TREE)
14026 	    return NULL_TREE;
14027 
14028 	  elts.quick_push (build_int_cst (elem_type,
14029 					  integer_zerop (tem) ? 0 : -1));
14030 	}
14031 
14032       return elts.build ();
14033     }
14034 
14035   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14036 
14037      To compute GT, swap the arguments and do LT.
14038      To compute GE, do LT and invert the result.
14039      To compute LE, swap the arguments, do LT and invert the result.
14040      To compute NE, do EQ and invert the result.
14041 
14042      Therefore, the code below must handle only EQ and LT.  */
14043 
14044   if (code == LE_EXPR || code == GT_EXPR)
14045     {
14046       std::swap (op0, op1);
14047       code = swap_tree_comparison (code);
14048     }
14049 
14050   /* Note that it is safe to invert for real values here because we
14051      have already handled the one case where it matters.  */
14052 
14053   invert = 0;
14054   if (code == NE_EXPR || code == GE_EXPR)
14055     {
14056       invert = 1;
14057       code = invert_tree_comparison (code, false);
14058     }
14059 
14060   /* Compute a result for LT or EQ if args permit;
14061      otherwise return NULL_TREE.  */
14062   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14063     {
14064       if (code == EQ_EXPR)
14065 	result = tree_int_cst_equal (op0, op1);
14066       else
14067 	result = tree_int_cst_lt (op0, op1);
14068     }
14069   else
14070     return NULL_TREE;
14071 
14072   if (invert)
14073     result ^= 1;
14074   return constant_boolean_node (result, type);
14075 }
14076 
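/* The NaN handling above encodes the usual IEEE rules; as a worked
   example, with C0 a quiet NaN, EQ_EXPR folds to false and NE_EXPR to
   true, while the ordered comparisons LT/LE/GT/GE would raise an
   invalid-operation exception at run time, so under -ftrapping-math
   (flag_trapping_math, on by default) they are deliberately left
   unfolded by returning NULL_TREE.  */
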
14077 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14078    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14079    itself.  */
14080 
14081 tree
14082 fold_build_cleanup_point_expr (tree type, tree expr)
14083 {
14084   /* If the expression does not have side effects then we don't have to wrap
14085      it with a cleanup point expression.  */
14086   if (!TREE_SIDE_EFFECTS (expr))
14087     return expr;
14088 
14089   /* If the expression is a return, check whether the expression inside the
14090      return, or the right-hand side of the modify expression inside it, has
14091      no side effects.  If either has none, we don't need to wrap the
14092      expression in a cleanup point expression.  Note we don't check the
14093      left-hand side of the modify because it should always be the return decl.  */
14094   if (TREE_CODE (expr) == RETURN_EXPR)
14095     {
14096       tree op = TREE_OPERAND (expr, 0);
14097       if (!op || !TREE_SIDE_EFFECTS (op))
14098         return expr;
14099       op = TREE_OPERAND (op, 1);
14100       if (!TREE_SIDE_EFFECTS (op))
14101         return expr;
14102     }
14103 
14104   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14105 }
14106 
14107 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14108    of an indirection through OP0, or NULL_TREE if no simplification is
14109    possible.  */
14110 
14111 tree
14112 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14113 {
14114   tree sub = op0;
14115   tree subtype;
14116   poly_uint64 const_op01;
14117 
14118   STRIP_NOPS (sub);
14119   subtype = TREE_TYPE (sub);
14120   if (!POINTER_TYPE_P (subtype)
14121       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14122     return NULL_TREE;
14123 
14124   if (TREE_CODE (sub) == ADDR_EXPR)
14125     {
14126       tree op = TREE_OPERAND (sub, 0);
14127       tree optype = TREE_TYPE (op);
14128 
14129       /* *&CONST_DECL -> to the value of the const decl.  */
14130       if (TREE_CODE (op) == CONST_DECL)
14131 	return DECL_INITIAL (op);
14132       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14133       if (type == optype)
14134 	{
14135 	  tree fop = fold_read_from_constant_string (op);
14136 	  if (fop)
14137 	    return fop;
14138 	  else
14139 	    return op;
14140 	}
14141       /* *(foo *)&fooarray => fooarray[0] */
14142       else if (TREE_CODE (optype) == ARRAY_TYPE
14143 	       && type == TREE_TYPE (optype)
14144 	       && (!in_gimple_form
14145 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14146 	{
14147 	  tree type_domain = TYPE_DOMAIN (optype);
14148 	  tree min_val = size_zero_node;
14149 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14150 	    min_val = TYPE_MIN_VALUE (type_domain);
14151 	  if (in_gimple_form
14152 	      && TREE_CODE (min_val) != INTEGER_CST)
14153 	    return NULL_TREE;
14154 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14155 			     NULL_TREE, NULL_TREE);
14156 	}
14157       /* *(foo *)&complexfoo => __real__ complexfoo */
14158       else if (TREE_CODE (optype) == COMPLEX_TYPE
14159 	       && type == TREE_TYPE (optype))
14160 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14161       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14162       else if (VECTOR_TYPE_P (optype)
14163 	       && type == TREE_TYPE (optype))
14164 	{
14165 	  tree part_width = TYPE_SIZE (type);
14166 	  tree index = bitsize_int (0);
14167 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14168 				  index);
14169 	}
14170     }
14171 
14172   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14173       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14174     {
14175       tree op00 = TREE_OPERAND (sub, 0);
14176       tree op01 = TREE_OPERAND (sub, 1);
14177 
14178       STRIP_NOPS (op00);
14179       if (TREE_CODE (op00) == ADDR_EXPR)
14180 	{
14181 	  tree op00type;
14182 	  op00 = TREE_OPERAND (op00, 0);
14183 	  op00type = TREE_TYPE (op00);
14184 
14185 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14186 	  if (VECTOR_TYPE_P (op00type)
14187 	      && type == TREE_TYPE (op00type)
14188 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14189 		 but we want to treat offsets with MSB set as negative.
14190 		 For the code below negative offsets are invalid and
14191 		 TYPE_SIZE of the element is something unsigned, so
14192 		 check whether op01 fits into poly_int64, which implies
14193 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14194 		 then just use poly_uint64 because we want to treat the
14195 		 value as unsigned.  */
14196 	      && tree_fits_poly_int64_p (op01))
14197 	    {
14198 	      tree part_width = TYPE_SIZE (type);
14199 	      poly_uint64 max_offset
14200 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14201 		   * TYPE_VECTOR_SUBPARTS (op00type));
14202 	      if (known_lt (const_op01, max_offset))
14203 		{
14204 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14205 		  return fold_build3_loc (loc,
14206 					  BIT_FIELD_REF, type, op00,
14207 					  part_width, index);
14208 		}
14209 	    }
14210 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14211 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14212 		   && type == TREE_TYPE (op00type))
14213 	    {
14214 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14215 			    const_op01))
14216 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14217 	    }
14218 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14219 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14220 		   && type == TREE_TYPE (op00type))
14221 	    {
14222 	      tree type_domain = TYPE_DOMAIN (op00type);
14223 	      tree min_val = size_zero_node;
14224 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14225 		min_val = TYPE_MIN_VALUE (type_domain);
14226 	      offset_int off = wi::to_offset (op01);
14227 	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14228 	      offset_int remainder;
14229 	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
14230 	      if (remainder == 0 && TREE_CODE (min_val) == INTEGER_CST)
14231 		{
14232 		  off = off + wi::to_offset (min_val);
14233 		  op01 = wide_int_to_tree (sizetype, off);
14234 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
14235 				     NULL_TREE, NULL_TREE);
14236 		}
14237 	    }
14238 	}
14239     }
14240 
14241   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14242   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14243       && type == TREE_TYPE (TREE_TYPE (subtype))
14244       && (!in_gimple_form
14245 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14246     {
14247       tree type_domain;
14248       tree min_val = size_zero_node;
14249       sub = build_fold_indirect_ref_loc (loc, sub);
14250       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14251       if (type_domain && TYPE_MIN_VALUE (type_domain))
14252 	min_val = TYPE_MIN_VALUE (type_domain);
14253       if (in_gimple_form
14254 	  && TREE_CODE (min_val) != INTEGER_CST)
14255 	return NULL_TREE;
14256       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14257 			 NULL_TREE);
14258     }
14259 
14260   return NULL_TREE;
14261 }
14262 
14263 /* Builds an expression for an indirection through T, simplifying some
14264    cases.  */
14265 
14266 tree
14267 build_fold_indirect_ref_loc (location_t loc, tree t)
14268 {
14269   tree type = TREE_TYPE (TREE_TYPE (t));
14270   tree sub = fold_indirect_ref_1 (loc, type, t);
14271 
14272   if (sub)
14273     return sub;
14274 
14275   return build1_loc (loc, INDIRECT_REF, type, t);
14276 }
14277 
14278 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14279 
14280 tree
14281 fold_indirect_ref_loc (location_t loc, tree t)
14282 {
14283   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14284 
14285   if (sub)
14286     return sub;
14287   else
14288     return t;
14289 }
14290 
14291 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14292    whose result is ignored.  The type of the returned tree need not be
14293    the same as the original expression.  */
14294 
14295 tree
14296 fold_ignored_result (tree t)
14297 {
14298   if (!TREE_SIDE_EFFECTS (t))
14299     return integer_zero_node;
14300 
14301   for (;;)
14302     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14303       {
14304       case tcc_unary:
14305 	t = TREE_OPERAND (t, 0);
14306 	break;
14307 
14308       case tcc_binary:
14309       case tcc_comparison:
14310 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14311 	  t = TREE_OPERAND (t, 0);
14312 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14313 	  t = TREE_OPERAND (t, 1);
14314 	else
14315 	  return t;
14316 	break;
14317 
14318       case tcc_expression:
14319 	switch (TREE_CODE (t))
14320 	  {
14321 	  case COMPOUND_EXPR:
14322 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14323 	      return t;
14324 	    t = TREE_OPERAND (t, 0);
14325 	    break;
14326 
14327 	  case COND_EXPR:
14328 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14329 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14330 	      return t;
14331 	    t = TREE_OPERAND (t, 0);
14332 	    break;
14333 
14334 	  default:
14335 	    return t;
14336 	  }
14337 	break;
14338 
14339       default:
14340 	return t;
14341       }
14342 }
14343 
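/* As a worked example of the loop above: for the ignored expression

     (void) (x + f ());

   the PLUS_EXPR is tcc_binary and its first operand X has no side
   effects, so T steps to the CALL_EXPR f (), which is then returned;
   the dead addition is stripped while the call's side effects are
   preserved.  */
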
14344 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14345 
14346 tree
14347 round_up_loc (location_t loc, tree value, unsigned int divisor)
14348 {
14349   tree div = NULL_TREE;
14350 
14351   if (divisor == 1)
14352     return value;
14353 
14354   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14355      have to do anything.  Only do this when we are not given a const,
14356      because in that case, this check is more expensive than just
14357      doing it.  */
14358   if (TREE_CODE (value) != INTEGER_CST)
14359     {
14360       div = build_int_cst (TREE_TYPE (value), divisor);
14361 
14362       if (multiple_of_p (TREE_TYPE (value), value, div))
14363 	return value;
14364     }
14365 
14366   /* If divisor is a power of two, simplify this to bit manipulation.  */
14367   if (pow2_or_zerop (divisor))
14368     {
14369       if (TREE_CODE (value) == INTEGER_CST)
14370 	{
14371 	  wide_int val = wi::to_wide (value);
14372 	  bool overflow_p;
14373 
14374 	  if ((val & (divisor - 1)) == 0)
14375 	    return value;
14376 
14377 	  overflow_p = TREE_OVERFLOW (value);
14378 	  val += divisor - 1;
14379 	  val &= (int) -divisor;
14380 	  if (val == 0)
14381 	    overflow_p = true;
14382 
14383 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14384 	}
14385       else
14386 	{
14387 	  tree t;
14388 
14389 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14390 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14391 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14392 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14393 	}
14394     }
14395   else
14396     {
14397       if (!div)
14398 	div = build_int_cst (TREE_TYPE (value), divisor);
14399       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14400       value = size_binop_loc (loc, MULT_EXPR, value, div);
14401     }
14402 
14403   return value;
14404 }
14405 
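/* For a power-of-two DIVISOR the bit-manipulation path above computes

     (VALUE + DIVISOR - 1) & -DIVISOR

   e.g. round_up (37, 8) = (37 + 7) & -8 = 44 & ~7 = 40, while already
   aligned values are returned unchanged by the early
   (val & (divisor - 1)) == 0 test.  Other divisors take the
   CEIL_DIV_EXPR followed by MULT_EXPR fallback.  */
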
14406 /* Likewise, but round down.  */
14407 
14408 tree
14409 round_down_loc (location_t loc, tree value, int divisor)
14410 {
14411   tree div = NULL_TREE;
14412 
14413   gcc_assert (divisor > 0);
14414   if (divisor == 1)
14415     return value;
14416 
14417   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14418      have to do anything.  Only do this when we are not given a const,
14419      because in that case, this check is more expensive than just
14420      doing it.  */
14421   if (TREE_CODE (value) != INTEGER_CST)
14422     {
14423       div = build_int_cst (TREE_TYPE (value), divisor);
14424 
14425       if (multiple_of_p (TREE_TYPE (value), value, div))
14426 	return value;
14427     }
14428 
14429   /* If divisor is a power of two, simplify this to bit manipulation.  */
14430   if (pow2_or_zerop (divisor))
14431     {
14432       tree t;
14433 
14434       t = build_int_cst (TREE_TYPE (value), -divisor);
14435       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14436     }
14437   else
14438     {
14439       if (!div)
14440 	div = build_int_cst (TREE_TYPE (value), divisor);
14441       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14442       value = size_binop_loc (loc, MULT_EXPR, value, div);
14443     }
14444 
14445   return value;
14446 }
14447 
14448 /* Returns the pointer to the base of the object addressed by EXP and
14449    extracts the information about the offset of the access, storing it
14450    in PBITPOS and POFFSET.  */
14451 
14452 static tree
14453 split_address_to_core_and_offset (tree exp,
14454 				  poly_int64_pod *pbitpos, tree *poffset)
14455 {
14456   tree core;
14457   machine_mode mode;
14458   int unsignedp, reversep, volatilep;
14459   poly_int64 bitsize;
14460   location_t loc = EXPR_LOCATION (exp);
14461 
14462   if (TREE_CODE (exp) == ADDR_EXPR)
14463     {
14464       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14465 				  poffset, &mode, &unsignedp, &reversep,
14466 				  &volatilep);
14467       core = build_fold_addr_expr_loc (loc, core);
14468     }
14469   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14470     {
14471       core = TREE_OPERAND (exp, 0);
14472       STRIP_NOPS (core);
14473       *pbitpos = 0;
14474       *poffset = TREE_OPERAND (exp, 1);
14475       if (poly_int_tree_p (*poffset))
14476 	{
14477 	  poly_offset_int tem
14478 	    = wi::sext (wi::to_poly_offset (*poffset),
14479 			TYPE_PRECISION (TREE_TYPE (*poffset)));
14480 	  tem <<= LOG2_BITS_PER_UNIT;
14481 	  if (tem.to_shwi (pbitpos))
14482 	    *poffset = NULL_TREE;
14483 	}
14484     }
14485   else
14486     {
14487       core = exp;
14488       *pbitpos = 0;
14489       *poffset = NULL_TREE;
14490     }
14491 
14492   return core;
14493 }
14494 
14495 /* Returns true if addresses of E1 and E2 differ by a constant, false
14496    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14497 
14498 bool
14499 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14500 {
14501   tree core1, core2;
14502   poly_int64 bitpos1, bitpos2;
14503   tree toffset1, toffset2, tdiff, type;
14504 
14505   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14506   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14507 
14508   poly_int64 bytepos1, bytepos2;
14509   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14510       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14511       || !operand_equal_p (core1, core2, 0))
14512     return false;
14513 
14514   if (toffset1 && toffset2)
14515     {
14516       type = TREE_TYPE (toffset1);
14517       if (type != TREE_TYPE (toffset2))
14518 	toffset2 = fold_convert (type, toffset2);
14519 
14520       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14521       if (!cst_and_fits_in_hwi (tdiff))
14522 	return false;
14523 
14524       *diff = int_cst_value (tdiff);
14525     }
14526   else if (toffset1 || toffset2)
14527     {
14528       /* If only one of the offsets is non-constant, the difference cannot
14529 	 be a constant.  */
14530       return false;
14531     }
14532   else
14533     *diff = 0;
14534 
14535   *diff += bytepos1 - bytepos2;
14536   return true;
14537 }
14538 
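/* A hypothetical use: with E1 and E2 the ADDR_EXPRs &a[3] and &a[1],
   where A is an array of 4-byte ints, both cores are A's address and
   both offsets are constant, so the function stores 8 in *DIFF and
   returns true.  Had one address used a variable index, only one
   toffset would be non-null and the result would be false.  */
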
14539 /* Return OFF converted to a pointer offset type suitable as offset for
14540    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14541 tree
14542 convert_to_ptrofftype_loc (location_t loc, tree off)
14543 {
14544   return fold_convert_loc (loc, sizetype, off);
14545 }
14546 
14547 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14548 tree
14549 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14550 {
14551   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14552 			  ptr, convert_to_ptrofftype_loc (loc, off));
14553 }
14554 
14555 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14556 tree
14557 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14558 {
14559   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14560 			  ptr, size_int (off));
14561 }
14562 
14563 /* Return a char pointer for a C string if it is a string constant
14564    or a sum of string constant and integer constant.  We only support
14565    string constants properly terminated with a '\0' character.
14566    If STRLEN is a valid pointer, the length of the returned string
14567    (including the terminating character) is stored in it.  */
14568 
14569 const char *
14570 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14571 {
14572   tree offset_node;
14573 
14574   if (strlen)
14575     *strlen = 0;
14576 
14577   src = string_constant (src, &offset_node);
14578   if (src == 0)
14579     return NULL;
14580 
14581   unsigned HOST_WIDE_INT offset = 0;
14582   if (offset_node != NULL_TREE)
14583     {
14584       if (!tree_fits_uhwi_p (offset_node))
14585 	return NULL;
14586       else
14587 	offset = tree_to_uhwi (offset_node);
14588     }
14589 
14590   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14591   const char *string = TREE_STRING_POINTER (src);
14592 
14593   /* Support only properly null-terminated strings.  */
14594   if (string_length == 0
14595       || string[string_length - 1] != '\0'
14596       || offset >= string_length)
14597     return NULL;
14598 
14599   if (strlen)
14600     *strlen = string_length - offset;
14601   return string + offset;
14602 }
14603 
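/* A minimal sketch (hypothetical, with SRC the tree for "hi" + 1):

     unsigned HOST_WIDE_INT len;
     const char *p = c_getstr (src, &len);

   P points at "i" and LEN is 2, counting the terminating NUL.  An
   unterminated STRING_CST, or an offset at or past the end, makes the
   function return NULL instead.  */
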
14604 #if CHECKING_P
14605 
14606 namespace selftest {
14607 
14608 /* Helper functions for writing tests of folding trees.  */
14609 
14610 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14611 
14612 static void
14613 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14614 			     tree constant)
14615 {
14616   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14617 }
14618 
14619 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14620    wrapping WRAPPED_EXPR.  */
14621 
14622 static void
14623 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14624 				 tree wrapped_expr)
14625 {
14626   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14627   ASSERT_NE (wrapped_expr, result);
14628   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14629   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14630 }
14631 
14632 /* Verify that various arithmetic binary operations are folded
14633    correctly.  */
14634 
14635 static void
14636 test_arithmetic_folding ()
14637 {
14638   tree type = integer_type_node;
14639   tree x = create_tmp_var_raw (type, "x");
14640   tree zero = build_zero_cst (type);
14641   tree one = build_int_cst (type, 1);
14642 
14643   /* Addition.  */
14644   /* 1 <-- (0 + 1) */
14645   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14646 			       one);
14647   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14648 			       one);
14649 
14650   /* (nonlvalue)x <-- (x + 0) */
14651   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14652 				   x);
14653 
14654   /* Subtraction.  */
14655   /* 0 <-- (x - x) */
14656   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14657 			       zero);
14658   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14659 				   x);
14660 
14661   /* Multiplication.  */
14662   /* 0 <-- (x * 0) */
14663   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14664 			       zero);
14665 
14666   /* (nonlvalue)x <-- (x * 1) */
14667   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14668 				   x);
14669 }
14670 
14671 /* Verify that various binary operations on vectors are folded
14672    correctly.  */
14673 
14674 static void
14675 test_vector_folding ()
14676 {
14677   tree inner_type = integer_type_node;
14678   tree type = build_vector_type (inner_type, 4);
14679   tree zero = build_zero_cst (type);
14680   tree one = build_one_cst (type);
14681 
14682   /* Verify equality tests that return a scalar boolean result.  */
14683   tree res_type = boolean_type_node;
14684   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14685   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14686   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14687   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14688 }
14689 
14690 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
14691 
14692 static void
14693 test_vec_duplicate_folding ()
14694 {
14695   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14696   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14697   /* This will be 1 if VEC_MODE isn't a vector mode.  */
14698   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
14699 
14700   tree type = build_vector_type (ssizetype, nunits);
14701   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14702   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14703   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14704 }
14705 
14706 /* Run all of the selftests within this file.  */
14707 
14708 void
14709 fold_const_c_tests ()
14710 {
14711   test_arithmetic_folding ();
14712   test_vector_folding ();
14713   test_vec_duplicate_folding ();
14714 }
14715 
14716 } // namespace selftest
14717 
14718 #endif /* CHECKING_P */
14719