/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
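
/* For example, COMPCODE_LE (3) is COMPCODE_LT (1) | COMPCODE_EQ (2), so
   OR-ing the codes of "a < b" and "a == b" yields the code of "a <= b",
   and AND-ing the codes of "a <= b" and "a >= b" yields COMPCODE_EQ:

     (COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ

   which is what makes AND/OR of comparisons simple bit operations.  */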

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
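
/* As a concrete illustration: for INTEGER_CST operands ARG1 = 12 and
   ARG2 = 4 this returns the INTEGER_CST 3, while for ARG1 = 13 and
   ARG2 = 4 the remainder is nonzero and NULL_TREE is returned.  */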

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
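
/* A minimal sketch of how a caller typically drives the deferral
   machinery above (EXPR, STMT and the "used" test are hypothetical):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = folded != NULL_TREE && TREE_CONSTANT (folded);
     fold_undefer_overflow_warnings (used, stmt, 0);

   Passing 0 as CODE means "use whatever level was deferred".  */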

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
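
/* For a 32-bit signed type this rejects exactly INT_MIN: only for
   0x80000000 is wi::only_sign_bit_p true, and its negation is not
   representable in the type.  */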

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* While INT_MIN/n * n doesn't overflow, negating one of its
	 operands does overflow if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
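
/* For instance, with wrapping overflow (-fwrapv) "a - b" is cheaply
   negatable as "b - a", while "a + b" is negatable only if one of its
   operands is; an INTEGER_CST is negatable unless negating it would
   overflow, i.e. unless it is the minimum value of a signed type.  */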

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
   simplification is possible.  If negate_expr_p would return true for T,
   NULL_TREE will never be returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case NULL_TREE is returned.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  Here "constant" means an expression with
   TREE_CONSTANT set that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
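
/* For example, with CODE == PLUS_EXPR and IN == "x - 4", the MINUS_EXPR
   is decomposed as VAR == "x" with the subtracted literal 4 stored
   through *MINUS_LITP; if NEGATE_P is additionally true the roles flip,
   giving *LITP == 4 and *MINUS_VARP == "x".  */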

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs.  */

static tree
int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

/* Combine two integer constants PARG1 and PARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    return int_const_binop_2 (code, arg1, arg2, overflowable);

  gcc_assert (NUM_POLY_INT_COEFFS != 1);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      poly_wide_int res;
      bool overflow;
      tree type = TREE_TYPE (arg1);
      signop sign = TYPE_SIGN (type);
      switch (code)
	{
	case PLUS_EXPR:
	  res = wi::add (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MINUS_EXPR:
	  res = wi::sub (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MULT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg1),
			   wi::to_wide (arg2), sign, &overflow);
	  else if (TREE_CODE (arg1) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg2),
			   wi::to_wide (arg1), sign, &overflow);
	  else
	    return NULL_TREE;
	  break;

	case LSHIFT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
	  else
	    return NULL_TREE;
	  break;

	case BIT_IOR_EXPR:
	  if (TREE_CODE (arg2) != INTEGER_CST
	      || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			     &res))
	    return NULL_TREE;
	  break;

	default:
	  return NULL_TREE;
	}
      return force_fit_type (type, res, overflowable,
			     (((sign == SIGNED || overflowable == -1)
			       && overflow)
			      | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
    }

  return NULL_TREE;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
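
/* E.g. a left shift distributes over addition in its first operand in
   the modular arithmetic used here:

     (a + b) << c == (a << c) + (b << c)

   but not in the shift count, so the function returns false for
   LSHIFT_EXPR with OPNO == 2.  */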
1214 
1215 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1216    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1217    are the same kind of constant and the same machine mode.  Return zero if
1218    combining the constants is not allowed in the current operating mode.  */
1219 
1220 static tree
const_binop(enum tree_code code,tree arg1,tree arg2)1221 const_binop (enum tree_code code, tree arg1, tree arg2)
1222 {
1223   /* Sanity check for the recursive cases.  */
1224   if (!arg1 || !arg2)
1225     return NULL_TREE;
1226 
1227   STRIP_NOPS (arg1);
1228   STRIP_NOPS (arg2);
1229 
1230   if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1231     {
1232       if (code == POINTER_PLUS_EXPR)
1233 	return int_const_binop (PLUS_EXPR,
1234 				arg1, fold_convert (TREE_TYPE (arg1), arg2));
1235 
1236       return int_const_binop (code, arg1, arg2);
1237     }
1238 
1239   if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1240     {
1241       machine_mode mode;
1242       REAL_VALUE_TYPE d1;
1243       REAL_VALUE_TYPE d2;
1244       REAL_VALUE_TYPE value;
1245       REAL_VALUE_TYPE result;
1246       bool inexact;
1247       tree t, type;
1248 
1249       /* The following codes are handled by real_arithmetic.  */
1250       switch (code)
1251 	{
1252 	case PLUS_EXPR:
1253 	case MINUS_EXPR:
1254 	case MULT_EXPR:
1255 	case RDIV_EXPR:
1256 	case MIN_EXPR:
1257 	case MAX_EXPR:
1258 	  break;
1259 
1260 	default:
1261 	  return NULL_TREE;
1262 	}
1263 
1264       d1 = TREE_REAL_CST (arg1);
1265       d2 = TREE_REAL_CST (arg2);
1266 
1267       type = TREE_TYPE (arg1);
1268       mode = TYPE_MODE (type);
1269 
1270       /* Don't perform operation if we honor signaling NaNs and
1271 	 either operand is a signaling NaN.  */
1272       if (HONOR_SNANS (mode)
1273 	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
1274 	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
1275 	return NULL_TREE;
1276 
1277       /* Don't perform operation if it would raise a division
1278 	 by zero exception.  */
1279       if (code == RDIV_EXPR
1280 	  && real_equal (&d2, &dconst0)
1281 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1282 	return NULL_TREE;
1283 
1284       /* If either operand is a NaN, just return it.  Otherwise, set up
1285 	 for floating-point trap; we return an overflow.  */
1286       if (REAL_VALUE_ISNAN (d1))
1287       {
1288 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1289 	   is off.  */
1290 	d1.signalling = 0;
1291 	t = build_real (type, d1);
1292 	return t;
1293       }
1294       else if (REAL_VALUE_ISNAN (d2))
1295       {
1296 	/* Make resulting NaN value to be qNaN when flag_signaling_nans
1297 	   is off.  */
1298 	d2.signalling = 0;
1299 	t = build_real (type, d2);
1300 	return t;
1301       }
1302 
1303       inexact = real_arithmetic (&value, code, &d1, &d2);
1304       real_convert (&result, mode, &value);
1305 
1306       /* Don't constant fold this floating point operation if
1307 	 the result has overflowed and flag_trapping_math.  */
1308       if (flag_trapping_math
1309 	  && MODE_HAS_INFINITIES (mode)
1310 	  && REAL_VALUE_ISINF (result)
1311 	  && !REAL_VALUE_ISINF (d1)
1312 	  && !REAL_VALUE_ISINF (d2))
1313 	return NULL_TREE;
1314 
1315       /* Don't constant fold this floating point operation if the
1316 	 result may dependent upon the run-time rounding mode and
1317 	 flag_rounding_math is set, or if GCC's software emulation
1318 	 is unable to accurately represent the result.  */
1319       if ((flag_rounding_math
1320 	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1321 	  && (inexact || !real_identical (&result, &value)))
1322 	return NULL_TREE;
1323 
1324       t = build_real (type, result);
1325 
1326       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1327       return t;
1328     }
1329 
1330   if (TREE_CODE (arg1) == FIXED_CST)
1331     {
1332       FIXED_VALUE_TYPE f1;
1333       FIXED_VALUE_TYPE f2;
1334       FIXED_VALUE_TYPE result;
1335       tree t, type;
1336       int sat_p;
1337       bool overflow_p;
1338 
1339       /* The following codes are handled by fixed_arithmetic.  */
1340       switch (code)
1341         {
1342 	case PLUS_EXPR:
1343 	case MINUS_EXPR:
1344 	case MULT_EXPR:
1345 	case TRUNC_DIV_EXPR:
1346 	  if (TREE_CODE (arg2) != FIXED_CST)
1347 	    return NULL_TREE;
1348 	  f2 = TREE_FIXED_CST (arg2);
1349 	  break;
1350 
1351 	case LSHIFT_EXPR:
1352 	case RSHIFT_EXPR:
1353 	  {
1354 	    if (TREE_CODE (arg2) != INTEGER_CST)
1355 	      return NULL_TREE;
1356 	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1357 	    f2.data.high = w2.elt (1);
1358 	    f2.data.low = w2.ulow ();
1359 	    f2.mode = SImode;
1360 	  }
1361 	  break;
1362 
1363         default:
1364 	  return NULL_TREE;
1365         }
1366 
1367       f1 = TREE_FIXED_CST (arg1);
1368       type = TREE_TYPE (arg1);
1369       sat_p = TYPE_SATURATING (type);
1370       overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1371       t = build_fixed (type, result);
1372       /* Propagate overflow flags.  */
1373       if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1374 	TREE_OVERFLOW (t) = 1;
1375       return t;
1376     }
1377 
1378   if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1379     {
1380       tree type = TREE_TYPE (arg1);
1381       tree r1 = TREE_REALPART (arg1);
1382       tree i1 = TREE_IMAGPART (arg1);
1383       tree r2 = TREE_REALPART (arg2);
1384       tree i2 = TREE_IMAGPART (arg2);
1385       tree real, imag;
1386 
1387       switch (code)
1388 	{
1389 	case PLUS_EXPR:
1390 	case MINUS_EXPR:
1391 	  real = const_binop (code, r1, r2);
1392 	  imag = const_binop (code, i1, i2);
1393 	  break;
1394 
1395 	case MULT_EXPR:
1396 	  if (COMPLEX_FLOAT_TYPE_P (type))
1397 	    return do_mpc_arg2 (arg1, arg2, type,
1398 				/* do_nonfinite= */ folding_initializer,
1399 				mpc_mul);
1400 
1401 	  real = const_binop (MINUS_EXPR,
1402 			      const_binop (MULT_EXPR, r1, r2),
1403 			      const_binop (MULT_EXPR, i1, i2));
1404 	  imag = const_binop (PLUS_EXPR,
1405 			      const_binop (MULT_EXPR, r1, i2),
1406 			      const_binop (MULT_EXPR, i1, r2));
1407 	  break;
1408 
1409 	case RDIV_EXPR:
1410 	  if (COMPLEX_FLOAT_TYPE_P (type))
1411 	    return do_mpc_arg2 (arg1, arg2, type,
1412                                 /* do_nonfinite= */ folding_initializer,
1413 				mpc_div);
1414 	  /* Fallthru. */
1415 	case TRUNC_DIV_EXPR:
1416 	case CEIL_DIV_EXPR:
1417 	case FLOOR_DIV_EXPR:
1418 	case ROUND_DIV_EXPR:
1419 	  if (flag_complex_method == 0)
1420 	  {
1421 	    /* Keep this algorithm in sync with
1422 	       tree-complex.c:expand_complex_div_straight().
1423 
1424 	       Expand complex division to scalars, straightforward algorithm.
1425 	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1426 	       t = br*br + bi*bi
1427 	    */
1428 	    tree magsquared
1429 	      = const_binop (PLUS_EXPR,
1430 			     const_binop (MULT_EXPR, r2, r2),
1431 			     const_binop (MULT_EXPR, i2, i2));
1432 	    tree t1
1433 	      = const_binop (PLUS_EXPR,
1434 			     const_binop (MULT_EXPR, r1, r2),
1435 			     const_binop (MULT_EXPR, i1, i2));
1436 	    tree t2
1437 	      = const_binop (MINUS_EXPR,
1438 			     const_binop (MULT_EXPR, i1, r2),
1439 			     const_binop (MULT_EXPR, r1, i2));
1440 
1441 	    real = const_binop (code, t1, magsquared);
1442 	    imag = const_binop (code, t2, magsquared);
1443 	  }
1444 	  else
1445 	  {
1446 	    /* Keep this algorithm in sync with
1447                tree-complex.c:expand_complex_div_wide().
1448 
1449 	       Expand complex division to scalars, modified algorithm to minimize
1450 	       overflow with wide input ranges.  */
1451 	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1452 					fold_abs_const (r2, TREE_TYPE (type)),
1453 					fold_abs_const (i2, TREE_TYPE (type)));
1454 
1455 	    if (integer_nonzerop (compare))
1456 	      {
1457 		/* In the TRUE branch, we compute
1458 		   ratio = br/bi;
1459 		   div = (br * ratio) + bi;
1460 		   tr = (ar * ratio) + ai;
1461 		   ti = (ai * ratio) - ar;
1462 		   tr = tr / div;
1463 		   ti = ti / div;  */
1464 		tree ratio = const_binop (code, r2, i2);
1465 		tree div = const_binop (PLUS_EXPR, i2,
1466 					const_binop (MULT_EXPR, r2, ratio));
1467 		real = const_binop (MULT_EXPR, r1, ratio);
1468 		real = const_binop (PLUS_EXPR, real, i1);
1469 		real = const_binop (code, real, div);
1470 
1471 		imag = const_binop (MULT_EXPR, i1, ratio);
1472 		imag = const_binop (MINUS_EXPR, imag, r1);
1473 		imag = const_binop (code, imag, div);
1474 	      }
1475 	    else
1476 	      {
1477 		/* In the FALSE branch, we compute
1478 		   ratio = d/c;
1479 		   divisor = (d * ratio) + c;
1480 		   tr = (b * ratio) + a;
1481 		   ti = b - (a * ratio);
1482 		   tr = tr / div;
1483 		   ti = ti / div;  */
1484 		tree ratio = const_binop (code, i2, r2);
1485 		tree div = const_binop (PLUS_EXPR, r2,
1486                                         const_binop (MULT_EXPR, i2, ratio));
1487 
1488 		real = const_binop (MULT_EXPR, i1, ratio);
1489 		real = const_binop (PLUS_EXPR, real, r1);
1490 		real = const_binop (code, real, div);
1491 
1492 		imag = const_binop (MULT_EXPR, r1, ratio);
1493 		imag = const_binop (MINUS_EXPR, i1, imag);
1494 		imag = const_binop (code, imag, div);
1495 	      }
1496 	  }
1497 	  break;
1498 
1499 	default:
1500 	  return NULL_TREE;
1501 	}
1502 
1503       if (real && imag)
1504 	return build_complex (type, real, imag);
1505     }
1506 
1507   if (TREE_CODE (arg1) == VECTOR_CST
1508       && TREE_CODE (arg2) == VECTOR_CST
1509       && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1510 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1511     {
1512       tree type = TREE_TYPE (arg1);
1513       bool step_ok_p;
1514       if (VECTOR_CST_STEPPED_P (arg1)
1515 	  && VECTOR_CST_STEPPED_P (arg2))
1516 	/* We can operate directly on the encoding if:
1517 
1518 	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1519 	    implies
1520 	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1521 
1522 	   Addition and subtraction are the supported operators
1523 	   for which this is true.  */
1524 	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1525       else if (VECTOR_CST_STEPPED_P (arg1))
1526 	/* We can operate directly on stepped encodings if:
1527 
1528 	     a3 - a2 == a2 - a1
1529 	   implies:
1530 	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1531 
1532 	   which is true if (x -> x op c) distributes over addition.  */
1533 	step_ok_p = distributes_over_addition_p (code, 1);
1534       else
1535 	/* Similarly in reverse.  */
1536 	step_ok_p = distributes_over_addition_p (code, 2);
1537       tree_vector_builder elts;
1538       if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1539 	return NULL_TREE;
1540       unsigned int count = elts.encoded_nelts ();
1541       for (unsigned int i = 0; i < count; ++i)
1542 	{
1543 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1544 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
1545 
1546 	  tree elt = const_binop (code, elem1, elem2);
1547 
1548 	  /* It is possible that const_binop cannot handle the given
1549 	     code and return NULL_TREE */
1550 	  if (elt == NULL_TREE)
1551 	    return NULL_TREE;
1552 	  elts.quick_push (elt);
1553 	}
1554 
1555       return elts.build ();
1556     }
1557 
1558   /* Shifts allow a scalar offset for a vector.  */
1559   if (TREE_CODE (arg1) == VECTOR_CST
1560       && TREE_CODE (arg2) == INTEGER_CST)
1561     {
1562       tree type = TREE_TYPE (arg1);
1563       bool step_ok_p = distributes_over_addition_p (code, 1);
1564       tree_vector_builder elts;
1565       if (!elts.new_unary_operation (type, arg1, step_ok_p))
1566 	return NULL_TREE;
1567       unsigned int count = elts.encoded_nelts ();
1568       for (unsigned int i = 0; i < count; ++i)
1569 	{
1570 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1571 
1572 	  tree elt = const_binop (code, elem1, arg2);
1573 
1574 	  /* It is possible that const_binop cannot handle the given
1575 	     code and return NULL_TREE.  */
1576 	  if (elt == NULL_TREE)
1577 	    return NULL_TREE;
1578 	  elts.quick_push (elt);
1579 	}
1580 
1581       return elts.build ();
1582     }
1583   return NULL_TREE;
1584 }
1585 
1586 /* Overload that adds a TYPE parameter to be able to dispatch
1587    to fold_relational_const.  */
1588 
1589 tree
const_binop(enum tree_code code,tree type,tree arg1,tree arg2)1590 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1591 {
1592   if (TREE_CODE_CLASS (code) == tcc_comparison)
1593     return fold_relational_const (code, type, arg1, arg2);
1594 
1595   /* ???  Until we make the const_binop worker take the type of the
1596      result as argument put those cases that need it here.  */
1597   switch (code)
1598     {
1599     case VEC_SERIES_EXPR:
1600       if (CONSTANT_CLASS_P (arg1)
1601 	  && CONSTANT_CLASS_P (arg2))
1602 	return build_vec_series (type, arg1, arg2);
1603       return NULL_TREE;
1604 
1605     case COMPLEX_EXPR:
1606       if ((TREE_CODE (arg1) == REAL_CST
1607 	   && TREE_CODE (arg2) == REAL_CST)
1608 	  || (TREE_CODE (arg1) == INTEGER_CST
1609 	      && TREE_CODE (arg2) == INTEGER_CST))
1610 	return build_complex (type, arg1, arg2);
1611       return NULL_TREE;
1612 
1613     case POINTER_DIFF_EXPR:
1614       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1615 	{
1616 	  offset_int res = wi::sub (wi::to_offset (arg1),
1617 				    wi::to_offset (arg2));
1618 	  return force_fit_type (type, res, 1,
1619 				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1620 	}
1621       return NULL_TREE;
1622 
1623     case VEC_PACK_TRUNC_EXPR:
1624     case VEC_PACK_FIX_TRUNC_EXPR:
1625       {
1626 	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1627 
1628 	if (TREE_CODE (arg1) != VECTOR_CST
1629 	    || TREE_CODE (arg2) != VECTOR_CST)
1630 	  return NULL_TREE;
1631 
1632 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1633 	  return NULL_TREE;
1634 
1635 	out_nelts = in_nelts * 2;
1636 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1637 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1638 
1639 	tree_vector_builder elts (type, out_nelts, 1);
1640 	for (i = 0; i < out_nelts; i++)
1641 	  {
1642 	    tree elt = (i < in_nelts
1643 			? VECTOR_CST_ELT (arg1, i)
1644 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1645 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1646 				      ? NOP_EXPR : FIX_TRUNC_EXPR,
1647 				      TREE_TYPE (type), elt);
1648 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1649 	      return NULL_TREE;
1650 	    elts.quick_push (elt);
1651 	  }
1652 
1653 	return elts.build ();
1654       }
1655 
1656     case VEC_WIDEN_MULT_LO_EXPR:
1657     case VEC_WIDEN_MULT_HI_EXPR:
1658     case VEC_WIDEN_MULT_EVEN_EXPR:
1659     case VEC_WIDEN_MULT_ODD_EXPR:
1660       {
1661 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1662 
1663 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1664 	  return NULL_TREE;
1665 
1666 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1667 	  return NULL_TREE;
1668 	out_nelts = in_nelts / 2;
1669 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1670 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1671 
1672 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1673 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1674 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1675 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1676 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1677 	  scale = 1, ofs = 0;
1678 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1679 	  scale = 1, ofs = 1;
1680 
1681 	tree_vector_builder elts (type, out_nelts, 1);
1682 	for (out = 0; out < out_nelts; out++)
1683 	  {
1684 	    unsigned int in = (out << scale) + ofs;
1685 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1686 					  VECTOR_CST_ELT (arg1, in));
1687 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1688 					  VECTOR_CST_ELT (arg2, in));
1689 
1690 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1691 	      return NULL_TREE;
1692 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1693 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1694 	      return NULL_TREE;
1695 	    elts.quick_push (elt);
1696 	  }
1697 
1698 	return elts.build ();
1699       }
1700 
1701     default:;
1702     }
1703 
1704   if (TREE_CODE_CLASS (code) != tcc_binary)
1705     return NULL_TREE;
1706 
1707   /* Make sure type and arg0 have the same saturating flag.  */
1708   gcc_checking_assert (TYPE_SATURATING (type)
1709 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1710 
1711   return const_binop (code, arg1, arg2);
1712 }
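
/* For instance (an illustrative sketch, not a call site in GCC), with
   cst4 and cst8 standing for hypothetical INTEGER_CST pointer values:

     const_binop (POINTER_DIFF_EXPR, ssizetype, cst8, cst4)

   is folded by the POINTER_DIFF_EXPR case above into the ssizetype
   constant cst8 - cst4, with the operands' overflow flags ORed into
   the result.  */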
1713 
1714 /* Compute unary operation CODE on constant operand ARG0, with result
1715    type TYPE.  Return NULL_TREE if the constant cannot be computed.  */
1716 
1717 tree
1718 const_unop (enum tree_code code, tree type, tree arg0)
1719 {
1720   /* Don't perform the operation, other than NEGATE and ABS, if
1721      flag_signaling_nans is on and the operand is a signaling NaN.  */
1722   if (TREE_CODE (arg0) == REAL_CST
1723       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1724       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1725       && code != NEGATE_EXPR
1726       && code != ABS_EXPR)
1727     return NULL_TREE;
1728 
1729   switch (code)
1730     {
1731     CASE_CONVERT:
1732     case FLOAT_EXPR:
1733     case FIX_TRUNC_EXPR:
1734     case FIXED_CONVERT_EXPR:
1735       return fold_convert_const (code, type, arg0);
1736 
1737     case ADDR_SPACE_CONVERT_EXPR:
1738       /* If the source address is 0, and the source address space
1739 	 cannot have a valid object at 0, fold to dest type null.  */
1740       if (integer_zerop (arg0)
1741 	  && !(targetm.addr_space.zero_address_valid
1742 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1743 	return fold_convert_const (code, type, arg0);
1744       break;
1745 
1746     case VIEW_CONVERT_EXPR:
1747       return fold_view_convert_expr (type, arg0);
1748 
1749     case NEGATE_EXPR:
1750       {
1751 	/* Can't call fold_negate_const directly here as that doesn't
1752 	   handle all cases and we might not be able to negate some
1753 	   constants.  */
1754 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1755 	if (tem && CONSTANT_CLASS_P (tem))
1756 	  return tem;
1757 	break;
1758       }
1759 
1760     case ABS_EXPR:
1761       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1762 	return fold_abs_const (arg0, type);
1763       break;
1764 
1765     case CONJ_EXPR:
1766       if (TREE_CODE (arg0) == COMPLEX_CST)
1767 	{
1768 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1769 					  TREE_TYPE (type));
1770 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1771 	}
1772       break;
1773 
1774     case BIT_NOT_EXPR:
1775       if (TREE_CODE (arg0) == INTEGER_CST)
1776 	return fold_not_const (arg0, type);
1777       else if (POLY_INT_CST_P (arg0))
1778 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1779       /* Perform BIT_NOT_EXPR on each element individually.  */
1780       else if (TREE_CODE (arg0) == VECTOR_CST)
1781 	{
1782 	  tree elem;
1783 
1784 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1785 	  tree_vector_builder elements;
1786 	  elements.new_unary_operation (type, arg0, true);
1787 	  unsigned int i, count = elements.encoded_nelts ();
1788 	  for (i = 0; i < count; ++i)
1789 	    {
1790 	      elem = VECTOR_CST_ELT (arg0, i);
1791 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1792 	      if (elem == NULL_TREE)
1793 		break;
1794 	      elements.quick_push (elem);
1795 	    }
1796 	  if (i == count)
1797 	    return elements.build ();
1798 	}
1799       break;
1800 
1801     case TRUTH_NOT_EXPR:
1802       if (TREE_CODE (arg0) == INTEGER_CST)
1803 	return constant_boolean_node (integer_zerop (arg0), type);
1804       break;
1805 
1806     case REALPART_EXPR:
1807       if (TREE_CODE (arg0) == COMPLEX_CST)
1808 	return fold_convert (type, TREE_REALPART (arg0));
1809       break;
1810 
1811     case IMAGPART_EXPR:
1812       if (TREE_CODE (arg0) == COMPLEX_CST)
1813 	return fold_convert (type, TREE_IMAGPART (arg0));
1814       break;
1815 
1816     case VEC_UNPACK_LO_EXPR:
1817     case VEC_UNPACK_HI_EXPR:
1818     case VEC_UNPACK_FLOAT_LO_EXPR:
1819     case VEC_UNPACK_FLOAT_HI_EXPR:
1820       {
1821 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1822 	enum tree_code subcode;
1823 
1824 	if (TREE_CODE (arg0) != VECTOR_CST)
1825 	  return NULL_TREE;
1826 
1827 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1828 	  return NULL_TREE;
1829 	out_nelts = in_nelts / 2;
1830 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1831 
1832 	unsigned int offset = 0;
1833 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1834 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
1835 	  offset = out_nelts;
1836 
1837 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1838 	  subcode = NOP_EXPR;
1839 	else
1840 	  subcode = FLOAT_EXPR;
1841 
1842 	tree_vector_builder elts (type, out_nelts, 1);
1843 	for (i = 0; i < out_nelts; i++)
1844 	  {
1845 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1846 					   VECTOR_CST_ELT (arg0, i + offset));
1847 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1848 	      return NULL_TREE;
1849 	    elts.quick_push (elt);
1850 	  }
1851 
1852 	return elts.build ();
1853       }
1854 
1855     case VEC_DUPLICATE_EXPR:
1856       if (CONSTANT_CLASS_P (arg0))
1857 	return build_vector_from_val (type, arg0);
1858       return NULL_TREE;
1859 
1860     default:
1861       break;
1862     }
1863 
1864   return NULL_TREE;
1865 }
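
/* A minimal sketch of use (illustrative only):

     tree five = build_int_cst (integer_type_node, 5);
     tree t = const_unop (BIT_NOT_EXPR, integer_type_node, five);

   Here t is the INTEGER_CST -6, computed by fold_not_const; a
   non-constant operand would make const_unop return NULL_TREE.  */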
1866 
1867 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1868    indicates which particular sizetype to create.  */
1869 
1870 tree
1871 size_int_kind (poly_int64 number, enum size_type_kind kind)
1872 {
1873   return build_int_cst (sizetype_tab[(int) kind], number);
1874 }
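
/* The common wrappers are macros over this function: size_int (16)
   expands to size_int_kind (16, stk_sizetype) and yields a sizetype
   INTEGER_CST of 16, while ssize_int and bitsize_int select the other
   kinds.  */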
1875 
1876 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1877    is a tree code.  The type of the result is taken from the operands.
1878    Both must be equivalent integer types, a la int_binop_types_match_p.
1879    If the operands are constant, so is the result.  */
1880 
1881 tree
1882 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1883 {
1884   tree type = TREE_TYPE (arg0);
1885 
1886   if (arg0 == error_mark_node || arg1 == error_mark_node)
1887     return error_mark_node;
1888 
1889   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1890                                        TREE_TYPE (arg1)));
1891 
1892   /* Handle the special case of two poly_int constants faster.  */
1893   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1894     {
1895       /* And some specific cases even faster than that.  */
1896       if (code == PLUS_EXPR)
1897 	{
1898 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1899 	    return arg1;
1900 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1901 	    return arg0;
1902 	}
1903       else if (code == MINUS_EXPR)
1904 	{
1905 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1906 	    return arg0;
1907 	}
1908       else if (code == MULT_EXPR)
1909 	{
1910 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1911 	    return arg1;
1912 	}
1913 
1914       /* Handle general case of two integer constants.  For sizetype
1915          constant calculations we always want to know about overflow,
1916 	 even in the unsigned case.  */
1917       tree res = int_const_binop_1 (code, arg0, arg1, -1);
1918       if (res != NULL_TREE)
1919 	return res;
1920     }
1921 
1922   return fold_build2_loc (loc, code, type, arg0, arg1);
1923 }
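
/* A sketch of the fast paths above (illustrative only):

     size_binop (PLUS_EXPR, size_int (8), size_zero_node)

   returns the first operand unchanged, while

     size_binop (PLUS_EXPR, size_int (8), size_int (4))

   is folded by int_const_binop_1 into the sizetype constant 12.  */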
1924 
1925 /* Given two values, either both of sizetype or both of bitsizetype,
1926    compute the difference between the two values.  Return the value
1927    in the signed type corresponding to the type of the operands.  */
1928 
1929 tree
1930 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1931 {
1932   tree type = TREE_TYPE (arg0);
1933   tree ctype;
1934 
1935   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1936 				       TREE_TYPE (arg1)));
1937 
1938   /* If the type is already signed, just do the simple thing.  */
1939   if (!TYPE_UNSIGNED (type))
1940     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1941 
1942   if (type == sizetype)
1943     ctype = ssizetype;
1944   else if (type == bitsizetype)
1945     ctype = sbitsizetype;
1946   else
1947     ctype = signed_type_for (type);
1948 
1949   /* If either operand is not a constant, do the conversions to the signed
1950      type and subtract.  The hardware will do the right thing with any
1951      overflow in the subtraction.  */
1952   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1953     return size_binop_loc (loc, MINUS_EXPR,
1954 			   fold_convert_loc (loc, ctype, arg0),
1955 			   fold_convert_loc (loc, ctype, arg1));
1956 
1957   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1958      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1959      overflow) and negate (which can't either).  Special-case a result
1960      of zero while we're here.  */
1961   if (tree_int_cst_equal (arg0, arg1))
1962     return build_int_cst (ctype, 0);
1963   else if (tree_int_cst_lt (arg1, arg0))
1964     return fold_convert_loc (loc, ctype,
1965 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1966   else
1967     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1968 			   fold_convert_loc (loc, ctype,
1969 					     size_binop_loc (loc,
1970 							     MINUS_EXPR,
1971 							     arg1, arg0)));
1972 }
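
/* For example, size_diffop on the sizetype constants 4 and 8 yields
   the ssizetype constant -4: the smaller operand is subtracted from
   the larger, the in-range difference is converted to the signed
   type, and the result is then negated by subtracting it from
   zero.  */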
1973 
1974 /* A subroutine of fold_convert_const handling conversions of an
1975    INTEGER_CST to another integer type.  */
1976 
1977 static tree
1978 fold_convert_const_int_from_int (tree type, const_tree arg1)
1979 {
1980   /* Given an integer constant, make new constant with new type,
1981      appropriately sign-extended or truncated.  Use widest_int
1982      so that any extension is done according to ARG1's type.  */
1983   return force_fit_type (type, wi::to_widest (arg1),
1984 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1985 			 TREE_OVERFLOW (arg1));
1986 }
1987 
1988 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1989    to an integer type.  */
1990 
1991 static tree
1992 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1993 {
1994   bool overflow = false;
1995   tree t;
1996 
1997   /* The following code implements the floating point to integer
1998      conversion rules required by the Java Language Specification,
1999      that IEEE NaNs are mapped to zero and values that overflow
2000      the target precision saturate, i.e. values greater than
2001      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2002      are mapped to INT_MIN.  These semantics are allowed by the
2003      C and C++ standards that simply state that the behavior of
2004      FP-to-integer conversion is unspecified upon overflow.  */
2005 
2006   wide_int val;
2007   REAL_VALUE_TYPE r;
2008   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2009 
2010   switch (code)
2011     {
2012     case FIX_TRUNC_EXPR:
2013       real_trunc (&r, VOIDmode, &x);
2014       break;
2015 
2016     default:
2017       gcc_unreachable ();
2018     }
2019 
2020   /* If R is NaN, return zero and show we have an overflow.  */
2021   if (REAL_VALUE_ISNAN (r))
2022     {
2023       overflow = true;
2024       val = wi::zero (TYPE_PRECISION (type));
2025     }
2026 
2027   /* See if R is less than the lower bound or greater than the
2028      upper bound.  */
2029 
2030   if (! overflow)
2031     {
2032       tree lt = TYPE_MIN_VALUE (type);
2033       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2034       if (real_less (&r, &l))
2035 	{
2036 	  overflow = true;
2037 	  val = wi::to_wide (lt);
2038 	}
2039     }
2040 
2041   if (! overflow)
2042     {
2043       tree ut = TYPE_MAX_VALUE (type);
2044       if (ut)
2045 	{
2046 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 	  if (real_less (&u, &r))
2048 	    {
2049 	      overflow = true;
2050 	      val = wi::to_wide (ut);
2051 	    }
2052 	}
2053     }
2054 
2055   if (! overflow)
2056     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2057 
2058   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2059   return t;
2060 }
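
/* Example of the saturating semantics (illustrative, assuming a
   32-bit int): converting the REAL_CST 1.0e30 to int yields INT_MAX
   with TREE_OVERFLOW set, and converting a NaN yields 0, likewise
   with TREE_OVERFLOW set.  */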
2061 
2062 /* A subroutine of fold_convert_const handling conversions of a
2063    FIXED_CST to an integer type.  */
2064 
2065 static tree
2066 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2067 {
2068   tree t;
2069   double_int temp, temp_trunc;
2070   scalar_mode mode;
2071 
2072   /* Right shift FIXED_CST to temp by fbit.  */
2073   temp = TREE_FIXED_CST (arg1).data;
2074   mode = TREE_FIXED_CST (arg1).mode;
2075   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2076     {
2077       temp = temp.rshift (GET_MODE_FBIT (mode),
2078 			  HOST_BITS_PER_DOUBLE_INT,
2079 			  SIGNED_FIXED_POINT_MODE_P (mode));
2080 
2081       /* Left shift temp to temp_trunc by fbit.  */
2082       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2083 				HOST_BITS_PER_DOUBLE_INT,
2084 				SIGNED_FIXED_POINT_MODE_P (mode));
2085     }
2086   else
2087     {
2088       temp = double_int_zero;
2089       temp_trunc = double_int_zero;
2090     }
2091 
2092   /* If FIXED_CST is negative, we need to round the value toward 0:
2093      if the fractional bits are not all zero, add 1 to temp.  */
2094   if (SIGNED_FIXED_POINT_MODE_P (mode)
2095       && temp_trunc.is_negative ()
2096       && TREE_FIXED_CST (arg1).data != temp_trunc)
2097     temp += double_int_one;
2098 
2099   /* Given a fixed-point constant, make new constant with new type,
2100      appropriately sign-extended or truncated.  */
2101   t = force_fit_type (type, temp, -1,
2102 		      (temp.is_negative ()
2103 		       && (TYPE_UNSIGNED (type)
2104 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2105 		      | TREE_OVERFLOW (arg1));
2106 
2107   return t;
2108 }
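
/* E.g. converting the fixed-point value -1.5 yields the integer -1:
   the arithmetic right shift alone gives -2, and the nonzero
   fractional bits then trigger the add-one correction above, rounding
   toward zero.  */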
2109 
2110 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2111    to another floating-point type.  */
2112 
2113 static tree
2114 fold_convert_const_real_from_real (tree type, const_tree arg1)
2115 {
2116   REAL_VALUE_TYPE value;
2117   tree t;
2118 
2119   /* Don't perform the operation if flag_signaling_nans is on
2120      and the operand is a signaling NaN.  */
2121   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2122       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2123     return NULL_TREE;
2124 
2125   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2126   t = build_real (type, value);
2127 
2128   /* If converting an infinity or NAN to a representation that doesn't
2129      have one, set the overflow bit so that we can produce some kind of
2130      error message at the appropriate point if necessary.  It's not the
2131      most user-friendly message, but it's better than nothing.  */
2132   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2133       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2134     TREE_OVERFLOW (t) = 1;
2135   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2136 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2137     TREE_OVERFLOW (t) = 1;
2138   /* Regular overflow, conversion produced an infinity in a mode that
2139      can't represent them.  */
2140   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2141 	   && REAL_VALUE_ISINF (value)
2142 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2143     TREE_OVERFLOW (t) = 1;
2144   else
2145     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2146   return t;
2147 }
2148 
2149 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2150    to a floating-point type.  */
2151 
2152 static tree
2153 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2154 {
2155   REAL_VALUE_TYPE value;
2156   tree t;
2157 
2158   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2159 			   &TREE_FIXED_CST (arg1));
2160   t = build_real (type, value);
2161 
2162   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2163   return t;
2164 }
2165 
2166 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2167    to another fixed-point type.  */
2168 
2169 static tree
2170 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2171 {
2172   FIXED_VALUE_TYPE value;
2173   tree t;
2174   bool overflow_p;
2175 
2176   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2177 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2178   t = build_fixed (type, value);
2179 
2180   /* Propagate overflow flags.  */
2181   if (overflow_p | TREE_OVERFLOW (arg1))
2182     TREE_OVERFLOW (t) = 1;
2183   return t;
2184 }
2185 
2186 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2187    to a fixed-point type.  */
2188 
2189 static tree
2190 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2191 {
2192   FIXED_VALUE_TYPE value;
2193   tree t;
2194   bool overflow_p;
2195   double_int di;
2196 
2197   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2198 
2199   di.low = TREE_INT_CST_ELT (arg1, 0);
2200   if (TREE_INT_CST_NUNITS (arg1) == 1)
2201     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2202   else
2203     di.high = TREE_INT_CST_ELT (arg1, 1);
2204 
2205   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2206 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2207 				       TYPE_SATURATING (type));
2208   t = build_fixed (type, value);
2209 
2210   /* Propagate overflow flags.  */
2211   if (overflow_p | TREE_OVERFLOW (arg1))
2212     TREE_OVERFLOW (t) = 1;
2213   return t;
2214 }
2215 
2216 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2217    to a fixed-point type.  */
2218 
2219 static tree
2220 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2221 {
2222   FIXED_VALUE_TYPE value;
2223   tree t;
2224   bool overflow_p;
2225 
2226   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2227 					&TREE_REAL_CST (arg1),
2228 					TYPE_SATURATING (type));
2229   t = build_fixed (type, value);
2230 
2231   /* Propagate overflow flags.  */
2232   if (overflow_p | TREE_OVERFLOW (arg1))
2233     TREE_OVERFLOW (t) = 1;
2234   return t;
2235 }
2236 
2237 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2238    type TYPE.  If no simplification can be done, return NULL_TREE.  */
2239 
2240 static tree
2241 fold_convert_const (enum tree_code code, tree type, tree arg1)
2242 {
2243   tree arg_type = TREE_TYPE (arg1);
2244   if (arg_type == type)
2245     return arg1;
2246 
2247   /* We can't widen types, since the runtime value could overflow the
2248      original type before being extended to the new type.  */
2249   if (POLY_INT_CST_P (arg1)
2250       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2251       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2252     return build_poly_int_cst (type,
2253 			       poly_wide_int::from (poly_int_cst_value (arg1),
2254 						    TYPE_PRECISION (type),
2255 						    TYPE_SIGN (arg_type)));
2256 
2257   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2258       || TREE_CODE (type) == OFFSET_TYPE)
2259     {
2260       if (TREE_CODE (arg1) == INTEGER_CST)
2261 	return fold_convert_const_int_from_int (type, arg1);
2262       else if (TREE_CODE (arg1) == REAL_CST)
2263 	return fold_convert_const_int_from_real (code, type, arg1);
2264       else if (TREE_CODE (arg1) == FIXED_CST)
2265 	return fold_convert_const_int_from_fixed (type, arg1);
2266     }
2267   else if (TREE_CODE (type) == REAL_TYPE)
2268     {
2269       if (TREE_CODE (arg1) == INTEGER_CST)
2270 	return build_real_from_int_cst (type, arg1);
2271       else if (TREE_CODE (arg1) == REAL_CST)
2272 	return fold_convert_const_real_from_real (type, arg1);
2273       else if (TREE_CODE (arg1) == FIXED_CST)
2274 	return fold_convert_const_real_from_fixed (type, arg1);
2275     }
2276   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2277     {
2278       if (TREE_CODE (arg1) == FIXED_CST)
2279 	return fold_convert_const_fixed_from_fixed (type, arg1);
2280       else if (TREE_CODE (arg1) == INTEGER_CST)
2281 	return fold_convert_const_fixed_from_int (type, arg1);
2282       else if (TREE_CODE (arg1) == REAL_CST)
2283 	return fold_convert_const_fixed_from_real (type, arg1);
2284     }
2285   else if (TREE_CODE (type) == VECTOR_TYPE)
2286     {
2287       if (TREE_CODE (arg1) == VECTOR_CST
2288 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2289 	{
2290 	  tree elttype = TREE_TYPE (type);
2291 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2292 	  /* We can't handle steps directly when extending, since the
2293 	     values need to wrap at the original precision first.  */
2294 	  bool step_ok_p
2295 	    = (INTEGRAL_TYPE_P (elttype)
2296 	       && INTEGRAL_TYPE_P (arg1_elttype)
2297 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2298 	  tree_vector_builder v;
2299 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2300 	    return NULL_TREE;
2301 	  unsigned int len = v.encoded_nelts ();
2302 	  for (unsigned int i = 0; i < len; ++i)
2303 	    {
2304 	      tree elt = VECTOR_CST_ELT (arg1, i);
2305 	      tree cvt = fold_convert_const (code, elttype, elt);
2306 	      if (cvt == NULL_TREE)
2307 		return NULL_TREE;
2308 	      v.quick_push (cvt);
2309 	    }
2310 	  return v.build ();
2311 	}
2312     }
2313   return NULL_TREE;
2314 }
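
/* Tying the dispatch together (illustrative only): converting the
   INTEGER_CST 300 to unsigned char goes through
   fold_convert_const_int_from_int and wraps to 44, while
   fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, <2.5>)
   reaches fold_convert_const_int_from_real and truncates to 2.  */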
2315 
2316 /* Construct a vector of zero elements of vector type TYPE.  */
2317 
2318 static tree
2319 build_zero_vector (tree type)
2320 {
2321   tree t;
2322 
2323   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2324   return build_vector_from_val (type, t);
2325 }
2326 
2327 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2328 
2329 bool
2330 fold_convertible_p (const_tree type, const_tree arg)
2331 {
2332   tree orig = TREE_TYPE (arg);
2333 
2334   if (type == orig)
2335     return true;
2336 
2337   if (TREE_CODE (arg) == ERROR_MARK
2338       || TREE_CODE (type) == ERROR_MARK
2339       || TREE_CODE (orig) == ERROR_MARK)
2340     return false;
2341 
2342   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2343     return true;
2344 
2345   switch (TREE_CODE (type))
2346     {
2347     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2348     case POINTER_TYPE: case REFERENCE_TYPE:
2349     case OFFSET_TYPE:
2350       return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2351 	      || TREE_CODE (orig) == OFFSET_TYPE);
2352 
2353     case REAL_TYPE:
2354     case FIXED_POINT_TYPE:
2355     case VECTOR_TYPE:
2356     case VOID_TYPE:
2357       return TREE_CODE (type) == TREE_CODE (orig);
2358 
2359     default:
2360       return false;
2361     }
2362 }
2363 
2364 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2365    simple conversions in preference to calling the front-end's convert.  */
2366 
2367 tree
2368 fold_convert_loc (location_t loc, tree type, tree arg)
2369 {
2370   tree orig = TREE_TYPE (arg);
2371   tree tem;
2372 
2373   if (type == orig)
2374     return arg;
2375 
2376   if (TREE_CODE (arg) == ERROR_MARK
2377       || TREE_CODE (type) == ERROR_MARK
2378       || TREE_CODE (orig) == ERROR_MARK)
2379     return error_mark_node;
2380 
2381   switch (TREE_CODE (type))
2382     {
2383     case POINTER_TYPE:
2384     case REFERENCE_TYPE:
2385       /* Handle conversions between pointers to different address spaces.  */
2386       if (POINTER_TYPE_P (orig)
2387 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2388 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2389 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2390       /* fall through */
2391 
2392     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2393     case OFFSET_TYPE:
2394       if (TREE_CODE (arg) == INTEGER_CST)
2395 	{
2396 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2397 	  if (tem != NULL_TREE)
2398 	    return tem;
2399 	}
2400       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2401 	  || TREE_CODE (orig) == OFFSET_TYPE)
2402 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2403       if (TREE_CODE (orig) == COMPLEX_TYPE)
2404 	return fold_convert_loc (loc, type,
2405 				 fold_build1_loc (loc, REALPART_EXPR,
2406 						  TREE_TYPE (orig), arg));
2407       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2408 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2409       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2410 
2411     case REAL_TYPE:
2412       if (TREE_CODE (arg) == INTEGER_CST)
2413 	{
2414 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2415 	  if (tem != NULL_TREE)
2416 	    return tem;
2417 	}
2418       else if (TREE_CODE (arg) == REAL_CST)
2419 	{
2420 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2421 	  if (tem != NULL_TREE)
2422 	    return tem;
2423 	}
2424       else if (TREE_CODE (arg) == FIXED_CST)
2425 	{
2426 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2427 	  if (tem != NULL_TREE)
2428 	    return tem;
2429 	}
2430 
2431       switch (TREE_CODE (orig))
2432 	{
2433 	case INTEGER_TYPE:
2434 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2435 	case POINTER_TYPE: case REFERENCE_TYPE:
2436 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2437 
2438 	case REAL_TYPE:
2439 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2440 
2441 	case FIXED_POINT_TYPE:
2442 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2443 
2444 	case COMPLEX_TYPE:
2445 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2446 	  return fold_convert_loc (loc, type, tem);
2447 
2448 	default:
2449 	  gcc_unreachable ();
2450 	}
2451 
2452     case FIXED_POINT_TYPE:
2453       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2454 	  || TREE_CODE (arg) == REAL_CST)
2455 	{
2456 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2457 	  if (tem != NULL_TREE)
2458 	    goto fold_convert_exit;
2459 	}
2460 
2461       switch (TREE_CODE (orig))
2462 	{
2463 	case FIXED_POINT_TYPE:
2464 	case INTEGER_TYPE:
2465 	case ENUMERAL_TYPE:
2466 	case BOOLEAN_TYPE:
2467 	case REAL_TYPE:
2468 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2469 
2470 	case COMPLEX_TYPE:
2471 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2472 	  return fold_convert_loc (loc, type, tem);
2473 
2474 	default:
2475 	  gcc_unreachable ();
2476 	}
2477 
2478     case COMPLEX_TYPE:
2479       switch (TREE_CODE (orig))
2480 	{
2481 	case INTEGER_TYPE:
2482 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2483 	case POINTER_TYPE: case REFERENCE_TYPE:
2484 	case REAL_TYPE:
2485 	case FIXED_POINT_TYPE:
2486 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2487 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2488 			      fold_convert_loc (loc, TREE_TYPE (type),
2489 					    integer_zero_node));
2490 	case COMPLEX_TYPE:
2491 	  {
2492 	    tree rpart, ipart;
2493 
2494 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2495 	      {
2496 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2497 				      TREE_OPERAND (arg, 0));
2498 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2499 				      TREE_OPERAND (arg, 1));
2500 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2501 	      }
2502 
2503 	    arg = save_expr (arg);
2504 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2505 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2506 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2507 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2508 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2509 	  }
2510 
2511 	default:
2512 	  gcc_unreachable ();
2513 	}
2514 
2515     case VECTOR_TYPE:
2516       if (integer_zerop (arg))
2517 	return build_zero_vector (type);
2518       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2519       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 		  || TREE_CODE (orig) == VECTOR_TYPE);
2521       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2522 
2523     case VOID_TYPE:
2524       tem = fold_ignored_result (arg);
2525       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2526 
2527     default:
2528       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2529 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2530       gcc_unreachable ();
2531     }
2532  fold_convert_exit:
2533   protected_set_expr_location_unshare (tem, loc);
2534   return tem;
2535 }
2536 
2537 /* Return false if expr can be assumed not to be an lvalue, true
2538    otherwise.  */
2539 
2540 static bool
2541 maybe_lvalue_p (const_tree x)
2542 {
2543   /* We only need to wrap lvalue tree codes.  */
2544   switch (TREE_CODE (x))
2545   {
2546   case VAR_DECL:
2547   case PARM_DECL:
2548   case RESULT_DECL:
2549   case LABEL_DECL:
2550   case FUNCTION_DECL:
2551   case SSA_NAME:
2552 
2553   case COMPONENT_REF:
2554   case MEM_REF:
2555   case INDIRECT_REF:
2556   case ARRAY_REF:
2557   case ARRAY_RANGE_REF:
2558   case BIT_FIELD_REF:
2559   case OBJ_TYPE_REF:
2560 
2561   case REALPART_EXPR:
2562   case IMAGPART_EXPR:
2563   case PREINCREMENT_EXPR:
2564   case PREDECREMENT_EXPR:
2565   case SAVE_EXPR:
2566   case TRY_CATCH_EXPR:
2567   case WITH_CLEANUP_EXPR:
2568   case COMPOUND_EXPR:
2569   case MODIFY_EXPR:
2570   case TARGET_EXPR:
2571   case COND_EXPR:
2572   case BIND_EXPR:
2573     break;
2574 
2575   default:
2576     /* Assume the worst for front-end tree codes.  */
2577     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2578       break;
2579     return false;
2580   }
2581 
2582   return true;
2583 }
2584 
2585 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2586 
2587 tree
2588 non_lvalue_loc (location_t loc, tree x)
2589 {
2590   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2591      us.  */
2592   if (in_gimple_form)
2593     return x;
2594 
2595   if (! maybe_lvalue_p (x))
2596     return x;
2597   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2598 }
2599 
2600 /* When pedantic, return an expr equal to X but certainly not valid as a
2601    pedantic lvalue.  Otherwise, return X.  */
2602 
2603 static tree
2604 pedantic_non_lvalue_loc (location_t loc, tree x)
2605 {
2606   return protected_set_expr_location_unshare (x, loc);
2607 }
2608 
2609 /* Given a tree comparison code, return the code that is the logical inverse.
2610    It is generally not safe to do this for floating-point comparisons, except
2611    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2612    ERROR_MARK in this case.  */
2613 
2614 enum tree_code
2615 invert_tree_comparison (enum tree_code code, bool honor_nans)
2616 {
2617   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2618       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2619     return ERROR_MARK;
2620 
2621   switch (code)
2622     {
2623     case EQ_EXPR:
2624       return NE_EXPR;
2625     case NE_EXPR:
2626       return EQ_EXPR;
2627     case GT_EXPR:
2628       return honor_nans ? UNLE_EXPR : LE_EXPR;
2629     case GE_EXPR:
2630       return honor_nans ? UNLT_EXPR : LT_EXPR;
2631     case LT_EXPR:
2632       return honor_nans ? UNGE_EXPR : GE_EXPR;
2633     case LE_EXPR:
2634       return honor_nans ? UNGT_EXPR : GT_EXPR;
2635     case LTGT_EXPR:
2636       return UNEQ_EXPR;
2637     case UNEQ_EXPR:
2638       return LTGT_EXPR;
2639     case UNGT_EXPR:
2640       return LE_EXPR;
2641     case UNGE_EXPR:
2642       return LT_EXPR;
2643     case UNLT_EXPR:
2644       return GE_EXPR;
2645     case UNLE_EXPR:
2646       return GT_EXPR;
2647     case ORDERED_EXPR:
2648       return UNORDERED_EXPR;
2649     case UNORDERED_EXPR:
2650       return ORDERED_EXPR;
2651     default:
2652       gcc_unreachable ();
2653     }
2654 }
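
/* For example, the inverse of x < y is x >= y when NaNs cannot occur,
   but UNGE_EXPR (unordered or greater-or-equal) when they can; and if
   flag_trapping_math is additionally set, the inversion is refused
   with ERROR_MARK, because the unordered forms do not trap on
   unordered operands where LT_EXPR would.  */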
2655 
2656 /* Similar, but return the comparison that results if the operands are
2657    swapped.  This is safe for floating-point.  */
2658 
2659 enum tree_code
2660 swap_tree_comparison (enum tree_code code)
2661 {
2662   switch (code)
2663     {
2664     case EQ_EXPR:
2665     case NE_EXPR:
2666     case ORDERED_EXPR:
2667     case UNORDERED_EXPR:
2668     case LTGT_EXPR:
2669     case UNEQ_EXPR:
2670       return code;
2671     case GT_EXPR:
2672       return LT_EXPR;
2673     case GE_EXPR:
2674       return LE_EXPR;
2675     case LT_EXPR:
2676       return GT_EXPR;
2677     case LE_EXPR:
2678       return GE_EXPR;
2679     case UNGT_EXPR:
2680       return UNLT_EXPR;
2681     case UNGE_EXPR:
2682       return UNLE_EXPR;
2683     case UNLT_EXPR:
2684       return UNGT_EXPR;
2685     case UNLE_EXPR:
2686       return UNGE_EXPR;
2687     default:
2688       gcc_unreachable ();
2689     }
2690 }
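
/* E.g. swap_tree_comparison (LT_EXPR) is GT_EXPR: x < y and y > x are
   the same test even for NaN operands, which is why no honor_nans
   argument is needed here.  */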
2691 
2692 
2693 /* Convert a comparison tree code from an enum tree_code representation
2694    into a compcode bit-based encoding.  This function is the inverse of
2695    compcode_to_comparison.  */
2696 
2697 static enum comparison_code
2698 comparison_to_compcode (enum tree_code code)
2699 {
2700   switch (code)
2701     {
2702     case LT_EXPR:
2703       return COMPCODE_LT;
2704     case EQ_EXPR:
2705       return COMPCODE_EQ;
2706     case LE_EXPR:
2707       return COMPCODE_LE;
2708     case GT_EXPR:
2709       return COMPCODE_GT;
2710     case NE_EXPR:
2711       return COMPCODE_NE;
2712     case GE_EXPR:
2713       return COMPCODE_GE;
2714     case ORDERED_EXPR:
2715       return COMPCODE_ORD;
2716     case UNORDERED_EXPR:
2717       return COMPCODE_UNORD;
2718     case UNLT_EXPR:
2719       return COMPCODE_UNLT;
2720     case UNEQ_EXPR:
2721       return COMPCODE_UNEQ;
2722     case UNLE_EXPR:
2723       return COMPCODE_UNLE;
2724     case UNGT_EXPR:
2725       return COMPCODE_UNGT;
2726     case LTGT_EXPR:
2727       return COMPCODE_LTGT;
2728     case UNGE_EXPR:
2729       return COMPCODE_UNGE;
2730     default:
2731       gcc_unreachable ();
2732     }
2733 }
2734 
2735 /* Convert a compcode bit-based encoding of a comparison operator back
2736    to GCC's enum tree_code representation.  This function is the
2737    inverse of comparison_to_compcode.  */
2738 
2739 static enum tree_code
2740 compcode_to_comparison (enum comparison_code code)
2741 {
2742   switch (code)
2743     {
2744     case COMPCODE_LT:
2745       return LT_EXPR;
2746     case COMPCODE_EQ:
2747       return EQ_EXPR;
2748     case COMPCODE_LE:
2749       return LE_EXPR;
2750     case COMPCODE_GT:
2751       return GT_EXPR;
2752     case COMPCODE_NE:
2753       return NE_EXPR;
2754     case COMPCODE_GE:
2755       return GE_EXPR;
2756     case COMPCODE_ORD:
2757       return ORDERED_EXPR;
2758     case COMPCODE_UNORD:
2759       return UNORDERED_EXPR;
2760     case COMPCODE_UNLT:
2761       return UNLT_EXPR;
2762     case COMPCODE_UNEQ:
2763       return UNEQ_EXPR;
2764     case COMPCODE_UNLE:
2765       return UNLE_EXPR;
2766     case COMPCODE_UNGT:
2767       return UNGT_EXPR;
2768     case COMPCODE_LTGT:
2769       return LTGT_EXPR;
2770     case COMPCODE_UNGE:
2771       return UNGE_EXPR;
2772     default:
2773       gcc_unreachable ();
2774     }
2775 }
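
/* The encoding turns logical combination into bit arithmetic, e.g.

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)	(1 | 2 == 3)
     COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE)	(8 | 6 == 14)

   so ANDing or ORing two compcodes combines the underlying predicates
   directly.  */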
2776 
2777 /* Return a tree for the comparison which is the combination of
2778    doing the AND or OR (depending on CODE) of the two operations LCODE
2779    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2780    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2781    if this makes the transformation invalid.  */
2782 
2783 tree
2784 combine_comparisons (location_t loc,
2785 		     enum tree_code code, enum tree_code lcode,
2786 		     enum tree_code rcode, tree truth_type,
2787 		     tree ll_arg, tree lr_arg)
2788 {
2789   bool honor_nans = HONOR_NANS (ll_arg);
2790   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2791   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2792   int compcode;
2793 
2794   switch (code)
2795     {
2796     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2797       compcode = lcompcode & rcompcode;
2798       break;
2799 
2800     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2801       compcode = lcompcode | rcompcode;
2802       break;
2803 
2804     default:
2805       return NULL_TREE;
2806     }
2807 
2808   if (!honor_nans)
2809     {
2810       /* Eliminate unordered comparisons, as well as LTGT and ORD
2811 	 which are not used unless the mode has NaNs.  */
2812       compcode &= ~COMPCODE_UNORD;
2813       if (compcode == COMPCODE_LTGT)
2814 	compcode = COMPCODE_NE;
2815       else if (compcode == COMPCODE_ORD)
2816 	compcode = COMPCODE_TRUE;
2817     }
2818    else if (flag_trapping_math)
2819      {
2820 	/* Check that the original operation and the optimized ones will trap
2821 	   under the same condition.  */
2822 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2823 		     && (lcompcode != COMPCODE_EQ)
2824 		     && (lcompcode != COMPCODE_ORD);
2825 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2826 		     && (rcompcode != COMPCODE_EQ)
2827 		     && (rcompcode != COMPCODE_ORD);
2828 	bool trap = (compcode & COMPCODE_UNORD) == 0
2829 		    && (compcode != COMPCODE_EQ)
2830 		    && (compcode != COMPCODE_ORD);
2831 
2832         /* In a short-circuited boolean expression the LHS might be
2833 	   such that the RHS, if evaluated, will never trap.  For
2834 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2835 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2836 	   example, the expression above will never trap, hence
2837 	   optimizing it to x < y would be invalid).  */
2838         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2839             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2840           rtrap = false;
2841 
2842         /* If the comparison was short-circuited, and only the RHS
2843 	   trapped, we may now generate a spurious trap.  */
2844 	if (rtrap && !ltrap
2845 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2846 	  return NULL_TREE;
2847 
2848 	/* If we changed the conditions that cause a trap, we lose.  */
2849 	if ((ltrap || rtrap) != trap)
2850 	  return NULL_TREE;
2851       }
2852 
2853   if (compcode == COMPCODE_TRUE)
2854     return constant_boolean_node (true, truth_type);
2855   else if (compcode == COMPCODE_FALSE)
2856     return constant_boolean_node (false, truth_type);
2857   else
2858     {
2859       enum tree_code tcode;
2860 
2861       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2862       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2863     }
2864 }
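
/* Worked example (NaNs not honored): for (x <= y) && (x >= y) the
   combination computes COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2
   == COMPCODE_EQ, so the whole expression folds to x == y.  */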
2865 
2866 /* Return nonzero if two operands (typically of the same tree node)
2867    are necessarily equal. FLAGS modifies behavior as follows:
2868 
2869    If OEP_ONLY_CONST is set, only return nonzero for constants.
2870    This function tests whether the operands are indistinguishable;
2871    it does not test whether they are equal using C's == operation.
2872    The distinction is important for IEEE floating point, because
2873    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2874    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2875 
2876    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2877    even though it may hold multiple values during a function.
2878    This is because a GCC tree node guarantees that nothing else is
2879    executed between the evaluation of its "operands" (which may often
2880    be evaluated in arbitrary order).  Hence if the operands themselves
2881    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2882    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2883    unset means assuming isochronic (or instantaneous) tree equivalence.
2884    Unless comparing arbitrary expression trees, such as from different
2885    statements, this flag can usually be left unset.
2886 
2887    If OEP_PURE_SAME is set, then pure functions with identical arguments
2888    are considered the same.  It is used when the caller has other ways
2889    to ensure that global memory is unchanged in between.
2890 
2891    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2892    not values of expressions.
2893 
2894    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2895    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2896 
2897    If OEP_BITWISE is set, then require the values to be bitwise identical
2898    rather than simply numerically equal.  Do not take advantage of things
2899    like math-related flags or undefined behavior; only return true for
2900    values that are provably bitwise identical in all circumstances.
2901 
2902    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2903    any operand with side effects.  This is unnecessarily conservative in the
2904    case where we know that arg0 and arg1 are in disjoint code paths (as in the
2905    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2906    addresses with TREE_CONSTANT flag set so we know that &var == &var
2907    even if var is volatile.  */
2908 
2909 int
2910 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2911 {
2912   /* When checking, verify at the outermost operand_equal_p call that
2913      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2914      hash value.  */
2915   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2916     {
2917       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2918 	{
2919 	  if (arg0 != arg1)
2920 	    {
2921 	      inchash::hash hstate0 (0), hstate1 (0);
2922 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2923 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2924 	      hashval_t h0 = hstate0.end ();
2925 	      hashval_t h1 = hstate1.end ();
2926 	      gcc_assert (h0 == h1);
2927 	    }
2928 	  return 1;
2929 	}
2930       else
2931 	return 0;
2932     }
2933 
2934   /* If either is ERROR_MARK, they aren't equal.  */
2935   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2936       || TREE_TYPE (arg0) == error_mark_node
2937       || TREE_TYPE (arg1) == error_mark_node)
2938     return 0;
2939 
2940   /* Similarly, if either does not have a type (like a released SSA name),
2941      they aren't equal.  */
2942   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2943     return 0;
2944 
2945   /* Bitwise identity makes no sense if the values have different layouts.  */
2946   if ((flags & OEP_BITWISE)
2947       && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2948     return 0;
2949 
2950   /* We cannot consider pointers to different address spaces equal.  */
2951   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2952       && POINTER_TYPE_P (TREE_TYPE (arg1))
2953       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2954 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2955     return 0;
2956 
2957   /* Check equality of integer constants before bailing out due to
2958      precision differences.  */
2959   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2960     {
2961       /* Address of INTEGER_CST is not defined; check that we did not forget
2962 	 to drop the OEP_ADDRESS_OF flags.  */
2963       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2964       return tree_int_cst_equal (arg0, arg1);
2965     }
2966 
2967   if (!(flags & OEP_ADDRESS_OF))
2968     {
2969       /* If both types don't have the same signedness, then we can't consider
2970 	 them equal.  We must check this before the STRIP_NOPS calls
2971 	 because they may change the signedness of the arguments.  As pointers
2972 	 strictly don't have a signedness, require either two pointers or
2973 	 two non-pointers as well.  */
2974       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2975 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
2976 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
2977 	return 0;
2978 
2979       /* If both types don't have the same precision, then it is not safe
2980 	 to strip NOPs.  */
2981       if (element_precision (TREE_TYPE (arg0))
2982 	  != element_precision (TREE_TYPE (arg1)))
2983 	return 0;
2984 
2985       STRIP_NOPS (arg0);
2986       STRIP_NOPS (arg1);
2987     }
2988 #if 0
2989   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2990      sanity check once the issue is solved.  */
2991   else
2992     /* Addresses of conversions and SSA_NAMEs (and many other things)
2993        are not defined.  Check that we did not forget to drop the
2994        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
2995     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2996 			 && TREE_CODE (arg0) != SSA_NAME);
2997 #endif
2998 
2999   /* In case both args are comparisons but with different comparison
3000      code, try to swap the comparison operands of one arg to produce
3001      a match and compare that variant.  */
3002   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3003       && COMPARISON_CLASS_P (arg0)
3004       && COMPARISON_CLASS_P (arg1))
3005     {
3006       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3007 
3008       if (TREE_CODE (arg0) == swap_code)
3009 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3010 			        TREE_OPERAND (arg1, 1), flags)
3011 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3012 				   TREE_OPERAND (arg1, 0), flags);
3013     }
3014 
3015   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3016     {
3017       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3018       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3019 	;
3020       else if (flags & OEP_ADDRESS_OF)
3021 	{
3022 	  /* If we are interested in comparing addresses ignore
3023 	     MEM_REF wrappings of the base that can appear just for
3024 	     TBAA reasons.  */
3025 	  if (TREE_CODE (arg0) == MEM_REF
3026 	      && DECL_P (arg1)
3027 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3028 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3029 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3030 	    return 1;
3031 	  else if (TREE_CODE (arg1) == MEM_REF
3032 		   && DECL_P (arg0)
3033 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3034 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3035 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3036 	    return 1;
3037 	  return 0;
3038 	}
3039       else
3040 	return 0;
3041     }
3042 
3043   /* When not checking addresses, this is needed for conversions and for
3044      COMPONENT_REF.  Might as well play it safe and always test this.  */
3045   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3046       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3047       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3048 	  && !(flags & OEP_ADDRESS_OF)))
3049     return 0;
3050 
3051   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3052      We don't care about side effects in that case because the SAVE_EXPR
3053      takes care of that for us. In all other cases, two expressions are
3054      equal if they have no side effects.  If we have two identical
3055      expressions with side effects that should be treated the same due
3056      to the only side effects being identical SAVE_EXPR's, that will
3057      be detected in the recursive calls below.
3058      If we are taking an invariant address of two identical objects
3059      they are necessarily equal as well.  */
3060   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3061       && (TREE_CODE (arg0) == SAVE_EXPR
3062 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3063 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3064     return 1;
3065 
3066   /* Next handle constant cases, those for which we can return 1 even
3067      if ONLY_CONST is set.  */
3068   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3069     switch (TREE_CODE (arg0))
3070       {
3071       case INTEGER_CST:
3072 	return tree_int_cst_equal (arg0, arg1);
3073 
3074       case FIXED_CST:
3075 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3076 				       TREE_FIXED_CST (arg1));
3077 
3078       case REAL_CST:
3079 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3080 	  return 1;
3081 
3082 	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3083 	  {
3084 	    /* If we do not distinguish between signed and unsigned zero,
3085 	       consider them equal.  */
3086 	    if (real_zerop (arg0) && real_zerop (arg1))
3087 	      return 1;
3088 	  }
3089 	return 0;
3090 
3091       case VECTOR_CST:
3092 	{
3093 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3094 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3095 	    return 0;
3096 
3097 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3098 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3099 	    return 0;
3100 
3101 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3102 	  for (unsigned int i = 0; i < count; ++i)
3103 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3104 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3105 	      return 0;
3106 	  return 1;
3107 	}
3108 
3109       case COMPLEX_CST:
3110 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3111 				 flags)
3112 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3113 				    flags));
3114 
3115       case STRING_CST:
3116 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3117 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3118 			      TREE_STRING_POINTER (arg1),
3119 			      TREE_STRING_LENGTH (arg0)));
3120 
3121       case ADDR_EXPR:
3122 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3123 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3124 				flags | OEP_ADDRESS_OF
3125 				| OEP_MATCH_SIDE_EFFECTS);
3126       case CONSTRUCTOR:
3127 	/* In GIMPLE empty constructors are allowed in initializers of
3128 	   aggregates.  */
3129 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3130       default:
3131 	break;
3132       }
3133 
3134   /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3135      two instances of undefined behavior will give identical results.  */
3136   if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3137     return 0;
3138 
3139 /* Define macros to test an operand from arg0 and arg1 for equality and a
3140    variant that allows null and views null as being different from any
3141    non-null value.  In the latter case, if either is null, both
3142    must be; otherwise, do the normal comparison.  */
3143 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3144 				    TREE_OPERAND (arg1, N), flags)
3145 
3146 #define OP_SAME_WITH_NULL(N)				\
3147   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3148    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3149 
3150   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3151     {
3152     case tcc_unary:
3153       /* Two conversions are equal only if signedness and modes match.  */
3154       switch (TREE_CODE (arg0))
3155         {
3156 	CASE_CONVERT:
3157         case FIX_TRUNC_EXPR:
3158 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3159 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3160 	    return 0;
3161 	  break;
3162 	default:
3163 	  break;
3164 	}
3165 
3166       return OP_SAME (0);
3167 
3168 
3169     case tcc_comparison:
3170     case tcc_binary:
3171       if (OP_SAME (0) && OP_SAME (1))
3172 	return 1;
3173 
3174       /* For commutative ops, allow the other order.  */
3175       return (commutative_tree_code (TREE_CODE (arg0))
3176 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3177 				  TREE_OPERAND (arg1, 1), flags)
3178 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3179 				  TREE_OPERAND (arg1, 0), flags));
3180 
3181     case tcc_reference:
3182       /* If either of the pointer (or reference) expressions we are
3183 	 dereferencing contain a side effect, these cannot be equal,
3184 	 but their addresses can be.  */
3185       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3186 	  && (TREE_SIDE_EFFECTS (arg0)
3187 	      || TREE_SIDE_EFFECTS (arg1)))
3188 	return 0;
3189 
3190       switch (TREE_CODE (arg0))
3191 	{
3192 	case INDIRECT_REF:
3193 	  if (!(flags & OEP_ADDRESS_OF))
3194 	    {
3195 	      if (TYPE_ALIGN (TREE_TYPE (arg0))
3196 		  != TYPE_ALIGN (TREE_TYPE (arg1)))
3197 		return 0;
3198 	      /* Verify that the access types are compatible.  */
3199 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3200 		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3201 		return 0;
3202 	    }
3203 	  flags &= ~OEP_ADDRESS_OF;
3204 	  return OP_SAME (0);
3205 
3206 	case IMAGPART_EXPR:
3207 	  /* Require the same offset.  */
3208 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3209 				TYPE_SIZE (TREE_TYPE (arg1)),
3210 				flags & ~OEP_ADDRESS_OF))
3211 	    return 0;
3212 
3213 	/* Fallthru.  */
3214 	case REALPART_EXPR:
3215 	case VIEW_CONVERT_EXPR:
3216 	  return OP_SAME (0);
3217 
3218 	case TARGET_MEM_REF:
3219 	case MEM_REF:
3220 	  if (!(flags & OEP_ADDRESS_OF))
3221 	    {
3222 	      /* Require equal access sizes.  */
3223 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3224 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3225 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3226 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3227 					   TYPE_SIZE (TREE_TYPE (arg1)),
3228 					   flags)))
3229 		return 0;
3230 	      /* Verify that access happens in similar types.  */
3231 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3232 		return 0;
3233 	      /* Verify that accesses are TBAA compatible.  */
3234 	      if (!alias_ptr_types_compatible_p
3235 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3236 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3237 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3238 		      != MR_DEPENDENCE_CLIQUE (arg1))
3239 		  || (MR_DEPENDENCE_BASE (arg0)
3240 		      != MR_DEPENDENCE_BASE (arg1)))
3241 		return 0;
3242 	     /* Verify that alignment is compatible.  */
3243 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3244 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3245 		return 0;
3246 	    }
3247 	  flags &= ~OEP_ADDRESS_OF;
3248 	  return (OP_SAME (0) && OP_SAME (1)
3249 		  /* TARGET_MEM_REFs require equal extra operands.  */
3250 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3251 		      || (OP_SAME_WITH_NULL (2)
3252 			  && OP_SAME_WITH_NULL (3)
3253 			  && OP_SAME_WITH_NULL (4))));
3254 
3255 	case ARRAY_REF:
3256 	case ARRAY_RANGE_REF:
3257 	  if (!OP_SAME (0))
3258 	    return 0;
3259 	  flags &= ~OEP_ADDRESS_OF;
3260 	  /* Compare the array index by value first if it is constant, as we
3261 	     may have indexes of different types but the same value here.  */
3262 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3263 				       TREE_OPERAND (arg1, 1))
3264 		   || OP_SAME (1))
3265 		  && OP_SAME_WITH_NULL (2)
3266 		  && OP_SAME_WITH_NULL (3)
3267 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3268 		     we have to account for the offset of the ref.  */
3269 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3270 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3271 		      || (operand_equal_p (array_ref_low_bound
3272 					     (CONST_CAST_TREE (arg0)),
3273 					   array_ref_low_bound
3274 					     (CONST_CAST_TREE (arg1)), flags)
3275 			  && operand_equal_p (array_ref_element_size
3276 					        (CONST_CAST_TREE (arg0)),
3277 					      array_ref_element_size
3278 					        (CONST_CAST_TREE (arg1)),
3279 					      flags))));
3280 
3281 	case COMPONENT_REF:
3282 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3283 	     may be NULL when we're called to compare MEM_EXPRs.  */
3284 	  if (!OP_SAME_WITH_NULL (0)
3285 	      || !OP_SAME (1))
3286 	    return 0;
3287 	  flags &= ~OEP_ADDRESS_OF;
3288 	  return OP_SAME_WITH_NULL (2);
3289 
3290 	case BIT_FIELD_REF:
3291 	  if (!OP_SAME (0))
3292 	    return 0;
3293 	  flags &= ~OEP_ADDRESS_OF;
3294 	  return OP_SAME (1) && OP_SAME (2);
3295 
3296 	default:
3297 	  return 0;
3298 	}
3299 
3300     case tcc_expression:
3301       switch (TREE_CODE (arg0))
3302 	{
3303 	case ADDR_EXPR:
3304 	  /* Be sure we pass the right ADDRESS_OF flag.  */
3305 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3306 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3307 				  TREE_OPERAND (arg1, 0),
3308 				  flags | OEP_ADDRESS_OF);
3309 
3310 	case TRUTH_NOT_EXPR:
3311 	  return OP_SAME (0);
3312 
3313 	case TRUTH_ANDIF_EXPR:
3314 	case TRUTH_ORIF_EXPR:
3315 	  return OP_SAME (0) && OP_SAME (1);
3316 
3317 	case FMA_EXPR:
3318 	case WIDEN_MULT_PLUS_EXPR:
3319 	case WIDEN_MULT_MINUS_EXPR:
3320 	  if (!OP_SAME (2))
3321 	    return 0;
3322 	  /* The multiplication operands are commutative.  */
3323 	  /* FALLTHRU */
3324 
3325 	case TRUTH_AND_EXPR:
3326 	case TRUTH_OR_EXPR:
3327 	case TRUTH_XOR_EXPR:
3328 	  if (OP_SAME (0) && OP_SAME (1))
3329 	    return 1;
3330 
3331 	  /* Otherwise take into account this is a commutative operation.  */
3332 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3333 				   TREE_OPERAND (arg1, 1), flags)
3334 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3335 				      TREE_OPERAND (arg1, 0), flags));
3336 
3337 	case COND_EXPR:
3338 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3339 	    return 0;
3340 	  flags &= ~OEP_ADDRESS_OF;
3341 	  return OP_SAME (0);
3342 
3343 	case BIT_INSERT_EXPR:
3344 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3345 	     of op1.  Need to check to make sure they are the same.  */
3346 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3347 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3348 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3349 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3350 	    return false;
3351 	  /* FALLTHRU */
3352 
3353 	case VEC_COND_EXPR:
3354 	case DOT_PROD_EXPR:
3355 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3356 
3357 	case MODIFY_EXPR:
3358 	case INIT_EXPR:
3359 	case COMPOUND_EXPR:
3360 	case PREDECREMENT_EXPR:
3361 	case PREINCREMENT_EXPR:
3362 	case POSTDECREMENT_EXPR:
3363 	case POSTINCREMENT_EXPR:
3364 	  if (flags & OEP_LEXICOGRAPHIC)
3365 	    return OP_SAME (0) && OP_SAME (1);
3366 	  return 0;
3367 
3368 	case CLEANUP_POINT_EXPR:
3369 	case EXPR_STMT:
3370 	  if (flags & OEP_LEXICOGRAPHIC)
3371 	    return OP_SAME (0);
3372 	  return 0;
3373 
3374 	default:
3375 	  return 0;
3376 	}
3377 
3378     case tcc_vl_exp:
3379       switch (TREE_CODE (arg0))
3380 	{
3381 	case CALL_EXPR:
3382 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3383 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3384 	    /* If the two CALL_EXPRs are not both internal or both normal
3385 	       function calls, then they are not equal.  */
3386 	    return 0;
3387 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3388 	    {
3389 	      /* If the CALL_EXPRs call different internal functions, then they
3390 		 are not equal.  */
3391 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3392 		return 0;
3393 	    }
3394 	  else
3395 	    {
3396 	      /* If the CALL_EXPRs call different functions, then they are not
3397 		 equal.  */
3398 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3399 				     flags))
3400 		return 0;
3401 	    }
3402 
3403 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3404 	  {
3405 	    unsigned int cef = call_expr_flags (arg0);
3406 	    if (flags & OEP_PURE_SAME)
3407 	      cef &= ECF_CONST | ECF_PURE;
3408 	    else
3409 	      cef &= ECF_CONST;
3410 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3411 	      return 0;
3412 	  }
3413 
3414 	  /* Now see if all the arguments are the same.  */
3415 	  {
3416 	    const_call_expr_arg_iterator iter0, iter1;
3417 	    const_tree a0, a1;
3418 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3419 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3420 		 a0 && a1;
3421 		 a0 = next_const_call_expr_arg (&iter0),
3422 		   a1 = next_const_call_expr_arg (&iter1))
3423 	      if (! operand_equal_p (a0, a1, flags))
3424 		return 0;
3425 
3426 	    /* If we get here and both argument lists are exhausted
3427 	       then the CALL_EXPRs are equal.  */
3428 	    return ! (a0 || a1);
3429 	  }
3430 	default:
3431 	  return 0;
3432 	}
3433 
3434     case tcc_declaration:
3435       /* Consider __builtin_sqrt equal to sqrt.  */
3436       return (TREE_CODE (arg0) == FUNCTION_DECL
3437 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3438 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3439 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3440 
3441     case tcc_exceptional:
3442       if (TREE_CODE (arg0) == CONSTRUCTOR)
3443 	{
3444 	  /* In GIMPLE constructors are used only to build vectors from
3445 	     elements.  Individual elements in the constructor must be
3446 	     indexed in increasing order and form an initial sequence.
3447 
3448 	     We make no effort to compare constructors in GENERIC.
3449 	     (see sem_variable::equals in ipa-icf which can do so for
3450 	      constants).  */
3451 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3452 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3453 	    return 0;
3454 
3455 	  /* Be sure that vectors constructed have the same representation.
3456 	     We have only tested that element precision and modes match.
3457 	     Vectors may be BLKmode, so also check that the number of
3458 	     parts matches.  */
3459 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3460 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3461 	    return 0;
3462 
3463 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3464 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3465 	  unsigned int len = vec_safe_length (v0);
3466 
3467 	  if (len != vec_safe_length (v1))
3468 	    return 0;
3469 
3470 	  for (unsigned int i = 0; i < len; i++)
3471 	    {
3472 	      constructor_elt *c0 = &(*v0)[i];
3473 	      constructor_elt *c1 = &(*v1)[i];
3474 
3475 	      if (!operand_equal_p (c0->value, c1->value, flags)
3476 		  /* In GIMPLE the indexes can be either NULL or matching i.
3477 		     Double check this so we won't get false
3478 		     positives for GENERIC.  */
3479 		  || (c0->index
3480 		      && (TREE_CODE (c0->index) != INTEGER_CST
3481 			  || !compare_tree_int (c0->index, i)))
3482 		  || (c1->index
3483 		      && (TREE_CODE (c1->index) != INTEGER_CST
3484 			  || !compare_tree_int (c1->index, i))))
3485 		return 0;
3486 	    }
3487 	  return 1;
3488 	}
3489       else if (TREE_CODE (arg0) == STATEMENT_LIST
3490 	       && (flags & OEP_LEXICOGRAPHIC))
3491 	{
3492 	  /* Compare the STATEMENT_LISTs.  */
3493 	  tree_stmt_iterator tsi1, tsi2;
3494 	  tree body1 = CONST_CAST_TREE (arg0);
3495 	  tree body2 = CONST_CAST_TREE (arg1);
3496 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3497 	       tsi_next (&tsi1), tsi_next (&tsi2))
3498 	    {
3499 	      /* The lists don't have the same number of statements.  */
3500 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3501 		return 0;
3502 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3503 		return 1;
3504 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3505 				    flags & (OEP_LEXICOGRAPHIC
3506 					     | OEP_NO_HASH_CHECK)))
3507 		return 0;
3508 	    }
3509 	}
3510       return 0;
3511 
3512     case tcc_statement:
3513       switch (TREE_CODE (arg0))
3514 	{
3515 	case RETURN_EXPR:
3516 	  if (flags & OEP_LEXICOGRAPHIC)
3517 	    return OP_SAME_WITH_NULL (0);
3518 	  return 0;
3519 	case DEBUG_BEGIN_STMT:
3520 	  if (flags & OEP_LEXICOGRAPHIC)
3521 	    return 1;
3522 	  return 0;
3523 	default:
3524 	  return 0;
3525 	 }
3526 
3527     default:
3528       return 0;
3529     }
3530 
3531 #undef OP_SAME
3532 #undef OP_SAME_WITH_NULL
3533 }
3534 
3535 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3536    with a different signedness or a narrower precision.  */
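
/* For example (illustrative): if ARG0 is the char variable C and ARG1 is
   '(int) C', the single widening conversion is looked through and true is
   returned; likewise a mere sign change at the same precision, such as
   comparing X with '(unsigned) X', is stripped by STRIP_NOPS.  */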
3537 
3538 static bool
3539 operand_equal_for_comparison_p (tree arg0, tree arg1)
3540 {
3541   if (operand_equal_p (arg0, arg1, 0))
3542     return true;
3543 
3544   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3545       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3546     return false;
3547 
3548   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3549      and see if the inner values are the same.  This removes any
3550      signedness comparison, which doesn't matter here.  */
3551   tree op0 = arg0;
3552   tree op1 = arg1;
3553   STRIP_NOPS (op0);
3554   STRIP_NOPS (op1);
3555   if (operand_equal_p (op0, op1, 0))
3556     return true;
3557 
3558   /* Discard a single widening conversion from ARG1 and see if the inner
3559      value is the same as ARG0.  */
3560   if (CONVERT_EXPR_P (arg1)
3561       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3562       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3563          < TYPE_PRECISION (TREE_TYPE (arg1))
3564       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3565     return true;
3566 
3567   return false;
3568 }
3569 
3570 /* See if ARG is an expression that is either a comparison or is performing
3571    arithmetic on comparisons.  The comparisons must only be comparing
3572    two different values, which will be stored in *CVAL1 and *CVAL2; if
3573    they are nonzero it means that some operands have already been found.
3574    No variables may be used anywhere else in the expression except in the
3575    comparisons.
3576 
3577    If this is true, return 1.  Otherwise, return zero.  */
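
/* For example (illustrative): for ARG = 'a < b && a != b' this records
   *CVAL1 = a and *CVAL2 = b and returns 1, since only the two values A
   and B are compared; for 'a < b && c != d' it returns 0 because four
   distinct values would be required.  */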
3578 
3579 static int
3580 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3581 {
3582   enum tree_code code = TREE_CODE (arg);
3583   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3584 
3585   /* We can handle some of the tcc_expression cases here.  */
3586   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3587     tclass = tcc_unary;
3588   else if (tclass == tcc_expression
3589 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3590 	       || code == COMPOUND_EXPR))
3591     tclass = tcc_binary;
3592 
3593   switch (tclass)
3594     {
3595     case tcc_unary:
3596       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3597 
3598     case tcc_binary:
3599       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3600 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3601 
3602     case tcc_constant:
3603       return 1;
3604 
3605     case tcc_expression:
3606       if (code == COND_EXPR)
3607 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3608 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3609 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3610       return 0;
3611 
3612     case tcc_comparison:
3613       /* First see if we can handle the first operand, then the second.  For
3614 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3615 	 one side of the comparison is each of the values; test for the
3616 	 case where this isn't true by failing if the two operands
3617 	 are the same.  */
3618 
3619       if (operand_equal_p (TREE_OPERAND (arg, 0),
3620 			   TREE_OPERAND (arg, 1), 0))
3621 	return 0;
3622 
3623       if (*cval1 == 0)
3624 	*cval1 = TREE_OPERAND (arg, 0);
3625       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3626 	;
3627       else if (*cval2 == 0)
3628 	*cval2 = TREE_OPERAND (arg, 0);
3629       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3630 	;
3631       else
3632 	return 0;
3633 
3634       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3635 	;
3636       else if (*cval2 == 0)
3637 	*cval2 = TREE_OPERAND (arg, 1);
3638       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3639 	;
3640       else
3641 	return 0;
3642 
3643       return 1;
3644 
3645     default:
3646       return 0;
3647     }
3648 }
3649 
3650 /* ARG is a tree that is known to contain just arithmetic operations and
3651    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3652    any occurrence of OLD0 as an operand of a comparison and likewise for
3653    NEW1 and OLD1.  */
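
/* For example (illustrative): with ARG = '(a == b) || (a < c)', OLD0 = a,
   NEW0 = x, OLD1 = c and NEW1 = y, the rebuilt (and re-folded) result is
   the tree for '(x == b) || (x < y)'.  */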
3654 
3655 static tree
3656 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3657 	    tree old1, tree new1)
3658 {
3659   tree type = TREE_TYPE (arg);
3660   enum tree_code code = TREE_CODE (arg);
3661   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3662 
3663   /* We can handle some of the tcc_expression cases here.  */
3664   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3665     tclass = tcc_unary;
3666   else if (tclass == tcc_expression
3667 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3668     tclass = tcc_binary;
3669 
3670   switch (tclass)
3671     {
3672     case tcc_unary:
3673       return fold_build1_loc (loc, code, type,
3674 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3675 				      old0, new0, old1, new1));
3676 
3677     case tcc_binary:
3678       return fold_build2_loc (loc, code, type,
3679 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3680 				      old0, new0, old1, new1),
3681 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3682 				      old0, new0, old1, new1));
3683 
3684     case tcc_expression:
3685       switch (code)
3686 	{
3687 	case SAVE_EXPR:
3688 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3689 			     old1, new1);
3690 
3691 	case COMPOUND_EXPR:
3692 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3693 			     old1, new1);
3694 
3695 	case COND_EXPR:
3696 	  return fold_build3_loc (loc, code, type,
3697 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3698 					  old0, new0, old1, new1),
3699 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3700 					  old0, new0, old1, new1),
3701 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3702 					  old0, new0, old1, new1));
3703 	default:
3704 	  break;
3705 	}
3706       /* Fall through - ???  */
3707 
3708     case tcc_comparison:
3709       {
3710 	tree arg0 = TREE_OPERAND (arg, 0);
3711 	tree arg1 = TREE_OPERAND (arg, 1);
3712 
3713 	/* We need to check both for exact equality and tree equality.  The
3714 	   former will be true if the operand has a side-effect.  In that
3715 	   case, we know the operand occurred exactly once.  */
3716 
3717 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3718 	  arg0 = new0;
3719 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3720 	  arg0 = new1;
3721 
3722 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3723 	  arg1 = new0;
3724 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3725 	  arg1 = new1;
3726 
3727 	return fold_build2_loc (loc, code, type, arg0, arg1);
3728       }
3729 
3730     default:
3731       return arg;
3732     }
3733 }
3734 
3735 /* Return a tree for the case when the result of an expression is RESULT
3736    converted to TYPE and OMITTED was previously an operand of the expression
3737    but is now not needed (e.g., we folded OMITTED * 0).
3738 
3739    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3740    the conversion of RESULT to TYPE.  */
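
/* For example (illustrative): if 'f () * 0' folds to 0 but the call f ()
   has side effects, the call must still be evaluated, so the result is
   the COMPOUND_EXPR 'f (), 0' converted to TYPE.  */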
3741 
3742 tree
3743 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3744 {
3745   tree t = fold_convert_loc (loc, type, result);
3746 
3747   /* If the resulting operand is an empty statement, just return the omitted
3748      statement cast to void.  */
3749   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3750     return build1_loc (loc, NOP_EXPR, void_type_node,
3751 		       fold_ignored_result (omitted));
3752 
3753   if (TREE_SIDE_EFFECTS (omitted))
3754     return build2_loc (loc, COMPOUND_EXPR, type,
3755 		       fold_ignored_result (omitted), t);
3756 
3757   return non_lvalue_loc (loc, t);
3758 }
3759 
3760 /* Return a tree for the case when the result of an expression is RESULT
3761    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3762    of the expression but are now not needed.
3763 
3764    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3765    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3766    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3767    just do the conversion of RESULT to TYPE.  */
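
/* For example (illustrative): with RESULT = 1 and side-effecting calls
   OMITTED1 = f () and OMITTED2 = g (), this builds 'f (), (g (), 1)',
   keeping the evaluation of f () ahead of g ().  */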
3768 
3769 tree
3770 omit_two_operands_loc (location_t loc, tree type, tree result,
3771 		       tree omitted1, tree omitted2)
3772 {
3773   tree t = fold_convert_loc (loc, type, result);
3774 
3775   if (TREE_SIDE_EFFECTS (omitted2))
3776     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3777   if (TREE_SIDE_EFFECTS (omitted1))
3778     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3779 
3780   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3781 }
3782 
3783 
3784 /* Return a simplified tree node for the truth-negation of ARG.  This
3785    never alters ARG itself.  We assume that ARG is an operation that
3786    returns a truth value (0 or 1).
3787 
3788    FIXME: one would think we would fold the result, but it causes
3789    problems with the dominator optimizer.  */
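
/* For example (illustrative): for integral operands '!(a < b)' becomes
   'a >= b'.  With -ftrapping-math, a floating-point 'a < b' is not
   inverted here (NULL_TREE is returned, so callers keep an explicit
   TRUTH_NOT_EXPR), since the inverted comparison could differ in which
   exceptions it raises on NaN operands.  */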
3790 
3791 static tree
3792 fold_truth_not_expr (location_t loc, tree arg)
3793 {
3794   tree type = TREE_TYPE (arg);
3795   enum tree_code code = TREE_CODE (arg);
3796   location_t loc1, loc2;
3797 
3798   /* If this is a comparison, we can simply invert it, except for
3799      floating-point non-equality comparisons, in which case we just
3800      enclose a TRUTH_NOT_EXPR around what we have.  */
3801 
3802   if (TREE_CODE_CLASS (code) == tcc_comparison)
3803     {
3804       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3805       if (FLOAT_TYPE_P (op_type)
3806 	  && flag_trapping_math
3807 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3808 	  && code != NE_EXPR && code != EQ_EXPR)
3809 	return NULL_TREE;
3810 
3811       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3812       if (code == ERROR_MARK)
3813 	return NULL_TREE;
3814 
3815       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3816 			     TREE_OPERAND (arg, 1));
3817       if (TREE_NO_WARNING (arg))
3818 	TREE_NO_WARNING (ret) = 1;
3819       return ret;
3820     }
3821 
3822   switch (code)
3823     {
3824     case INTEGER_CST:
3825       return constant_boolean_node (integer_zerop (arg), type);
3826 
3827     case TRUTH_AND_EXPR:
3828       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3829       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3830       return build2_loc (loc, TRUTH_OR_EXPR, type,
3831 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3832 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3833 
3834     case TRUTH_OR_EXPR:
3835       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3836       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3837       return build2_loc (loc, TRUTH_AND_EXPR, type,
3838 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3839 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3840 
3841     case TRUTH_XOR_EXPR:
3842       /* Here we can invert either operand.  We invert the first operand
3843 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3844 	 result is the XOR of the first operand with the inside of the
3845 	 negation of the second operand.  */
3846 
3847       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3848 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3849 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3850       else
3851 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3852 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3853 			   TREE_OPERAND (arg, 1));
3854 
3855     case TRUTH_ANDIF_EXPR:
3856       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3857       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3858       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3859 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3860 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3861 
3862     case TRUTH_ORIF_EXPR:
3863       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3864       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3865       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3866 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3867 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3868 
3869     case TRUTH_NOT_EXPR:
3870       return TREE_OPERAND (arg, 0);
3871 
3872     case COND_EXPR:
3873       {
3874 	tree arg1 = TREE_OPERAND (arg, 1);
3875 	tree arg2 = TREE_OPERAND (arg, 2);
3876 
3877 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3878 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3879 
3880 	/* A COND_EXPR may have a throw as one operand, which
3881 	   then has void type.  Just leave void operands
3882 	   as they are.  */
3883 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3884 			   VOID_TYPE_P (TREE_TYPE (arg1))
3885 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3886 			   VOID_TYPE_P (TREE_TYPE (arg2))
3887 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3888       }
3889 
3890     case COMPOUND_EXPR:
3891       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3892       return build2_loc (loc, COMPOUND_EXPR, type,
3893 			 TREE_OPERAND (arg, 0),
3894 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3895 
3896     case NON_LVALUE_EXPR:
3897       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3898       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3899 
3900     CASE_CONVERT:
3901       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3902 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3903 
3904       /* fall through */
3905 
3906     case FLOAT_EXPR:
3907       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3908       return build1_loc (loc, TREE_CODE (arg), type,
3909 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3910 
3911     case BIT_AND_EXPR:
3912       if (!integer_onep (TREE_OPERAND (arg, 1)))
3913 	return NULL_TREE;
3914       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3915 
3916     case SAVE_EXPR:
3917       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3918 
3919     case CLEANUP_POINT_EXPR:
3920       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3921       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3922 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3923 
3924     default:
3925       return NULL_TREE;
3926     }
3927 }
3928 
3929 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3930    assume that ARG is an operation that returns a truth value (0 or 1
3931    for scalars, 0 or -1 for vectors).  Return the folded expression if
3932    folding is successful.  Otherwise, return NULL_TREE.  */
3933 
3934 static tree
3935 fold_invert_truthvalue (location_t loc, tree arg)
3936 {
3937   tree type = TREE_TYPE (arg);
3938   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3939 			      ? BIT_NOT_EXPR
3940 			      : TRUTH_NOT_EXPR,
3941 			 type, arg);
3942 }
3943 
3944 /* Return a simplified tree node for the truth-negation of ARG.  This
3945    never alters ARG itself.  We assume that ARG is an operation that
3946    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3947 
3948 tree
3949 invert_truthvalue_loc (location_t loc, tree arg)
3950 {
3951   if (TREE_CODE (arg) == ERROR_MARK)
3952     return arg;
3953 
3954   tree type = TREE_TYPE (arg);
3955   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3956 			       ? BIT_NOT_EXPR
3957 			       : TRUTH_NOT_EXPR,
3958 			  type, arg);
3959 }
3960 
3961 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3962    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
3963    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
3964    is the original memory reference used to preserve the alias set of
3965    the access.  */
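
/* For example (illustrative): extracting the 8 bits at bit offset 16 of a
   32-bit variable W yields roughly 'BIT_FIELD_REF <W, 8, 16>', converted
   to TYPE if the bit-field type differs; as a shortcut, when BITPOS is 0
   and BITSIZE covers all of W, the result is simply '(TYPE) W'.  */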
3966 
3967 static tree
3968 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3969 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
3970 		    int unsignedp, int reversep)
3971 {
3972   tree result, bftype;
3973 
3974   /* Attempt not to lose the access path if possible.  */
3975   if (TREE_CODE (orig_inner) == COMPONENT_REF)
3976     {
3977       tree ninner = TREE_OPERAND (orig_inner, 0);
3978       machine_mode nmode;
3979       poly_int64 nbitsize, nbitpos;
3980       tree noffset;
3981       int nunsignedp, nreversep, nvolatilep = 0;
3982       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3983 				       &noffset, &nmode, &nunsignedp,
3984 				       &nreversep, &nvolatilep);
3985       if (base == inner
3986 	  && noffset == NULL_TREE
3987 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
3988 	  && !reversep
3989 	  && !nreversep
3990 	  && !nvolatilep)
3991 	{
3992 	  inner = ninner;
3993 	  bitpos -= nbitpos;
3994 	}
3995     }
3996 
3997   alias_set_type iset = get_alias_set (orig_inner);
3998   if (iset == 0 && get_alias_set (inner) != iset)
3999     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4000 			 build_fold_addr_expr (inner),
4001 			 build_int_cst (ptr_type_node, 0));
4002 
4003   if (known_eq (bitpos, 0) && !reversep)
4004     {
4005       tree size = TYPE_SIZE (TREE_TYPE (inner));
4006       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4007 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
4008 	  && tree_fits_shwi_p (size)
4009 	  && tree_to_shwi (size) == bitsize)
4010 	return fold_convert_loc (loc, type, inner);
4011     }
4012 
4013   bftype = type;
4014   if (TYPE_PRECISION (bftype) != bitsize
4015       || TYPE_UNSIGNED (bftype) == !unsignedp)
4016     bftype = build_nonstandard_integer_type (bitsize, 0);
4017 
4018   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4019 		       bitsize_int (bitsize), bitsize_int (bitpos));
4020   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4021 
4022   if (bftype != type)
4023     result = fold_convert_loc (loc, type, result);
4024 
4025   return result;
4026 }
4027 
4028 /* Optimize a bit-field compare.
4029 
4030    There are two cases:  First is a compare against a constant and the
4031    second is a comparison of two items where the fields are at the same
4032    bit position relative to the start of a chunk (byte, halfword, word)
4033    large enough to contain it.  In these cases we can avoid the shift
4034    implicit in bitfield extractions.
4035 
4036    For constants, we emit a compare of the shifted constant with the
4037    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4038    compared.  For two fields at the same position, we do the ANDs with the
4039    similar mask and compare the result of the ANDs.
4040 
4041    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4042    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4043    are the left and right operands of the comparison, respectively.
4044 
4045    If the optimization described above can be done, we return the resulting
4046    tree.  Otherwise we return zero.  */
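
/* For example (illustrative): given 'struct S { int a : 3; int b : 5; } s;'
   on a little-endian target, 's.b == 3' can be rewritten along the lines
   of '(byte & 0xf8) == (3 << 3)', where "byte" stands for the byte-sized
   bit-field reference built by make_bit_field_ref; no shift of the
   extracted field is needed.  */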
4047 
4048 static tree
4049 optimize_bit_field_compare (location_t loc, enum tree_code code,
4050 			    tree compare_type, tree lhs, tree rhs)
4051 {
4052   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4053   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4054   tree type = TREE_TYPE (lhs);
4055   tree unsigned_type;
4056   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4057   machine_mode lmode, rmode;
4058   scalar_int_mode nmode;
4059   int lunsignedp, runsignedp;
4060   int lreversep, rreversep;
4061   int lvolatilep = 0, rvolatilep = 0;
4062   tree linner, rinner = NULL_TREE;
4063   tree mask;
4064   tree offset;
4065 
4066   /* Get all the information about the extractions being done.  If the bit size
4067      is the same as the size of the underlying object, we aren't doing an
4068      extraction at all and so can do nothing.  We also don't want to
4069      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4070      then will no longer be able to replace it.  */
4071   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4072 				&lunsignedp, &lreversep, &lvolatilep);
4073   if (linner == lhs
4074       || !known_size_p (plbitsize)
4075       || !plbitsize.is_constant (&lbitsize)
4076       || !plbitpos.is_constant (&lbitpos)
4077       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4078       || offset != 0
4079       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4080       || lvolatilep)
4081     return 0;
4082 
4083   if (const_p)
4084     rreversep = lreversep;
4085   else
4086    {
4087      /* If this is not a constant, we can only do something if bit positions,
4088 	sizes, signedness and storage order are the same.  */
4089      rinner
4090        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4091 			      &runsignedp, &rreversep, &rvolatilep);
4092 
4093      if (rinner == rhs
4094 	 || maybe_ne (lbitpos, rbitpos)
4095 	 || maybe_ne (lbitsize, rbitsize)
4096 	 || lunsignedp != runsignedp
4097 	 || lreversep != rreversep
4098 	 || offset != 0
4099 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4100 	 || rvolatilep)
4101        return 0;
4102    }
4103 
4104   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4105   poly_uint64 bitstart = 0;
4106   poly_uint64 bitend = 0;
4107   if (TREE_CODE (lhs) == COMPONENT_REF)
4108     {
4109       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4110       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4111 	return 0;
4112     }
4113 
4114   /* See if we can find a mode to refer to this field.  We should be able to,
4115      but fail if we can't.  */
4116   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4117 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4118 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4119 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4120 		      BITS_PER_WORD, false, &nmode))
4121     return 0;
4122 
4123   /* Set signed and unsigned types of the precision of this mode for the
4124      shifts below.  */
4125   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4126 
4127   /* Compute the bit position and size for the new reference and our offset
4128      within it. If the new reference is the same size as the original, we
4129      won't optimize anything, so return zero.  */
4130   nbitsize = GET_MODE_BITSIZE (nmode);
4131   nbitpos = lbitpos & ~ (nbitsize - 1);
4132   lbitpos -= nbitpos;
4133   if (nbitsize == lbitsize)
4134     return 0;
4135 
4136   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4137     lbitpos = nbitsize - lbitsize - lbitpos;
4138 
4139   /* Make the mask to be used against the extracted field.  */
4140   mask = build_int_cst_type (unsigned_type, -1);
4141   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4142   mask = const_binop (RSHIFT_EXPR, mask,
4143 		      size_int (nbitsize - lbitsize - lbitpos));
4144 
4145   if (! const_p)
4146     {
4147       if (nbitpos < 0)
4148 	return 0;
4149 
4150       /* If not comparing with constant, just rework the comparison
4151 	 and return.  */
4152       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4153 				    nbitsize, nbitpos, 1, lreversep);
4154       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4155       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4156 				    nbitsize, nbitpos, 1, rreversep);
4157       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4158       return fold_build2_loc (loc, code, compare_type, t1, t2);
4159     }
4160 
4161   /* Otherwise, we are handling the constant case.  See if the constant is too
4162      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4163      this not only for its own sake, but to avoid having to test for this
4164      error case below.  If we didn't, we might generate wrong code.
4165 
4166      For unsigned fields, the constant shifted right by the field length should
4167      be all zero.  For signed fields, the high-order bits should agree with
4168      the sign bit.  */
4169 
4170   if (lunsignedp)
4171     {
4172       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4173 	{
4174 	  warning (0, "comparison is always %d due to width of bit-field",
4175 		   code == NE_EXPR);
4176 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4177 	}
4178     }
4179   else
4180     {
4181       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4182       if (tem != 0 && tem != -1)
4183 	{
4184 	  warning (0, "comparison is always %d due to width of bit-field",
4185 		   code == NE_EXPR);
4186 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4187 	}
4188     }
4189 
4190   if (nbitpos < 0)
4191     return 0;
4192 
4193   /* Single-bit compares should always be against zero.  */
4194   if (lbitsize == 1 && ! integer_zerop (rhs))
4195     {
4196       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4197       rhs = build_int_cst (type, 0);
4198     }
4199 
4200   /* Make a new bitfield reference, shift the constant over the
4201      appropriate number of bits and mask it with the computed mask
4202      (in case this was a signed field).  If we changed it, make a new one.  */
4203   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4204 			    nbitsize, nbitpos, 1, lreversep);
4205 
4206   rhs = const_binop (BIT_AND_EXPR,
4207 		     const_binop (LSHIFT_EXPR,
4208 				  fold_convert_loc (loc, unsigned_type, rhs),
4209 				  size_int (lbitpos)),
4210 		     mask);
4211 
4212   lhs = build2_loc (loc, code, compare_type,
4213 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4214   return lhs;
4215 }
4216 
4217 /* Subroutine for fold_truth_andor_1: decode a field reference.
4218 
4219    If EXP is a comparison reference, we return the innermost reference.
4220 
4221    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4222    set to the starting bit number.
4223 
4224    If the innermost field can be completely contained in a mode-sized
4225    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4226 
4227    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4228    otherwise it is not changed.
4229 
4230    *PUNSIGNEDP is set to the signedness of the field.
4231 
4232    *PREVERSEP is set to the storage order of the field.
4233 
4234    *PMASK is set to the mask used.  This is either contained in a
4235    BIT_AND_EXPR or derived from the width of the field.
4236 
4237    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4238 
4239    Return 0 if this is not a component reference or is one that we can't
4240    do anything with.  */
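
/* For example (illustrative): for EXP = '((unsigned char) s.b) & 3',
   where s.b is assumed to be an 8-bit field at bit offset 8, this returns
   the innermost object 's' and sets *PBITSIZE = 8, *PBITPOS = 8,
   *PAND_MASK = 3 and *PMASK = 3 (the 8-bit field mask ANDed with 3).  */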
4241 
4242 static tree
4243 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4244 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4245 			int *punsignedp, int *preversep, int *pvolatilep,
4246 			tree *pmask, tree *pand_mask)
4247 {
4248   tree exp = *exp_;
4249   tree outer_type = 0;
4250   tree and_mask = 0;
4251   tree mask, inner, offset;
4252   tree unsigned_type;
4253   unsigned int precision;
4254 
4255   /* All the optimizations using this function assume integer fields.
4256      There are problems with FP fields since the type_for_size call
4257      below can fail for, e.g., XFmode.  */
4258   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4259     return NULL_TREE;
4260 
4261   /* We are interested in the bare arrangement of bits, so strip everything
4262      that doesn't affect the machine mode.  However, record the type of the
4263      outermost expression if it may matter below.  */
4264   if (CONVERT_EXPR_P (exp)
4265       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4266     outer_type = TREE_TYPE (exp);
4267   STRIP_NOPS (exp);
4268 
4269   if (TREE_CODE (exp) == BIT_AND_EXPR)
4270     {
4271       and_mask = TREE_OPERAND (exp, 1);
4272       exp = TREE_OPERAND (exp, 0);
4273       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4274       if (TREE_CODE (and_mask) != INTEGER_CST)
4275 	return NULL_TREE;
4276     }
4277 
4278   poly_int64 poly_bitsize, poly_bitpos;
4279   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4280 			       pmode, punsignedp, preversep, pvolatilep);
4281   if ((inner == exp && and_mask == 0)
4282       || !poly_bitsize.is_constant (pbitsize)
4283       || !poly_bitpos.is_constant (pbitpos)
4284       || *pbitsize < 0
4285       || offset != 0
4286       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4287       /* Reject out-of-bound accesses (PR79731).  */
4288       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4289 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4290 			       *pbitpos + *pbitsize) < 0))
4291     return NULL_TREE;
4292 
4293   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4294   if (unsigned_type == NULL_TREE)
4295     return NULL_TREE;
4296 
4297   *exp_ = exp;
4298 
4299   /* If the number of bits in the reference is the same as the bitsize of
4300      the outer type, then the outer type gives the signedness. Otherwise
4301      (in case of a small bitfield) the signedness is unchanged.  */
4302   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4303     *punsignedp = TYPE_UNSIGNED (outer_type);
4304 
4305   /* Compute the mask to access the bitfield.  */
4306   precision = TYPE_PRECISION (unsigned_type);
4307 
4308   mask = build_int_cst_type (unsigned_type, -1);
4309 
4310   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4311   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4312 
4313   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4314   if (and_mask != 0)
4315     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4316 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4317 
4318   *pmask = mask;
4319   *pand_mask = and_mask;
4320   return inner;
4321 }
4322 
4323 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4324    bit positions and MASK is SIGNED.  */
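
/* For example (illustrative): a signed MASK with value 7 and SIZE == 3
   satisfies this predicate; value 6 (a hole in the low-order bits) or any
   unsigned MASK does not -- see the comment in the function body on the
   UNSIGNED restriction.  */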
4325 
4326 static int
4327 all_ones_mask_p (const_tree mask, unsigned int size)
4328 {
4329   tree type = TREE_TYPE (mask);
4330   unsigned int precision = TYPE_PRECISION (type);
4331 
4332   /* If this function returns true when the type of the mask is
4333      UNSIGNED, then there will be errors.  In particular see
4334      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4335      any documentation paper trail as to why this is so.  But the pre
4336      wide-int worked with that restriction and it has been preserved
4337      here.  */
4338   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4339     return false;
4340 
4341   return wi::mask (size, false, precision) == wi::to_wide (mask);
4342 }
4343 
4344 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4345    represents the sign bit of EXP's type.  If EXP represents a sign
4346    or zero extension, also test VAL against the unextended type.
4347    The return value is the (sub)expression whose sign bit is VAL,
4348    or NULL_TREE otherwise.  */
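
/* For example (illustrative): for a 32-bit int EXP, a VAL whose only set
   bit is bit 31 identifies the sign bit, so EXP itself is returned; if
   EXP is '(int) c' for an 8-bit char C, a VAL of 0x80 is tested against
   the narrower type and C is returned.  */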
4349 
4350 tree
4351 sign_bit_p (tree exp, const_tree val)
4352 {
4353   int width;
4354   tree t;
4355 
4356   /* Tree EXP must have an integral type.  */
4357   t = TREE_TYPE (exp);
4358   if (! INTEGRAL_TYPE_P (t))
4359     return NULL_TREE;
4360 
4361   /* Tree VAL must be an integer constant.  */
4362   if (TREE_CODE (val) != INTEGER_CST
4363       || TREE_OVERFLOW (val))
4364     return NULL_TREE;
4365 
4366   width = TYPE_PRECISION (t);
4367   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4368     return exp;
4369 
4370   /* Handle extension from a narrower type.  */
4371   if (TREE_CODE (exp) == NOP_EXPR
4372       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4373     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4374 
4375   return NULL_TREE;
4376 }
4377 
4378 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4379    to be evaluated unconditionally.  */
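
/* For example (illustrative): an SSA_NAME, a constant, or a local
   non-volatile, non-addressable automatic variable is "simple", since
   evaluating it early cannot trap or change program state; a TREE_PUBLIC
   global or a weak symbol is not, as loading it may be expensive or
   unsafe.  */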
4380 
4381 static int
4382 simple_operand_p (const_tree exp)
4383 {
4384   /* Strip any conversions that don't change the machine mode.  */
4385   STRIP_NOPS (exp);
4386 
4387   return (CONSTANT_CLASS_P (exp)
4388   	  || TREE_CODE (exp) == SSA_NAME
4389 	  || (DECL_P (exp)
4390 	      && ! TREE_ADDRESSABLE (exp)
4391 	      && ! TREE_THIS_VOLATILE (exp)
4392 	      && ! DECL_NONLOCAL (exp)
4393 	      /* Don't regard global variables as simple.  They may be
4394 		 allocated in ways unknown to the compiler (shared memory,
4395 		 #pragma weak, etc).  */
4396 	      && ! TREE_PUBLIC (exp)
4397 	      && ! DECL_EXTERNAL (exp)
4398 	      /* Weakrefs are not safe to be read, since they can be NULL.
4399  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4400 		 have DECL_WEAK flag set.  */
4401 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4402 	      /* Loading a static variable is unduly expensive, but global
4403 		 registers aren't expensive.  */
4404 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4405 }
4406 
4407 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4408    to be evaluated unconditionally.
4409    In addition to simple_operand_p, we assume that comparisons, conversions,
4410    and logic-not operations are simple if their operands are simple, too.  */
4411 
4412 static bool
4413 simple_operand_p_2 (tree exp)
4414 {
4415   enum tree_code code;
4416 
4417   if (TREE_SIDE_EFFECTS (exp)
4418       || tree_could_trap_p (exp))
4419     return false;
4420 
4421   while (CONVERT_EXPR_P (exp))
4422     exp = TREE_OPERAND (exp, 0);
4423 
4424   code = TREE_CODE (exp);
4425 
4426   if (TREE_CODE_CLASS (code) == tcc_comparison)
4427     return (simple_operand_p (TREE_OPERAND (exp, 0))
4428 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4429 
4430   if (code == TRUTH_NOT_EXPR)
4431       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4432 
4433   return simple_operand_p (exp);
4434 }
4435 
4436 
4437 /* The following functions are subroutines to fold_range_test and allow it to
4438    try to change a logical combination of comparisons into a range test.
4439 
4440    For example, both
4441 	X == 2 || X == 3 || X == 4 || X == 5
4442    and
4443 	X >= 2 && X <= 5
4444    are converted to
4445 	(unsigned) (X - 2) <= 3
4446 
4447    We describe each set of comparisons as being either inside or outside
4448    a range, using a variable named like IN_P, and then describe the
4449    range with a lower and upper bound.  If one of the bounds is omitted,
4450    it represents either the highest or lowest value of the type.
4451 
4452    In the comments below, we represent a range by two numbers in brackets
4453    preceded by a "+" to designate being inside that range, or a "-" to
4454    designate being outside that range, so the condition can be inverted by
4455    flipping the prefix.  An omitted bound is represented by a "-".  For
4456    example, "- [-, 10]" means being outside the range starting at the lowest
4457    possible value and ending at 10, in other words, being greater than 10.
4458    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4459    always false.
4460 
4461    We set up things so that the missing bounds are handled in a consistent
4462    manner so neither a missing bound nor "true" and "false" need to be
4463    handled using a special case.  */
4464 
4465 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4466    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4467    and UPPER1_P are nonzero if the respective argument is an upper bound
4468    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4469    must be specified for a comparison.  ARG1 will be converted to ARG0's
4470    type if both are specified.  */
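
/* For example (illustrative): range_binop (LT_EXPR, type, NULL, 0, c, 1)
   treats the missing ARG0 as lower than any finite bound, so it returns
   boolean true; when both arguments are present, the comparison is simply
   folded, and 0 is returned if it does not reduce to a constant.  */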
4471 
4472 static tree
4473 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4474 	     tree arg1, int upper1_p)
4475 {
4476   tree tem;
4477   int result;
4478   int sgn0, sgn1;
4479 
4480   /* If neither arg represents infinity, do the normal operation.
4481      Else, if not a comparison, return infinity.  Else handle the special
4482      comparison rules. Note that most of the cases below won't occur, but
4483      are handled for consistency.  */
4484 
4485   if (arg0 != 0 && arg1 != 0)
4486     {
4487       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4488 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4489       STRIP_NOPS (tem);
4490       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4491     }
4492 
4493   if (TREE_CODE_CLASS (code) != tcc_comparison)
4494     return 0;
4495 
4496   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4497      for neither.  In real maths, we cannot assume open ended ranges are
4498      the same. But, this is computer arithmetic, where numbers are finite.
4499      We can therefore make the transformation of any unbounded range with
4500      the value Z, Z being greater than any representable number. This permits
4501      us to treat unbounded ranges as equal.  */
4502   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4503   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4504   switch (code)
4505     {
4506     case EQ_EXPR:
4507       result = sgn0 == sgn1;
4508       break;
4509     case NE_EXPR:
4510       result = sgn0 != sgn1;
4511       break;
4512     case LT_EXPR:
4513       result = sgn0 < sgn1;
4514       break;
4515     case LE_EXPR:
4516       result = sgn0 <= sgn1;
4517       break;
4518     case GT_EXPR:
4519       result = sgn0 > sgn1;
4520       break;
4521     case GE_EXPR:
4522       result = sgn0 >= sgn1;
4523       break;
4524     default:
4525       gcc_unreachable ();
4526     }
4527 
4528   return constant_boolean_node (result, type);
4529 }
4530 
4531 /* Helper routine for make_range.  Perform one step for it, return
4532    new expression if the loop should continue or NULL_TREE if it should
4533    stop.  */
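
/* For example (illustrative): starting from the initial range "EXP != 0"
   (IN_P == 0, LOW == HIGH == 0), one step on the unsigned comparison
   'u <= 5' flips IN_P and yields + [-, 5], which the unsigned handling
   below then tightens to + [0, 5].  */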
4534 
4535 tree
4536 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4537 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4538 		 bool *strict_overflow_p)
4539 {
4540   tree arg0_type = TREE_TYPE (arg0);
4541   tree n_low, n_high, low = *p_low, high = *p_high;
4542   int in_p = *p_in_p, n_in_p;
4543 
4544   switch (code)
4545     {
4546     case TRUTH_NOT_EXPR:
4547       /* We can only do something if the range is testing for zero.  */
4548       if (low == NULL_TREE || high == NULL_TREE
4549 	  || ! integer_zerop (low) || ! integer_zerop (high))
4550 	return NULL_TREE;
4551       *p_in_p = ! in_p;
4552       return arg0;
4553 
4554     case EQ_EXPR: case NE_EXPR:
4555     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4556       /* We can only do something if the range is testing for zero
4557 	 and if the second operand is an integer constant.  Note that
4558 	 saying something is "in" the range we make is done by
4559 	 complementing IN_P, since it will be set in the initial case of
4560 	 being not equal to zero; "out" means leaving it alone.  */
4561       if (low == NULL_TREE || high == NULL_TREE
4562 	  || ! integer_zerop (low) || ! integer_zerop (high)
4563 	  || TREE_CODE (arg1) != INTEGER_CST)
4564 	return NULL_TREE;
4565 
4566       switch (code)
4567 	{
4568 	case NE_EXPR:  /* - [c, c]  */
4569 	  low = high = arg1;
4570 	  break;
4571 	case EQ_EXPR:  /* + [c, c]  */
4572 	  in_p = ! in_p, low = high = arg1;
4573 	  break;
4574 	case GT_EXPR:  /* - [-, c] */
4575 	  low = 0, high = arg1;
4576 	  break;
4577 	case GE_EXPR:  /* + [c, -] */
4578 	  in_p = ! in_p, low = arg1, high = 0;
4579 	  break;
4580 	case LT_EXPR:  /* - [c, -] */
4581 	  low = arg1, high = 0;
4582 	  break;
4583 	case LE_EXPR:  /* + [-, c] */
4584 	  in_p = ! in_p, low = 0, high = arg1;
4585 	  break;
4586 	default:
4587 	  gcc_unreachable ();
4588 	}
4589 
4590       /* If this is an unsigned comparison, we also know that EXP is
4591 	 greater than or equal to zero.  We base the range tests we make
4592 	 on that fact, so we record it here so we can parse existing
4593 	 range tests.  We test arg0_type since often the return type
4594 	 of, e.g. EQ_EXPR, is boolean.  */
4595       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4596 	{
4597 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4598 			      in_p, low, high, 1,
4599 			      build_int_cst (arg0_type, 0),
4600 			      NULL_TREE))
4601 	    return NULL_TREE;
4602 
4603 	  in_p = n_in_p, low = n_low, high = n_high;
4604 
4605 	  /* If the high bound is missing, but we have a nonzero low
4606 	     bound, reverse the range so it goes from zero to the low bound
4607 	     minus 1.  */
4608 	  if (high == 0 && low && ! integer_zerop (low))
4609 	    {
4610 	      in_p = ! in_p;
4611 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4612 				  build_int_cst (TREE_TYPE (low), 1), 0);
4613 	      low = build_int_cst (arg0_type, 0);
4614 	    }
4615 	}
4616 
4617       *p_low = low;
4618       *p_high = high;
4619       *p_in_p = in_p;
4620       return arg0;
4621 
4622     case NEGATE_EXPR:
4623       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4624 	 low and high are non-NULL, then normalize will DTRT.  */
4625       if (!TYPE_UNSIGNED (arg0_type)
4626 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4627 	{
4628 	  if (low == NULL_TREE)
4629 	    low = TYPE_MIN_VALUE (arg0_type);
4630 	  if (high == NULL_TREE)
4631 	    high = TYPE_MAX_VALUE (arg0_type);
4632 	}
4633 
4634       /* (-x) IN [a,b] -> x in [-b, -a]  */
4635       n_low = range_binop (MINUS_EXPR, exp_type,
4636 			   build_int_cst (exp_type, 0),
4637 			   0, high, 1);
4638       n_high = range_binop (MINUS_EXPR, exp_type,
4639 			    build_int_cst (exp_type, 0),
4640 			    0, low, 0);
4641       if (n_high != 0 && TREE_OVERFLOW (n_high))
4642 	return NULL_TREE;
4643       goto normalize;
4644 
4645     case BIT_NOT_EXPR:
4646       /* ~ X -> -X - 1  */
4647       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4648 			 build_int_cst (exp_type, 1));
4649 
4650     case PLUS_EXPR:
4651     case MINUS_EXPR:
4652       if (TREE_CODE (arg1) != INTEGER_CST)
4653 	return NULL_TREE;
4654 
4655       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4656 	 move a constant to the other side.  */
4657       if (!TYPE_UNSIGNED (arg0_type)
4658 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4659 	return NULL_TREE;
4660 
4661       /* If EXP is signed, any overflow in the computation is undefined,
4662 	 so we don't worry about it so long as our computations on
4663 	 the bounds don't overflow.  For unsigned, overflow is defined
4664 	 and this is exactly the right thing.  */
4665       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4666 			   arg0_type, low, 0, arg1, 0);
4667       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4668 			    arg0_type, high, 1, arg1, 0);
4669       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4670 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4671 	return NULL_TREE;
4672 
4673       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4674 	*strict_overflow_p = true;
4675 
4676       normalize:
4677 	/* Check for an unsigned range which has wrapped around the maximum
4678 	   value thus making n_high < n_low, and normalize it.  */
4679 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4680 	  {
4681 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4682 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4683 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4684 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4685 
4686 	    /* If the range is of the form +/- [ x+1, x ], we won't
4687 	       be able to normalize it.  But then, it represents the
4688 	       whole range or the empty set, so make it
4689 	       +/- [ -, - ].  */
4690 	    if (tree_int_cst_equal (n_low, low)
4691 		&& tree_int_cst_equal (n_high, high))
4692 	      low = high = 0;
4693 	    else
4694 	      in_p = ! in_p;
4695 	  }
4696 	else
4697 	  low = n_low, high = n_high;
4698 
4699 	*p_low = low;
4700 	*p_high = high;
4701 	*p_in_p = in_p;
4702 	return arg0;
4703 
4704     CASE_CONVERT:
4705     case NON_LVALUE_EXPR:
4706       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4707 	return NULL_TREE;
4708 
4709       if (! INTEGRAL_TYPE_P (arg0_type)
4710 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4711 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4712 	return NULL_TREE;
4713 
4714       n_low = low, n_high = high;
4715 
4716       if (n_low != 0)
4717 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4718 
4719       if (n_high != 0)
4720 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4721 
4722       /* If we're converting arg0 from an unsigned type to exp,
4723 	 a signed type, we will be doing the comparison as unsigned.
4724 	 The tests above have already verified that LOW and HIGH
4725 	 are both positive.
4726 
4727 	 So we have to ensure that we will handle large unsigned
4728 	 values the same way that the current signed bounds treat
4729 	 negative values.  */
4730 
4731       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4732 	{
4733 	  tree high_positive;
4734 	  tree equiv_type;
4735 	  /* For fixed-point modes, we need to pass the saturating flag
4736 	     as the 2nd parameter.  */
4737 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4738 	    equiv_type
4739 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4740 						TYPE_SATURATING (arg0_type));
4741 	  else
4742 	    equiv_type
4743 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4744 
4745 	  /* A range without an upper bound is, naturally, unbounded.
4746 	     Since convert would have cropped a very large value, use
4747 	     the max value for the destination type.  */
4748 	  high_positive
4749 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4750 	      : TYPE_MAX_VALUE (arg0_type);
4751 
4752 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4753 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4754 					     fold_convert_loc (loc, arg0_type,
4755 							       high_positive),
4756 					     build_int_cst (arg0_type, 1));
4757 
4758 	  /* If the low bound is specified, "and" the range with the
4759 	     range for which the original unsigned value will be
4760 	     positive.  */
4761 	  if (low != 0)
4762 	    {
4763 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4764 				  1, fold_convert_loc (loc, arg0_type,
4765 						       integer_zero_node),
4766 				  high_positive))
4767 		return NULL_TREE;
4768 
4769 	      in_p = (n_in_p == in_p);
4770 	    }
4771 	  else
4772 	    {
4773 	      /* Otherwise, "or" the range with the range of the input
4774 		 that will be interpreted as negative.  */
4775 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4776 				  1, fold_convert_loc (loc, arg0_type,
4777 						       integer_zero_node),
4778 				  high_positive))
4779 		return NULL_TREE;
4780 
4781 	      in_p = (in_p != n_in_p);
4782 	    }
4783 	}
4784 
4785       *p_low = n_low;
4786       *p_high = n_high;
4787       *p_in_p = in_p;
4788       return arg0;
4789 
4790     default:
4791       return NULL_TREE;
4792     }
4793 }
4794 
4795 /* Given EXP, a logical expression, set the range it is testing into
4796    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4797    actually being tested.  *PLOW and *PHIGH will be made of the same
4798    type as the returned expression.  If EXP is not a comparison, we
4799    will most likely not be returning a useful value and range.  Set
4800    *STRICT_OVERFLOW_P to true if the return value is only valid
4801    because signed overflow is undefined; otherwise, do not change
4802    *STRICT_OVERFLOW_P.  */
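/* For example (a sketch): for EXP = "x - 2 <= 7" with signed x, this
   returns x with *PIN_P == 1 and the range [-, 9] (unbounded below),
   and sets *STRICT_OVERFLOW_P because the rewrite assumes "x - 2"
   does not overflow.  */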
4803 
4804 tree
4805 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4806 	    bool *strict_overflow_p)
4807 {
4808   enum tree_code code;
4809   tree arg0, arg1 = NULL_TREE;
4810   tree exp_type, nexp;
4811   int in_p;
4812   tree low, high;
4813   location_t loc = EXPR_LOCATION (exp);
4814 
4815   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4816      and see if we can refine the range.  Some of the cases handled by
4817      make_range_step may not happen, but it doesn't seem worth worrying
4818      about this.  We keep iterating as long as make_range_step refines
4819      the range; once it returns NULL_TREE we stop.  */
4820 
4821   in_p = 0;
4822   low = high = build_int_cst (TREE_TYPE (exp), 0);
4823 
4824   while (1)
4825     {
4826       code = TREE_CODE (exp);
4827       exp_type = TREE_TYPE (exp);
4828       arg0 = NULL_TREE;
4829 
4830       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4831 	{
4832 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4833 	    arg0 = TREE_OPERAND (exp, 0);
4834 	  if (TREE_CODE_CLASS (code) == tcc_binary
4835 	      || TREE_CODE_CLASS (code) == tcc_comparison
4836 	      || (TREE_CODE_CLASS (code) == tcc_expression
4837 		  && TREE_OPERAND_LENGTH (exp) > 1))
4838 	    arg1 = TREE_OPERAND (exp, 1);
4839 	}
4840       if (arg0 == NULL_TREE)
4841 	break;
4842 
4843       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4844 			      &high, &in_p, strict_overflow_p);
4845       if (nexp == NULL_TREE)
4846 	break;
4847       exp = nexp;
4848     }
4849 
4850   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4851   if (TREE_CODE (exp) == INTEGER_CST)
4852     {
4853       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4854 						 exp, 0, low, 0))
4855 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4856 						    exp, 1, high, 1)));
4857       low = high = 0;
4858       exp = 0;
4859     }
4860 
4861   *pin_p = in_p, *plow = low, *phigh = high;
4862   return exp;
4863 }
4864 
4865 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4866    a bitwise check, i.e. when
4867      LOW  == 0xXX...X00...0
4868      HIGH == 0xXX...X11...1
4869    Return corresponding mask in MASK and stem in VALUE.  */
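/* For example (a sketch with 8-bit values):
     LOW  == 0x20 == 0b00100000
     HIGH == 0x3f == 0b00111111
   differ exactly in their low bits, so "x in [LOW, HIGH]" is
   equivalent to "(x & 0xe0) == 0x20", giving MASK == 0xe0 and
   VALUE == 0x20.  */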
4870 
4871 static bool
4872 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4873 		  tree *value)
4874 {
4875   if (TREE_CODE (low) != INTEGER_CST
4876       || TREE_CODE (high) != INTEGER_CST)
4877     return false;
4878 
4879   unsigned prec = TYPE_PRECISION (type);
4880   wide_int lo = wi::to_wide (low, prec);
4881   wide_int hi = wi::to_wide (high, prec);
4882 
4883   wide_int end_mask = lo ^ hi;
4884   if ((end_mask & (end_mask + 1)) != 0
4885       || (lo & end_mask) != 0)
4886     return false;
4887 
4888   wide_int stem_mask = ~end_mask;
4889   wide_int stem = lo & stem_mask;
4890   if (stem != (hi & stem_mask))
4891     return false;
4892 
4893   *mask = wide_int_to_tree (type, stem_mask);
4894   *value = wide_int_to_tree (type, stem);
4895 
4896   return true;
4897 }
4898 
4899 /* Helper routine for build_range_check and match.pd.  Return the type in
4900    which to perform the check, or NULL if it shouldn't be optimized.  */
4901 
4902 tree
4903 range_check_type (tree etype)
4904 {
4905   /* First make sure that arithmetic in this type is valid, then make sure
4906      that it wraps around.  */
4907   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4908     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4909 					    TYPE_UNSIGNED (etype));
4910 
4911   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4912     {
4913       tree utype, minv, maxv;
4914 
4915       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4916 	 for the type in question, as we rely on this here.  */
4917       utype = unsigned_type_for (etype);
4918       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4919       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4920 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4921       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4922 
4923       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4924 				      minv, 1, maxv, 1)))
4925 	etype = utype;
4926       else
4927 	return NULL_TREE;
4928     }
4929   return etype;
4930 }
4931 
4932 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4933    type, TYPE, return an expression to test if EXP is in (or out of, depending
4934    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4935 
4936 tree
4937 build_range_check (location_t loc, tree type, tree exp, int in_p,
4938 		   tree low, tree high)
4939 {
4940   tree etype = TREE_TYPE (exp), mask, value;
4941 
4942   /* Disable this optimization for function pointer expressions
4943      on targets that require function pointer canonicalization.  */
4944   if (targetm.have_canonicalize_funcptr_for_compare ()
4945       && POINTER_TYPE_P (etype)
4946       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
4947     return NULL_TREE;
4948 
4949   if (! in_p)
4950     {
4951       value = build_range_check (loc, type, exp, 1, low, high);
4952       if (value != 0)
4953         return invert_truthvalue_loc (loc, value);
4954 
4955       return 0;
4956     }
4957 
4958   if (low == 0 && high == 0)
4959     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4960 
4961   if (low == 0)
4962     return fold_build2_loc (loc, LE_EXPR, type, exp,
4963 			    fold_convert_loc (loc, etype, high));
4964 
4965   if (high == 0)
4966     return fold_build2_loc (loc, GE_EXPR, type, exp,
4967 			    fold_convert_loc (loc, etype, low));
4968 
4969   if (operand_equal_p (low, high, 0))
4970     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4971 			    fold_convert_loc (loc, etype, low));
4972 
4973   if (TREE_CODE (exp) == BIT_AND_EXPR
4974       && maskable_range_p (low, high, etype, &mask, &value))
4975     return fold_build2_loc (loc, EQ_EXPR, type,
4976 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
4977 					     exp, mask),
4978 			    value);
4979 
4980   if (integer_zerop (low))
4981     {
4982       if (! TYPE_UNSIGNED (etype))
4983 	{
4984 	  etype = unsigned_type_for (etype);
4985 	  high = fold_convert_loc (loc, etype, high);
4986 	  exp = fold_convert_loc (loc, etype, exp);
4987 	}
4988       return build_range_check (loc, type, exp, 1, 0, high);
4989     }
4990 
4991   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
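  /* This works because a value in [1, 127] is exactly a value whose
     signed 8-bit interpretation is positive: 0 fails the test, and
     128..255 map to negative values.  (The 8-bit case is a sketch;
     the code below checks HIGH == 2^(prec-1) - 1 for any precision.)  */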
4992   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4993     {
4994       int prec = TYPE_PRECISION (etype);
4995 
4996       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4997 	{
4998 	  if (TYPE_UNSIGNED (etype))
4999 	    {
5000 	      tree signed_etype = signed_type_for (etype);
5001 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5002 		etype
5003 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5004 	      else
5005 		etype = signed_etype;
5006 	      exp = fold_convert_loc (loc, etype, exp);
5007 	    }
5008 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
5009 				  build_int_cst (etype, 0));
5010 	}
5011     }
5012 
5013   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5014      This requires wrap-around arithmetic for the type of the expression.  */
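  /* E.g. (a sketch in 8-bit unsigned arithmetic): for the range
     [100, 109], "c >= 100 && c <= 109" becomes the single test
     "(unsigned char) (c - 100) <= 9"; values below 100 wrap around
     to large numbers and fail the comparison.  */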
5015   etype = range_check_type (etype);
5016   if (etype == NULL_TREE)
5017     return NULL_TREE;
5018 
5019   if (POINTER_TYPE_P (etype))
5020     etype = unsigned_type_for (etype);
5021 
5022   high = fold_convert_loc (loc, etype, high);
5023   low = fold_convert_loc (loc, etype, low);
5024   exp = fold_convert_loc (loc, etype, exp);
5025 
5026   value = const_binop (MINUS_EXPR, high, low);
5027 
5028   if (value != 0 && !TREE_OVERFLOW (value))
5029     return build_range_check (loc, type,
5030 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5031 			      1, build_int_cst (etype, 0), value);
5032 
5033   return 0;
5034 }
5035 
5036 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5037 
5038 static tree
5039 range_predecessor (tree val)
5040 {
5041   tree type = TREE_TYPE (val);
5042 
5043   if (INTEGRAL_TYPE_P (type)
5044       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5045     return 0;
5046   else
5047     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5048 			build_int_cst (TREE_TYPE (val), 1), 0);
5049 }
5050 
5051 /* Return the successor of VAL in its type, handling the infinite case.  */
5052 
5053 static tree
5054 range_successor (tree val)
5055 {
5056   tree type = TREE_TYPE (val);
5057 
5058   if (INTEGRAL_TYPE_P (type)
5059       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5060     return 0;
5061   else
5062     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5063 			build_int_cst (TREE_TYPE (val), 1), 0);
5064 }
5065 
5066 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5067    can, 0 if we can't.  Set the output range into the specified parameters.  */
5068 
5069 bool
5070 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5071 	      tree high0, int in1_p, tree low1, tree high1)
5072 {
5073   int no_overlap;
5074   int subset;
5075   int temp;
5076   tree tem;
5077   int in_p;
5078   tree low, high;
5079   int lowequal = ((low0 == 0 && low1 == 0)
5080 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5081 						low0, 0, low1, 0)));
5082   int highequal = ((high0 == 0 && high1 == 0)
5083 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5084 						 high0, 1, high1, 1)));
5085 
5086   /* Make range 0 be the range that starts first, or ends last if they
5087      start at the same value.  Swap them if it isn't.  */
5088   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5089 				 low0, 0, low1, 0))
5090       || (lowequal
5091 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5092 					high1, 1, high0, 1))))
5093     {
5094       temp = in0_p, in0_p = in1_p, in1_p = temp;
5095       tem = low0, low0 = low1, low1 = tem;
5096       tem = high0, high0 = high1, high1 = tem;
5097     }
5098 
5099   /* Now flag two cases, whether the ranges are disjoint or whether the
5100      second range is totally subsumed in the first.  Note that the tests
5101      below are simplified by the ones above.  */
5102   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5103 					  high0, 1, low1, 0));
5104   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5105 				      high1, 1, high0, 1));
5106 
5107   /* We now have four cases, depending on whether we are including or
5108      excluding the two ranges.  */
5109   if (in0_p && in1_p)
5110     {
5111       /* If they don't overlap, the result is false.  If the second range
5112 	 is a subset it is the result.  Otherwise, the range is from the start
5113 	 of the second to the end of the first.  */
5114       if (no_overlap)
5115 	in_p = 0, low = high = 0;
5116       else if (subset)
5117 	in_p = 1, low = low1, high = high1;
5118       else
5119 	in_p = 1, low = low1, high = high0;
5120     }
5121 
5122   else if (in0_p && ! in1_p)
5123     {
5124       /* If they don't overlap, the result is the first range.  If they are
5125 	 equal, the result is false.  If the second range is a subset of the
5126 	 first, and the ranges begin at the same place, we go from just after
5127 	 the end of the second range to the end of the first.  If the second
5128 	 range is not a subset of the first, or if it is a subset and both
5129 	 ranges end at the same place, the range starts at the start of the
5130 	 first range and ends just before the second range.
5131 	 Otherwise, we can't describe this as a single range.  */
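      /* For instance (a sketch): +[0, 10] combined with -[0, 5] leaves
	 +[6, 10] (subset with equal lows); +[0, 10] with -[5, 20]
	 leaves +[0, 4] (not a subset); but +[0, 10] with -[3, 5] would
	 need two disjoint pieces, so it is rejected below.  */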
5132       if (no_overlap)
5133 	in_p = 1, low = low0, high = high0;
5134       else if (lowequal && highequal)
5135 	in_p = 0, low = high = 0;
5136       else if (subset && lowequal)
5137 	{
5138 	  low = range_successor (high1);
5139 	  high = high0;
5140 	  in_p = 1;
5141 	  if (low == 0)
5142 	    {
5143 	      /* We are in the weird situation where high0 > high1 but
5144 		 high1 has no successor.  Punt.  */
5145 	      return 0;
5146 	    }
5147 	}
5148       else if (! subset || highequal)
5149 	{
5150 	  low = low0;
5151 	  high = range_predecessor (low1);
5152 	  in_p = 1;
5153 	  if (high == 0)
5154 	    {
5155 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5156 	      return 0;
5157 	    }
5158 	}
5159       else
5160 	return 0;
5161     }
5162 
5163   else if (! in0_p && in1_p)
5164     {
5165       /* If they don't overlap, the result is the second range.  If the second
5166 	 is a subset of the first, the result is false.  Otherwise,
5167 	 the range starts just after the first range and ends at the
5168 	 end of the second.  */
5169       if (no_overlap)
5170 	in_p = 1, low = low1, high = high1;
5171       else if (subset || highequal)
5172 	in_p = 0, low = high = 0;
5173       else
5174 	{
5175 	  low = range_successor (high0);
5176 	  high = high1;
5177 	  in_p = 1;
5178 	  if (low == 0)
5179 	    {
5180 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5181 	      return 0;
5182 	    }
5183 	}
5184     }
5185 
5186   else
5187     {
5188       /* The case where we are excluding both ranges.  Here the complex case
5189 	 is if they don't overlap.  In that case, the only time we have a
5190 	 range is if they are adjacent.  If the second is a subset of the
5191 	 first, the result is the first.  Otherwise, the range to exclude
5192 	 starts at the beginning of the first range and ends at the end of the
5193 	 second.  */
5194       if (no_overlap)
5195 	{
5196 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5197 					 range_successor (high0),
5198 					 1, low1, 0)))
5199 	    in_p = 0, low = low0, high = high1;
5200 	  else
5201 	    {
5202 	      /* Canonicalize - [min, x] into - [-, x].  */
5203 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5204 		switch (TREE_CODE (TREE_TYPE (low0)))
5205 		  {
5206 		  case ENUMERAL_TYPE:
5207 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5208 				  GET_MODE_BITSIZE
5209 				    (TYPE_MODE (TREE_TYPE (low0)))))
5210 		      break;
5211 		    /* FALLTHROUGH */
5212 		  case INTEGER_TYPE:
5213 		    if (tree_int_cst_equal (low0,
5214 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5215 		      low0 = 0;
5216 		    break;
5217 		  case POINTER_TYPE:
5218 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5219 			&& integer_zerop (low0))
5220 		      low0 = 0;
5221 		    break;
5222 		  default:
5223 		    break;
5224 		  }
5225 
5226 	      /* Canonicalize - [x, max] into - [x, -].  */
5227 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5228 		switch (TREE_CODE (TREE_TYPE (high1)))
5229 		  {
5230 		  case ENUMERAL_TYPE:
5231 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5232 				  GET_MODE_BITSIZE
5233 				    (TYPE_MODE (TREE_TYPE (high1)))))
5234 		      break;
5235 		    /* FALLTHROUGH */
5236 		  case INTEGER_TYPE:
5237 		    if (tree_int_cst_equal (high1,
5238 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5239 		      high1 = 0;
5240 		    break;
5241 		  case POINTER_TYPE:
5242 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5243 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5244 						       high1, 1,
5245 						       build_int_cst (TREE_TYPE (high1), 1),
5246 						       1)))
5247 		      high1 = 0;
5248 		    break;
5249 		  default:
5250 		    break;
5251 		  }
5252 
5253 	      /* The ranges might also be adjacent between the maximum and
5254 	         minimum values of the given type.  For
5255 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5256 	         return + [x + 1, y - 1].  */
5257 	      if (low0 == 0 && high1 == 0)
5258 	        {
5259 		  low = range_successor (high0);
5260 		  high = range_predecessor (low1);
5261 		  if (low == 0 || high == 0)
5262 		    return 0;
5263 
5264 		  in_p = 1;
5265 		}
5266 	      else
5267 		return 0;
5268 	    }
5269 	}
5270       else if (subset)
5271 	in_p = 0, low = low0, high = high0;
5272       else
5273 	in_p = 0, low = low0, high = high1;
5274     }
5275 
5276   *pin_p = in_p, *plow = low, *phigh = high;
5277   return 1;
5278 }
5279 
5280 
5281 /* Subroutine of fold, looking inside expressions of the form
5282    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5283    of the COND_EXPR.  This function is being used also to optimize
5284    A op B ? C : A, by reversing the comparison first.
5285 
5286    Return a folded expression whose code is not a COND_EXPR
5287    anymore, or NULL_TREE if no folding opportunity is found.  */
5288 
5289 static tree
5290 fold_cond_expr_with_comparison (location_t loc, tree type,
5291 				tree arg0, tree arg1, tree arg2)
5292 {
5293   enum tree_code comp_code = TREE_CODE (arg0);
5294   tree arg00 = TREE_OPERAND (arg0, 0);
5295   tree arg01 = TREE_OPERAND (arg0, 1);
5296   tree arg1_type = TREE_TYPE (arg1);
5297   tree tem;
5298 
5299   STRIP_NOPS (arg1);
5300   STRIP_NOPS (arg2);
5301 
5302   /* If we have A op 0 ? A : -A, consider applying the following
5303      transformations:
5304 
5305      A == 0? A : -A    same as -A
5306      A != 0? A : -A    same as A
5307      A >= 0? A : -A    same as abs (A)
5308      A > 0?  A : -A    same as abs (A)
5309      A <= 0? A : -A    same as -abs (A)
5310      A < 0?  A : -A    same as -abs (A)
5311 
5312      None of these transformations work for modes with signed
5313      zeros.  If A is +/-0, the first two transformations will
5314      change the sign of the result (from +0 to -0, or vice
5315      versa).  The last four will fix the sign of the result,
5316      even though the original expressions could be positive or
5317      negative, depending on the sign of A.
5318 
5319      Note that all these transformations are correct if A is
5320      NaN, since the two alternatives (A and -A) are also NaNs.  */
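  /* Concretely: for A == -0.0, "A == 0 ? A : -A" yields -0.0, while
     the transformed "-A" yields +0.0; this sign flip is why
     HONOR_SIGNED_ZEROS blocks all of these transformations.  */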
5321   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5322       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5323 	  ? real_zerop (arg01)
5324 	  : integer_zerop (arg01))
5325       && ((TREE_CODE (arg2) == NEGATE_EXPR
5326 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5327 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5328 	        have already been folded to Y-X, check for that. */
5329 	  || (TREE_CODE (arg1) == MINUS_EXPR
5330 	      && TREE_CODE (arg2) == MINUS_EXPR
5331 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5332 				  TREE_OPERAND (arg2, 1), 0)
5333 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5334 				  TREE_OPERAND (arg2, 0), 0))))
5335     switch (comp_code)
5336       {
5337       case EQ_EXPR:
5338       case UNEQ_EXPR:
5339 	tem = fold_convert_loc (loc, arg1_type, arg1);
5340 	return fold_convert_loc (loc, type, negate_expr (tem));
5341       case NE_EXPR:
5342       case LTGT_EXPR:
5343 	return fold_convert_loc (loc, type, arg1);
5344       case UNGE_EXPR:
5345       case UNGT_EXPR:
5346 	if (flag_trapping_math)
5347 	  break;
5348 	/* Fall through.  */
5349       case GE_EXPR:
5350       case GT_EXPR:
5351 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5352 	  break;
5353 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5354 	return fold_convert_loc (loc, type, tem);
5355       case UNLE_EXPR:
5356       case UNLT_EXPR:
5357 	if (flag_trapping_math)
5358 	  break;
5359 	/* FALLTHRU */
5360       case LE_EXPR:
5361       case LT_EXPR:
5362 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5363 	  break;
5364 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5365 	return negate_expr (fold_convert_loc (loc, type, tem));
5366       default:
5367 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5368 	break;
5369       }
5370 
5371   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5372      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5373      both transformations are correct when A is NaN: A != 0
5374      is then true, and A == 0 is false.  */
5375 
5376   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5377       && integer_zerop (arg01) && integer_zerop (arg2))
5378     {
5379       if (comp_code == NE_EXPR)
5380 	return fold_convert_loc (loc, type, arg1);
5381       else if (comp_code == EQ_EXPR)
5382 	return build_zero_cst (type);
5383     }
5384 
5385   /* Try some transformations of A op B ? A : B.
5386 
5387      A == B? A : B    same as B
5388      A != B? A : B    same as A
5389      A >= B? A : B    same as max (A, B)
5390      A > B?  A : B    same as max (B, A)
5391      A <= B? A : B    same as min (A, B)
5392      A < B?  A : B    same as min (B, A)
5393 
5394      As above, these transformations don't work in the presence
5395      of signed zeros.  For example, if A and B are zeros of
5396      opposite sign, the first two transformations will change
5397      the sign of the result.  In the last four, the original
5398      expressions give different results for (A=+0, B=-0) and
5399      (A=-0, B=+0), but the transformed expressions do not.
5400 
5401      The first two transformations are correct if either A or B
5402      is a NaN.  In the first transformation, the condition will
5403      be false, and B will indeed be chosen.  In the case of the
5404      second transformation, the condition A != B will be true,
5405      and A will be chosen.
5406 
5407      The conversions to max() and min() are not correct if B is
5408      a number and A is not.  The conditions in the original
5409      expressions will be false, so all four give B.  The min()
5410      and max() versions would give a NaN instead.  */
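  /* E.g. if B == 3.0 and A is a NaN, "A < B ? A : B" evaluates to B
     because the comparison is false, whereas MIN_EXPR (A, B) may
     return the NaN; hence the HONOR_NANS guards below.  */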
5411   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5412       && operand_equal_for_comparison_p (arg01, arg2)
5413       /* Avoid these transformations if the COND_EXPR may be used
5414 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5415       && (in_gimple_form
5416 	  || VECTOR_TYPE_P (type)
5417 	  || (! lang_GNU_CXX ()
5418 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5419 	  || ! maybe_lvalue_p (arg1)
5420 	  || ! maybe_lvalue_p (arg2)))
5421     {
5422       tree comp_op0 = arg00;
5423       tree comp_op1 = arg01;
5424       tree comp_type = TREE_TYPE (comp_op0);
5425 
5426       switch (comp_code)
5427 	{
5428 	case EQ_EXPR:
5429 	  return fold_convert_loc (loc, type, arg2);
5430 	case NE_EXPR:
5431 	  return fold_convert_loc (loc, type, arg1);
5432 	case LE_EXPR:
5433 	case LT_EXPR:
5434 	case UNLE_EXPR:
5435 	case UNLT_EXPR:
5436 	  /* In C++ a ?: expression can be an lvalue, so put the
5437 	     operand which will be used if they are equal first
5438 	     so that we can convert this back to the
5439 	     corresponding COND_EXPR.  */
5440 	  if (!HONOR_NANS (arg1))
5441 	    {
5442 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5443 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5444 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5445 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5446 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5447 				   comp_op1, comp_op0);
5448 	      return fold_convert_loc (loc, type, tem);
5449 	    }
5450 	  break;
5451 	case GE_EXPR:
5452 	case GT_EXPR:
5453 	case UNGE_EXPR:
5454 	case UNGT_EXPR:
5455 	  if (!HONOR_NANS (arg1))
5456 	    {
5457 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5458 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5459 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5460 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5461 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5462 				   comp_op1, comp_op0);
5463 	      return fold_convert_loc (loc, type, tem);
5464 	    }
5465 	  break;
5466 	case UNEQ_EXPR:
5467 	  if (!HONOR_NANS (arg1))
5468 	    return fold_convert_loc (loc, type, arg2);
5469 	  break;
5470 	case LTGT_EXPR:
5471 	  if (!HONOR_NANS (arg1))
5472 	    return fold_convert_loc (loc, type, arg1);
5473 	  break;
5474 	default:
5475 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5476 	  break;
5477 	}
5478     }
5479 
5480   return NULL_TREE;
5481 }
5482 
5483 
5484 
5485 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5486 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5487   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5488 		false) >= 2)
5489 #endif
5490 
5491 /* EXP is some logical combination of boolean tests.  See if we can
5492    merge it into some range test.  Return the new tree if so.  */
5493 
5494 static tree
5495 fold_range_test (location_t loc, enum tree_code code, tree type,
5496 		 tree op0, tree op1)
5497 {
5498   int or_op = (code == TRUTH_ORIF_EXPR
5499 	       || code == TRUTH_OR_EXPR);
5500   int in0_p, in1_p, in_p;
5501   tree low0, low1, low, high0, high1, high;
5502   bool strict_overflow_p = false;
5503   tree tem, lhs, rhs;
5504   const char * const warnmsg = G_("assuming signed overflow does not occur "
5505 				  "when simplifying range test");
5506 
5507   if (!INTEGRAL_TYPE_P (type))
5508     return 0;
5509 
5510   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5511   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5512 
5513   /* If this is an OR operation, invert both sides; we will invert
5514      again at the end.  */
5515   if (or_op)
5516     in0_p = ! in0_p, in1_p = ! in1_p;
5517 
5518   /* If both expressions are the same, if we can merge the ranges, and we
5519      can build the range test, return it or it inverted.  If one of the
5520      ranges is always true or always false, consider it to be the same
5521      expression as the other.  */
5522   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5523       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5524 		       in1_p, low1, high1)
5525       && (tem = (build_range_check (loc, type,
5526 				    lhs != 0 ? lhs
5527 				    : rhs != 0 ? rhs : integer_zero_node,
5528 				    in_p, low, high))) != 0)
5529     {
5530       if (strict_overflow_p)
5531 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5532       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5533     }
5534 
5535   /* On machines where the branch cost is expensive, if this is a
5536      short-circuited branch and the underlying object on both sides
5537      is the same, make a non-short-circuit operation.  */
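  /* E.g. "x == 3 || x == 5": the two ranges cannot be merged, but
     both tests inspect the same object, so on such machines the
     expression is rewritten as the branch-free TRUTH_OR_EXPR
     "(x == 3) | (x == 5)" (a sketch of the rewrite below).  */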
5538   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5539   if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
5540     logical_op_non_short_circuit
5541       = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
5542   if (logical_op_non_short_circuit
5543       && !flag_sanitize_coverage
5544       && lhs != 0 && rhs != 0
5545       && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5546       && operand_equal_p (lhs, rhs, 0))
5547     {
5548       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5549 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5550 	 which cases we can't do this.  */
5551       if (simple_operand_p (lhs))
5552 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5553 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5554 			   type, op0, op1);
5555 
5556       else if (!lang_hooks.decls.global_bindings_p ()
5557 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5558 	{
5559 	  tree common = save_expr (lhs);
5560 
5561 	  if ((lhs = build_range_check (loc, type, common,
5562 					or_op ? ! in0_p : in0_p,
5563 					low0, high0)) != 0
5564 	      && (rhs = build_range_check (loc, type, common,
5565 					   or_op ? ! in1_p : in1_p,
5566 					   low1, high1)) != 0)
5567 	    {
5568 	      if (strict_overflow_p)
5569 		fold_overflow_warning (warnmsg,
5570 				       WARN_STRICT_OVERFLOW_COMPARISON);
5571 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5572 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5573 				 type, lhs, rhs);
5574 	    }
5575 	}
5576     }
5577 
5578   return 0;
5579 }
5580 
5581 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5582    bit value.  Arrange things so the extra bits will be set to zero if and
5583    only if C is sign-extended to its full width.  If MASK is nonzero,
5584    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
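/* For instance (a sketch with P == 8 in a 32-bit type): a properly
   sign-extended C == 0xffffffff becomes 0x0000007f (extra bits clear),
   while a zero-extended C == 0x000000ff becomes 0xffffff7f (extra bits
   set), so a later test against the field mask can detect the
   mismatch.  */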
5585 
5586 static tree
5587 unextend (tree c, int p, int unsignedp, tree mask)
5588 {
5589   tree type = TREE_TYPE (c);
5590   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5591   tree temp;
5592 
5593   if (p == modesize || unsignedp)
5594     return c;
5595 
5596   /* We work by getting just the sign bit into the low-order bit, then
5597      into the high-order bit, then sign-extend.  We then XOR that value
5598      with C.  */
5599   temp = build_int_cst (TREE_TYPE (c),
5600 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5601 
5602   /* We must use a signed type in order to get an arithmetic right shift.
5603      However, we must also avoid introducing accidental overflows, so that
5604      a subsequent call to integer_zerop will work.  Hence we must
5605      do the type conversion here.  At this point, the constant is either
5606      zero or one, and the conversion to a signed type can never overflow.
5607      We could get an overflow if this conversion is done anywhere else.  */
5608   if (TYPE_UNSIGNED (type))
5609     temp = fold_convert (signed_type_for (type), temp);
5610 
5611   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5612   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5613   if (mask != 0)
5614     temp = const_binop (BIT_AND_EXPR, temp,
5615 			fold_convert (TREE_TYPE (c), mask));
5616   /* If necessary, convert the type back to match the type of C.  */
5617   if (TYPE_UNSIGNED (type))
5618     temp = fold_convert (type, temp);
5619 
5620   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5621 }
5622 
5623 /* For an expression that has the form
5624      (A && B) || ~B
5625    or
5626      (A || B) && ~B,
5627    we can drop one of the inner expressions and simplify to
5628      A || ~B
5629    or
5630      A && ~B
5631    LOC is the location of the resulting expression.  OP is the inner
5632    logical operation; the left-hand side in the examples above, while CMPOP
5633    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5634    removing a condition that guards another, as in
5635      (A != NULL && A->...) || A == NULL
5636    which we must not transform.  If RHS_ONLY is true, only eliminate the
5637    right-most operand of the inner logical operation.  */
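/* For example, "(a < 1 && b > 2) || b <= 2" simplifies to
   "a < 1 || b <= 2": whenever "b <= 2" is false, the dropped "b > 2"
   was necessarily true and contributed nothing.  */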
5638 
5639 static tree
5640 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5641 				 bool rhs_only)
5642 {
5643   tree type = TREE_TYPE (cmpop);
5644   enum tree_code code = TREE_CODE (cmpop);
5645   enum tree_code truthop_code = TREE_CODE (op);
5646   tree lhs = TREE_OPERAND (op, 0);
5647   tree rhs = TREE_OPERAND (op, 1);
5648   tree orig_lhs = lhs, orig_rhs = rhs;
5649   enum tree_code rhs_code = TREE_CODE (rhs);
5650   enum tree_code lhs_code = TREE_CODE (lhs);
5651   enum tree_code inv_code;
5652 
5653   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5654     return NULL_TREE;
5655 
5656   if (TREE_CODE_CLASS (code) != tcc_comparison)
5657     return NULL_TREE;
5658 
5659   if (rhs_code == truthop_code)
5660     {
5661       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5662       if (newrhs != NULL_TREE)
5663 	{
5664 	  rhs = newrhs;
5665 	  rhs_code = TREE_CODE (rhs);
5666 	}
5667     }
5668   if (lhs_code == truthop_code && !rhs_only)
5669     {
5670       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5671       if (newlhs != NULL_TREE)
5672 	{
5673 	  lhs = newlhs;
5674 	  lhs_code = TREE_CODE (lhs);
5675 	}
5676     }
5677 
5678   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5679   if (inv_code == rhs_code
5680       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5681       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5682     return lhs;
5683   if (!rhs_only && inv_code == lhs_code
5684       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5685       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5686     return rhs;
5687   if (rhs != orig_rhs || lhs != orig_lhs)
5688     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5689 			    lhs, rhs);
5690   return NULL_TREE;
5691 }
5692 
5693 /* Find ways of folding logical expressions of LHS and RHS:
5694    Try to merge two comparisons to the same innermost item.
5695    Look for range tests like "ch >= '0' && ch <= '9'".
5696    Look for combinations of simple terms on machines with expensive branches
5697    and evaluate the RHS unconditionally.
5698 
5699    For example, if we have p->a == 2 && p->b == 4 and we can make an
5700    object large enough to span both A and B, we can do this with a comparison
5701    against the object ANDed with the a mask.
5702 
5703    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5704    operations to do this with one comparison.
5705 
5706    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5707    function and the one above.
5708 
5709    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5710    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5711 
5712    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5713    two operands.
5714 
5715    We return the simplified tree or 0 if no optimization is possible.  */
5716 
5717 static tree
5718 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5719 		    tree lhs, tree rhs)
5720 {
5721   /* If this is the "or" of two comparisons, we can do something if
5722      the comparisons are NE_EXPR.  If this is the "and", we can do something
5723      if the comparisons are EQ_EXPR.  I.e.,
5724 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5725 
5726      WANTED_CODE is this operation code.  For single bit fields, we can
5727      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5728      comparison for one-bit fields.  */
5729 
5730   enum tree_code wanted_code;
5731   enum tree_code lcode, rcode;
5732   tree ll_arg, lr_arg, rl_arg, rr_arg;
5733   tree ll_inner, lr_inner, rl_inner, rr_inner;
5734   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5735   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5736   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5737   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5738   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5739   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5740   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5741   scalar_int_mode lnmode, rnmode;
5742   tree ll_mask, lr_mask, rl_mask, rr_mask;
5743   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5744   tree l_const, r_const;
5745   tree lntype, rntype, result;
5746   HOST_WIDE_INT first_bit, end_bit;
5747   int volatilep;
5748 
5749   /* Start by getting the comparison codes.  Fail if anything is volatile.
5750      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5751      it were surrounded with a NE_EXPR.  */
5752 
5753   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5754     return 0;
5755 
5756   lcode = TREE_CODE (lhs);
5757   rcode = TREE_CODE (rhs);
5758 
5759   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5760     {
5761       lhs = build2 (NE_EXPR, truth_type, lhs,
5762 		    build_int_cst (TREE_TYPE (lhs), 0));
5763       lcode = NE_EXPR;
5764     }
5765 
5766   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5767     {
5768       rhs = build2 (NE_EXPR, truth_type, rhs,
5769 		    build_int_cst (TREE_TYPE (rhs), 0));
5770       rcode = NE_EXPR;
5771     }
5772 
5773   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5774       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5775     return 0;
5776 
5777   ll_arg = TREE_OPERAND (lhs, 0);
5778   lr_arg = TREE_OPERAND (lhs, 1);
5779   rl_arg = TREE_OPERAND (rhs, 0);
5780   rr_arg = TREE_OPERAND (rhs, 1);
5781 
5782   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5783   if (simple_operand_p (ll_arg)
5784       && simple_operand_p (lr_arg))
5785     {
5786       if (operand_equal_p (ll_arg, rl_arg, 0)
5787           && operand_equal_p (lr_arg, rr_arg, 0))
5788 	{
5789           result = combine_comparisons (loc, code, lcode, rcode,
5790 					truth_type, ll_arg, lr_arg);
5791 	  if (result)
5792 	    return result;
5793 	}
5794       else if (operand_equal_p (ll_arg, rr_arg, 0)
5795                && operand_equal_p (lr_arg, rl_arg, 0))
5796 	{
5797           result = combine_comparisons (loc, code, lcode,
5798 					swap_tree_comparison (rcode),
5799 					truth_type, ll_arg, lr_arg);
5800 	  if (result)
5801 	    return result;
5802 	}
5803     }
5804 
5805   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5806 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5807 
5808   /* If the RHS can be evaluated unconditionally and its operands are
5809      simple, it wins to evaluate the RHS unconditionally on machines
5810      with expensive branches.  In this case, this isn't a comparison
5811      that can be merged.  */
5812 
5813   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5814 		   false) >= 2
5815       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5816       && simple_operand_p (rl_arg)
5817       && simple_operand_p (rr_arg))
5818     {
5819       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5820       if (code == TRUTH_OR_EXPR
5821 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5822 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5823 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5824 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5825 	return build2_loc (loc, NE_EXPR, truth_type,
5826 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5827 				   ll_arg, rl_arg),
5828 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5829 
5830       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5831       if (code == TRUTH_AND_EXPR
5832 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5833 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5834 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5835 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5836 	return build2_loc (loc, EQ_EXPR, truth_type,
5837 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5838 				   ll_arg, rl_arg),
5839 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5840     }
5841 
5842   /* See if the comparisons can be merged.  Then get all the parameters for
5843      each side.  */
5844 
5845   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5846       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5847     return 0;
5848 
5849   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5850   volatilep = 0;
5851   ll_inner = decode_field_reference (loc, &ll_arg,
5852 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5853 				     &ll_unsignedp, &ll_reversep, &volatilep,
5854 				     &ll_mask, &ll_and_mask);
5855   lr_inner = decode_field_reference (loc, &lr_arg,
5856 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5857 				     &lr_unsignedp, &lr_reversep, &volatilep,
5858 				     &lr_mask, &lr_and_mask);
5859   rl_inner = decode_field_reference (loc, &rl_arg,
5860 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5861 				     &rl_unsignedp, &rl_reversep, &volatilep,
5862 				     &rl_mask, &rl_and_mask);
5863   rr_inner = decode_field_reference (loc, &rr_arg,
5864 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5865 				     &rr_unsignedp, &rr_reversep, &volatilep,
5866 				     &rr_mask, &rr_and_mask);
5867 
5868   /* The inner operation on the lhs of each comparison must be the
5869      same if we are to be able to do anything.  Then see if we have
5870      constants.  If not, the same must be true for
5871      the rhs's.  */
5872   if (volatilep
5873       || ll_reversep != rl_reversep
5874       || ll_inner == 0 || rl_inner == 0
5875       || ! operand_equal_p (ll_inner, rl_inner, 0))
5876     return 0;
5877 
5878   if (TREE_CODE (lr_arg) == INTEGER_CST
5879       && TREE_CODE (rr_arg) == INTEGER_CST)
5880     {
5881       l_const = lr_arg, r_const = rr_arg;
5882       lr_reversep = ll_reversep;
5883     }
5884   else if (lr_reversep != rr_reversep
5885 	   || lr_inner == 0 || rr_inner == 0
5886 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5887     return 0;
5888   else
5889     l_const = r_const = 0;
5890 
5891   /* If either comparison code is not correct for our logical operation,
5892      fail.  However, we can convert a one-bit comparison against zero into
5893      the opposite comparison against that bit being set in the field.  */
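  /* E.g. since "(x & 8)" is either 0 or 8, the test "(x & 8) == 0" is
     equivalent to "(x & 8) != 8", letting an EQ comparison join an OR
     of NE tests (and vice versa for AND).  */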
5894 
5895   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5896   if (lcode != wanted_code)
5897     {
5898       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5899 	{
5900 	  /* Make the left operand unsigned, since we are only interested
5901 	     in the value of one bit.  Otherwise we are doing the wrong
5902 	     thing below.  */
5903 	  ll_unsignedp = 1;
5904 	  l_const = ll_mask;
5905 	}
5906       else
5907 	return 0;
5908     }
5909 
5910   /* This is analogous to the code for l_const above.  */
5911   if (rcode != wanted_code)
5912     {
5913       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5914 	{
5915 	  rl_unsignedp = 1;
5916 	  r_const = rl_mask;
5917 	}
5918       else
5919 	return 0;
5920     }
5921 
5922   /* See if we can find a mode that contains both fields being compared on
5923      the left.  If we can't, fail.  Otherwise, update all constants and masks
5924      to be relative to a field of that size.  */
5925   first_bit = MIN (ll_bitpos, rl_bitpos);
5926   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5927   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5928 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5929 		      volatilep, &lnmode))
5930     return 0;
5931 
5932   lnbitsize = GET_MODE_BITSIZE (lnmode);
5933   lnbitpos = first_bit & ~ (lnbitsize - 1);
5934   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5935   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5936 
5937   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5938     {
5939       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5940       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5941     }
5942 
5943   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5944 			 size_int (xll_bitpos));
5945   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5946 			 size_int (xrl_bitpos));
5947 
5948   if (l_const)
5949     {
5950       l_const = fold_convert_loc (loc, lntype, l_const);
5951       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5952       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5953       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5954 					fold_build1_loc (loc, BIT_NOT_EXPR,
5955 						     lntype, ll_mask))))
5956 	{
5957 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5958 
5959 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5960 	}
5961     }
5962   if (r_const)
5963     {
5964       r_const = fold_convert_loc (loc, lntype, r_const);
5965       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5966       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5967       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5968 					fold_build1_loc (loc, BIT_NOT_EXPR,
5969 						     lntype, rl_mask))))
5970 	{
5971 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5972 
5973 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5974 	}
5975     }
5976 
5977   /* If the right sides are not constant, do the same for them.  Also,
5978      disallow this optimization if a size, signedness or storage order
5979      mismatch occurs between the left and right sides.  */
5980   if (l_const == 0)
5981     {
5982       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5983 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5984 	  || ll_reversep != lr_reversep
5985 	  /* Make sure the two fields on the right
5986 	     correspond to the left without being swapped.  */
5987 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5988 	return 0;
5989 
5990       first_bit = MIN (lr_bitpos, rr_bitpos);
5991       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5992       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5993 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5994 			  volatilep, &rnmode))
5995 	return 0;
5996 
5997       rnbitsize = GET_MODE_BITSIZE (rnmode);
5998       rnbitpos = first_bit & ~ (rnbitsize - 1);
5999       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6000       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6001 
6002       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6003 	{
6004 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6005 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6006 	}
6007 
6008       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6009 							    rntype, lr_mask),
6010 			     size_int (xlr_bitpos));
6011       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6012 							    rntype, rr_mask),
6013 			     size_int (xrr_bitpos));
6014 
6015       /* Make a mask that corresponds to both fields being compared.
6016 	 Do this for both items being compared.  If the operands are the
6017 	 same size and the bits being compared are in the same position
6018 	 then we can do this by masking both and comparing the masked
6019 	 results.  */
6020       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6021       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6022       if (lnbitsize == rnbitsize
6023 	  && xll_bitpos == xlr_bitpos
6024 	  && lnbitpos >= 0
6025 	  && rnbitpos >= 0)
6026 	{
6027 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6028 				    lntype, lnbitsize, lnbitpos,
6029 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6030 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6031 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6032 
6033 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6034 				    rntype, rnbitsize, rnbitpos,
6035 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6036 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6037 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6038 
6039 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6040 	}
6041 
6042       /* There is still another way we can do something:  If both pairs of
6043 	 fields being compared are adjacent, we may be able to make a wider
6044 	 field containing them both.
6045 
6046 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6047 	 the mask must be shifted to account for the shift done by
6048 	 make_bit_field_ref.  */
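      /* E.g. if the lhs compares two adjacent 16-bit fields of *p and
	 the rhs compares the two corresponding fields of *q, both
	 sides can become a single 32-bit access (a sketch; the bit
	 positions must line up as checked below).  */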
6049       if (((ll_bitsize + ll_bitpos == rl_bitpos
6050 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6051 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6052 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6053 	  && ll_bitpos >= 0
6054 	  && rl_bitpos >= 0
6055 	  && lr_bitpos >= 0
6056 	  && rr_bitpos >= 0)
6057 	{
6058 	  tree type;
6059 
6060 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6061 				    ll_bitsize + rl_bitsize,
6062 				    MIN (ll_bitpos, rl_bitpos),
6063 				    ll_unsignedp, ll_reversep);
6064 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6065 				    lr_bitsize + rr_bitsize,
6066 				    MIN (lr_bitpos, rr_bitpos),
6067 				    lr_unsignedp, lr_reversep);
6068 
6069 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6070 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6071 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6072 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6073 
6074 	  /* Convert to the smaller type before masking out unwanted bits.  */
6075 	  type = lntype;
6076 	  if (lntype != rntype)
6077 	    {
6078 	      if (lnbitsize > rnbitsize)
6079 		{
6080 		  lhs = fold_convert_loc (loc, rntype, lhs);
6081 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6082 		  type = rntype;
6083 		}
6084 	      else if (lnbitsize < rnbitsize)
6085 		{
6086 		  rhs = fold_convert_loc (loc, lntype, rhs);
6087 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6088 		  type = lntype;
6089 		}
6090 	    }
6091 
6092 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6093 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6094 
6095 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6096 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6097 
6098 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6099 	}
6100 
6101       return 0;
6102     }
6103 
6104   /* Handle the case of comparisons with constants.  If there is something in
6105      common between the masks, those bits of the constants must be the same.
6106      If not, the condition is always false.  Test for this to avoid generating
6107      incorrect code below.  */
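  /* E.g. "(x & 3) == 1 && (x & 5) == 4" is always false: the masks
     share bit 0, where the constants require 1 and 0 respectively.  */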
6108   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6109   if (! integer_zerop (result)
6110       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6111 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6112     {
6113       if (wanted_code == NE_EXPR)
6114 	{
6115 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6116 	  return constant_boolean_node (true, truth_type);
6117 	}
6118       else
6119 	{
6120 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6121 	  return constant_boolean_node (false, truth_type);
6122 	}
6123     }
6124 
6125   if (lnbitpos < 0)
6126     return 0;
6127 
6128   /* Construct the expression we will return.  First get the component
6129      reference we will make.  Unless the mask is all ones the width of
6130      that field, perform the mask operation.  Then compare with the
6131      merged constant.  */
6132   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6133 			       lntype, lnbitsize, lnbitpos,
6134 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6135 
6136   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6137   if (! all_ones_mask_p (ll_mask, lnbitsize))
6138     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6139 
6140   return build2_loc (loc, wanted_code, truth_type, result,
6141 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6142 }
6143 
6144 /* T is an integer expression that is being multiplied by, divided by, or
6145    taken modulo a constant C (CODE says which operation and what kind of
6146    divide or modulus).  See if we can eliminate that operation by folding
6147    it with other operations already in T.  WIDE_TYPE, if non-null, is a
6148    type that should be used for the computation if wider than our type.
6149 
6150    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6151    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6152    expression would not overflow or that overflow is undefined for the type
6153    in the language in question.
6154 
6155    If we return a non-null expression, it is an equivalent form of the
6156    original computation, but need not be in the original type.
6157 
6158    We set *STRICT_OVERFLOW_P to true if the return value depends on
6159    signed overflow being undefined.  Otherwise we do not change
6160    *STRICT_OVERFLOW_P.  */
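/* E.g. (a sketch with wrapping 32-bit arithmetic): for X == 1 << 29,
   (X * 8) / 4 wraps to 0 while X * 2 == 1 << 30, so the rewrite is
   only valid because signed overflow is undefined here.  */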
6161 
6162 static tree
6163 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6164 		bool *strict_overflow_p)
6165 {
6166   /* To avoid exponential search depth, refuse to allow recursion past
6167      three levels.  Beyond that (1) it's highly unlikely that we'll find
6168      something interesting and (2) we've probably processed it before
6169      when we built the inner expression.  */
6170 
6171   static int depth;
6172   tree ret;
6173 
6174   if (depth > 3)
6175     return NULL;
6176 
6177   depth++;
6178   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6179   depth--;
6180 
6181   return ret;
6182 }
6183 
6184 static tree
6185 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6186 		  bool *strict_overflow_p)
6187 {
6188   tree type = TREE_TYPE (t);
6189   enum tree_code tcode = TREE_CODE (t);
6190   tree ctype = (wide_type != 0
6191 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6192 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6193 		? wide_type : type);
6194   tree t1, t2;
6195   int same_p = tcode == code;
6196   tree op0 = NULL_TREE, op1 = NULL_TREE;
6197   bool sub_strict_overflow_p;
6198 
6199   /* Don't deal with constants of zero here; they confuse the code below.  */
6200   if (integer_zerop (c))
6201     return NULL_TREE;
6202 
6203   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6204     op0 = TREE_OPERAND (t, 0);
6205 
6206   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6207     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6208 
6209   /* Note that we need not handle conditional operations here since fold
6210      already handles those cases.  So just do arithmetic here.  */
6211   switch (tcode)
6212     {
6213     case INTEGER_CST:
6214       /* For a constant, we can always simplify if we are a multiply
6215 	 or (for divide and modulus) if it is a multiple of our constant.  */
6216       if (code == MULT_EXPR
6217 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6218 				TYPE_SIGN (type)))
6219 	{
6220 	  tree tem = const_binop (code, fold_convert (ctype, t),
6221 				  fold_convert (ctype, c));
6222 	  /* If the multiplication overflowed, we lost information on it.
6223 	     See PR68142 and PR69845.  */
6224 	  if (TREE_OVERFLOW (tem))
6225 	    return NULL_TREE;
6226 	  return tem;
6227 	}
6228       break;
6229 
6230     CASE_CONVERT: case NON_LVALUE_EXPR:
6231       /* If op0 is an expression ...  */
6232       if ((COMPARISON_CLASS_P (op0)
6233 	   || UNARY_CLASS_P (op0)
6234 	   || BINARY_CLASS_P (op0)
6235 	   || VL_EXP_CLASS_P (op0)
6236 	   || EXPRESSION_CLASS_P (op0))
6237 	  /* ... and has wrapping overflow, and its type is smaller
6238 	     than ctype, then we cannot pass through as widening.  */
6239 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6240 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6241 	       && (TYPE_PRECISION (ctype)
6242 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6243 	      /* ... or this is a truncation (t is narrower than op0),
6244 		 then we cannot pass through this narrowing.  */
6245 	      || (TYPE_PRECISION (type)
6246 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6247 	      /* ... or signedness changes for division or modulus,
6248 		 then we cannot pass through this conversion.  */
6249 	      || (code != MULT_EXPR
6250 		  && (TYPE_UNSIGNED (ctype)
6251 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6252 	      /* ... or has undefined overflow while the converted to
6253 		 type has not, we cannot do the operation in the inner type
6254 		 as that would introduce undefined overflow.  */
6255 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6256 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6257 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6258 	break;
6259 
6260       /* Pass the constant down and see if we can make a simplification.  If
6261 	 we can, replace this expression with the inner simplification for
6262 	 possible later conversion to our or some other type.  */
6263       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6264 	  && TREE_CODE (t2) == INTEGER_CST
6265 	  && !TREE_OVERFLOW (t2)
6266 	  && (t1 = extract_muldiv (op0, t2, code,
6267 				   code == MULT_EXPR ? ctype : NULL_TREE,
6268 				   strict_overflow_p)) != 0)
6269 	return t1;
6270       break;
6271 
6272     case ABS_EXPR:
6273       /* If widening the type changes it from signed to unsigned, then we
6274          must avoid building ABS_EXPR itself as unsigned.  */
6275       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6276         {
6277           tree cstype = (*signed_type_for) (ctype);
6278           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6279 	      != 0)
6280             {
6281               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6282               return fold_convert (ctype, t1);
6283             }
6284           break;
6285         }
6286       /* If the constant is negative, we cannot simplify this.  */
6287       if (tree_int_cst_sgn (c) == -1)
6288         break;
6289       /* FALLTHROUGH */
6290     case NEGATE_EXPR:
6291       /* For division and modulus, type can't be unsigned, as e.g.
6292 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6293 	 For signed types, even with wrapping overflow, this is fine.  */
6294       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6295 	break;
6296       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6297 	  != 0)
6298 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6299       break;
6300 
6301     case MIN_EXPR:  case MAX_EXPR:
6302       /* If widening the type changes the signedness, then we can't perform
6303 	 this optimization as that changes the result.  */
6304       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6305 	break;
6306 
6307       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6308       sub_strict_overflow_p = false;
6309       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6310 				&sub_strict_overflow_p)) != 0
6311 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6312 				   &sub_strict_overflow_p)) != 0)
6313 	{
6314 	  if (tree_int_cst_sgn (c) < 0)
6315 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6316 	  if (sub_strict_overflow_p)
6317 	    *strict_overflow_p = true;
6318 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6319 			      fold_convert (ctype, t2));
6320 	}
6321       break;
6322 
6323     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6324       /* If the second operand is constant, this is a multiplication
6325 	 or floor division by a power of two, so we can treat it that
6326 	 way unless the multiplier or divisor overflows.  Signed
6327 	 left-shift overflow is implementation-defined rather than
6328 	 undefined in C90, so do not convert signed left shift into
6329 	 multiplication.  */
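      /* For example, an unsigned X << 3 is rewritten below as X * 8, and
	 X >> 2 as the floor division X / 4, before recursing on the
	 equivalent multiply or divide form.  */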
6330       if (TREE_CODE (op1) == INTEGER_CST
6331 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6332 	  /* const_binop may not detect overflow correctly,
6333 	     so check for it explicitly here.  */
6334 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6335 			wi::to_wide (op1))
6336 	  && (t1 = fold_convert (ctype,
6337 				 const_binop (LSHIFT_EXPR, size_one_node,
6338 					      op1))) != 0
6339 	  && !TREE_OVERFLOW (t1))
6340 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6341 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6342 				       ctype,
6343 				       fold_convert (ctype, op0),
6344 				       t1),
6345 			       c, code, wide_type, strict_overflow_p);
6346       break;
6347 
6348     case PLUS_EXPR:  case MINUS_EXPR:
6349       /* See if we can eliminate the operation on both sides.  If we can, we
6350 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6351 	 cases where we can do anything are if the second operand is a
6352 	 constant.  */
6353       sub_strict_overflow_p = false;
6354       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6355       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6356       if (t1 != 0 && t2 != 0
6357 	  && TYPE_OVERFLOW_WRAPS (ctype)
6358 	  && (code == MULT_EXPR
6359 	      /* If not multiplication, we can only do this if both operands
6360 		 are divisible by c.  */
6361 	      || (multiple_of_p (ctype, op0, c)
6362 	          && multiple_of_p (ctype, op1, c))))
6363 	{
6364 	  if (sub_strict_overflow_p)
6365 	    *strict_overflow_p = true;
6366 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6367 			      fold_convert (ctype, t2));
6368 	}
6369 
6370       /* If this was a subtraction, negate OP1 and set it to be an addition.
6371 	 This simplifies the logic below.  */
6372       if (tcode == MINUS_EXPR)
6373 	{
6374 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6375 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6376 	  if (TREE_CODE (op0) == INTEGER_CST)
6377 	    {
6378 	      std::swap (op0, op1);
6379 	      std::swap (t1, t2);
6380 	    }
6381 	}
6382 
6383       if (TREE_CODE (op1) != INTEGER_CST)
6384 	break;
6385 
6386       /* If either OP1 or C are negative, this optimization is not safe for
6387 	 some of the division and remainder types while for others we need
6388 	 to change the code.  */
6389       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6390 	{
6391 	  if (code == CEIL_DIV_EXPR)
6392 	    code = FLOOR_DIV_EXPR;
6393 	  else if (code == FLOOR_DIV_EXPR)
6394 	    code = CEIL_DIV_EXPR;
6395 	  else if (code != MULT_EXPR
6396 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6397 	    break;
6398 	}
6399 
6400       /* If it's a multiply or a division/modulus operation of a multiple
6401          of our constant, do the operation and verify it doesn't overflow.  */
6402       if (code == MULT_EXPR
6403 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6404 				TYPE_SIGN (type)))
6405 	{
6406 	  op1 = const_binop (code, fold_convert (ctype, op1),
6407 			     fold_convert (ctype, c));
6408 	  /* We allow the constant to overflow with wrapping semantics.  */
6409 	  if (op1 == 0
6410 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6411 	    break;
6412 	}
6413       else
6414 	break;
6415 
6416       /* If we have an unsigned type, we cannot widen the operation since it
6417 	 will change the result if the original computation overflowed.  */
6418       if (TYPE_UNSIGNED (ctype) && ctype != type)
6419 	break;
6420 
6421       /* The last case is if we are a multiply.  In that case, we can
6422 	 apply the distributive law to commute the multiply and addition
6423 	 if the multiplication of the constants doesn't overflow
6424 	 and overflow is defined.  With undefined overflow
6425 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6426       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6427 	return fold_build2 (tcode, ctype,
6428 			    fold_build2 (code, ctype,
6429 					 fold_convert (ctype, op0),
6430 					 fold_convert (ctype, c)),
6431 			    op1);
6432 
6433       break;
6434 
6435     case MULT_EXPR:
6436       /* We have a special case here if we are doing something like
6437 	 (C * 8) % 4 since we know that's zero.  */
6438       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6439 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6440 	  /* If the multiplication can overflow we cannot optimize this.  */
6441 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6442 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6443 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6444 				TYPE_SIGN (type)))
6445 	{
6446 	  *strict_overflow_p = true;
6447 	  return omit_one_operand (type, integer_zero_node, op0);
6448 	}
6449 
6450       /* ... fall through ...  */
6451 
6452     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6453     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6454       /* If we can extract our operation from the LHS, do so and return a
6455 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6456 	 do something only if the second operand is a constant.  */
6457       if (same_p
6458 	  && TYPE_OVERFLOW_WRAPS (ctype)
6459 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6460 				   strict_overflow_p)) != 0)
6461 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6462 			    fold_convert (ctype, op1));
6463       else if (tcode == MULT_EXPR && code == MULT_EXPR
6464 	       && TYPE_OVERFLOW_WRAPS (ctype)
6465 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6466 					strict_overflow_p)) != 0)
6467 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6468 			    fold_convert (ctype, t1));
6469       else if (TREE_CODE (op1) != INTEGER_CST)
6470 	return 0;
6471 
6472       /* If these are the same operation types, we can associate them
6473 	 assuming no overflow.  */
6474       if (tcode == code)
6475 	{
6476 	  bool overflow_p = false;
6477 	  bool overflow_mul_p;
6478 	  signop sign = TYPE_SIGN (ctype);
6479 	  unsigned prec = TYPE_PRECISION (ctype);
6480 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6481 				  wi::to_wide (c, prec),
6482 				  sign, &overflow_mul_p);
6483 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6484 	  if (overflow_mul_p
6485 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6486 	    overflow_p = true;
6487 	  if (!overflow_p)
6488 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6489 				wide_int_to_tree (ctype, mul));
6490 	}
6491 
6492       /* If these operations "cancel" each other, we have the main
6493 	 optimizations of this pass, which occur when either constant is a
6494 	 multiple of the other, in which case we replace this with an
6495 	 operation of either CODE or TCODE.
6496 
6497 	 If we have an unsigned type, we cannot do this since it will change
6498 	 the result if the original computation overflowed.  */
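      /* For example, (X * 8) / 4 simplifies to X * 2 because the
	 multiplier is a multiple of the divisor, while (X * 4) / 8
	 simplifies to X / 2 because the divisor is a multiple of the
	 multiplier; both depend on signed overflow being undefined,
	 which is why *STRICT_OVERFLOW_P is set.  */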
6499       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6500 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6501 	      || (tcode == MULT_EXPR
6502 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6503 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6504 		  && code != MULT_EXPR)))
6505 	{
6506 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6507 				 TYPE_SIGN (type)))
6508 	    {
6509 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6510 		*strict_overflow_p = true;
6511 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6512 				  fold_convert (ctype,
6513 						const_binop (TRUNC_DIV_EXPR,
6514 							     op1, c)));
6515 	    }
6516 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6517 				      TYPE_SIGN (type)))
6518 	    {
6519 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6520 		*strict_overflow_p = true;
6521 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6522 				  fold_convert (ctype,
6523 						const_binop (TRUNC_DIV_EXPR,
6524 							     c, op1)));
6525 	    }
6526 	}
6527       break;
6528 
6529     default:
6530       break;
6531     }
6532 
6533   return 0;
6534 }
6535 
6536 /* Return a node which has the indicated constant VALUE (either 0 or
6537    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6538    and is of the indicated TYPE.  */
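/* For example, constant_boolean_node (true, boolean_type_node) returns
   boolean_true_node, while for a vector TYPE it returns a vector whose
   elements are all -1 (all bits set).  */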
6539 
6540 tree
6541 constant_boolean_node (bool value, tree type)
6542 {
6543   if (type == integer_type_node)
6544     return value ? integer_one_node : integer_zero_node;
6545   else if (type == boolean_type_node)
6546     return value ? boolean_true_node : boolean_false_node;
6547   else if (TREE_CODE (type) == VECTOR_TYPE)
6548     return build_vector_from_val (type,
6549 				  build_int_cst (TREE_TYPE (type),
6550 						 value ? -1 : 0));
6551   else
6552     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6553 }
6554 
6555 
6556 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6557    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6558    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6559    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6560    COND is the first argument to CODE; otherwise (as in the example
6561    given here), it is the second argument.  TYPE is the type of the
6562    original expression.  Return NULL_TREE if no simplification is
6563    possible.  */
6564 
6565 static tree
6566 fold_binary_op_with_conditional_arg (location_t loc,
6567 				     enum tree_code code,
6568 				     tree type, tree op0, tree op1,
6569 				     tree cond, tree arg, int cond_first_p)
6570 {
6571   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6572   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6573   tree test, true_value, false_value;
6574   tree lhs = NULL_TREE;
6575   tree rhs = NULL_TREE;
6576   enum tree_code cond_code = COND_EXPR;
6577 
6578   if (TREE_CODE (cond) == COND_EXPR
6579       || TREE_CODE (cond) == VEC_COND_EXPR)
6580     {
6581       test = TREE_OPERAND (cond, 0);
6582       true_value = TREE_OPERAND (cond, 1);
6583       false_value = TREE_OPERAND (cond, 2);
6584       /* If this operand is a throw expression (and thus has void
6585 	 type), it does not make sense to try to perform a logical or
6586 	 arithmetic operation involving it.  */
6587       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6588 	lhs = true_value;
6589       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6590 	rhs = false_value;
6591     }
6592   else if (!(TREE_CODE (type) != VECTOR_TYPE
6593 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6594     {
6595       tree testtype = TREE_TYPE (cond);
6596       test = cond;
6597       true_value = constant_boolean_node (true, testtype);
6598       false_value = constant_boolean_node (false, testtype);
6599     }
6600   else
6601     /* Detect the case of mixing vector and scalar types - bail out.  */
6602     return NULL_TREE;
6603 
6604   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6605     cond_code = VEC_COND_EXPR;
6606 
6607   /* This transformation is only worthwhile if we don't have to wrap ARG
6608      in a SAVE_EXPR and the operation can be simplified without recursing
6609      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6610   if (!TREE_CONSTANT (arg)
6611       && (TREE_SIDE_EFFECTS (arg)
6612 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6613 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6614     return NULL_TREE;
6615 
6616   arg = fold_convert_loc (loc, arg_type, arg);
6617   if (lhs == 0)
6618     {
6619       true_value = fold_convert_loc (loc, cond_type, true_value);
6620       if (cond_first_p)
6621 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6622       else
6623 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6624     }
6625   if (rhs == 0)
6626     {
6627       false_value = fold_convert_loc (loc, cond_type, false_value);
6628       if (cond_first_p)
6629 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6630       else
6631 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6632     }
6633 
6634   /* Check that we have simplified at least one of the branches.  */
6635   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6636     return NULL_TREE;
6637 
6638   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6639 }
6640 
6641 
6642 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6643 
6644    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6645    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6646    ADDEND is the same as X.
6647 
6648    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6649    and finite.  The problematic cases are when X is zero, and its mode
6650    has signed zeros.  In the case of rounding towards -infinity,
6651    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6652    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6653 
6654 bool
6655 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6656 {
6657   if (!real_zerop (addend))
6658     return false;
6659 
6660   /* Don't allow the fold with -fsignaling-nans.  */
6661   if (HONOR_SNANS (element_mode (type)))
6662     return false;
6663 
6664   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6665   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6666     return true;
6667 
6668   /* In a vector or complex, we would need to check the sign of all zeros.  */
6669   if (TREE_CODE (addend) != REAL_CST)
6670     return false;
6671 
6672   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6673   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6674     negate = !negate;
6675 
6676   /* The mode has signed zeros, and we have to honor their sign.
6677      In this situation, there is only one case we can return true for.
6678      X - 0 is the same as X unless rounding towards -infinity is
6679      supported.  */
6680   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6681 }
6682 
6683 /* Subroutine of match.pd that optimizes comparisons of a division by
6684    a nonzero integer constant against an integer constant, i.e.
6685    X/C1 op C2.
6686 
6687    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6688    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
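/* For example, for the signed comparison X/3 == 2 this computes
   PROD = 6 and TMP = 2 and yields *LO = 6, *HI = 8: X/3 == 2 is
   equivalent to 6 <= X && X <= 8.  */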
6689 
6690 enum tree_code
6691 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6692 		  tree *hi, bool *neg_overflow)
6693 {
6694   tree prod, tmp, type = TREE_TYPE (c1);
6695   signop sign = TYPE_SIGN (type);
6696   bool overflow;
6697 
6698   /* We have to do this the hard way to detect unsigned overflow.
6699      prod = int_const_binop (MULT_EXPR, c1, c2);  */
6700   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6701   prod = force_fit_type (type, val, -1, overflow);
6702   *neg_overflow = false;
6703 
6704   if (sign == UNSIGNED)
6705     {
6706       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6707       *lo = prod;
6708 
6709       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6710       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6711       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6712     }
6713   else if (tree_int_cst_sgn (c1) >= 0)
6714     {
6715       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6716       switch (tree_int_cst_sgn (c2))
6717 	{
6718 	case -1:
6719 	  *neg_overflow = true;
6720 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6721 	  *hi = prod;
6722 	  break;
6723 
6724 	case 0:
6725 	  *lo = fold_negate_const (tmp, type);
6726 	  *hi = tmp;
6727 	  break;
6728 
6729 	case 1:
6730 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6731 	  *lo = prod;
6732 	  break;
6733 
6734 	default:
6735 	  gcc_unreachable ();
6736 	}
6737     }
6738   else
6739     {
6740       /* A negative divisor reverses the relational operators.  */
6741       code = swap_tree_comparison (code);
6742 
6743       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6744       switch (tree_int_cst_sgn (c2))
6745 	{
6746 	case -1:
6747 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6748 	  *lo = prod;
6749 	  break;
6750 
6751 	case 0:
6752 	  *hi = fold_negate_const (tmp, type);
6753 	  *lo = tmp;
6754 	  break;
6755 
6756 	case 1:
6757 	  *neg_overflow = true;
6758 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6759 	  *hi = prod;
6760 	  break;
6761 
6762 	default:
6763 	  gcc_unreachable ();
6764 	}
6765     }
6766 
6767   if (code != EQ_EXPR && code != NE_EXPR)
6768     return code;
6769 
6770   if (TREE_OVERFLOW (*lo)
6771       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6772     *lo = NULL_TREE;
6773   if (TREE_OVERFLOW (*hi)
6774       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6775     *hi = NULL_TREE;
6776 
6777   return code;
6778 }
6779 
6780 
6781 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6782    equality/inequality test, then return a simplified form of the test
6783    using a sign test.  Otherwise return NULL_TREE.  RESULT_TYPE is the
6784    desired result type.  */
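/* For example, when A has an 8-bit type, (A & 0x80) != 0 is folded to
   (signed) A < 0, and (A & 0x80) == 0 to (signed) A >= 0.  */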
6785 
6786 static tree
6787 fold_single_bit_test_into_sign_test (location_t loc,
6788 				     enum tree_code code, tree arg0, tree arg1,
6789 				     tree result_type)
6790 {
6791   /* If this is testing a single bit, we can optimize the test.  */
6792   if ((code == NE_EXPR || code == EQ_EXPR)
6793       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6794       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6795     {
6796       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6797 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6798       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6799 
6800       if (arg00 != NULL_TREE
6801 	  /* This is only a win if casting to a signed type is cheap,
6802 	     i.e. when arg00's type is not a partial mode.  */
6803 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
6804 	{
6805 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6806 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6807 			      result_type,
6808 			      fold_convert_loc (loc, stype, arg00),
6809 			      build_int_cst (stype, 0));
6810 	}
6811     }
6812 
6813   return NULL_TREE;
6814 }
6815 
6816 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6817    equality/inequality test, then return a simplified form of
6818    the test using shifts and logical operations.  Otherwise return
6819    NULL_TREE.  RESULT_TYPE is the desired result type.  */
6820 
6821 tree
6822 fold_single_bit_test (location_t loc, enum tree_code code,
6823 		      tree arg0, tree arg1, tree result_type)
6824 {
6825   /* If this is testing a single bit, we can optimize the test.  */
6826   if ((code == NE_EXPR || code == EQ_EXPR)
6827       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6828       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6829     {
6830       tree inner = TREE_OPERAND (arg0, 0);
6831       tree type = TREE_TYPE (arg0);
6832       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6833       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6834       int ops_unsigned;
6835       tree signed_type, unsigned_type, intermediate_type;
6836       tree tem, one;
6837 
6838       /* First, see if we can fold the single bit test into a sign-bit
6839 	 test.  */
6840       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6841 						 result_type);
6842       if (tem)
6843 	return tem;
6844 
6845       /* Otherwise we have (A & C) != 0 where C is a single bit,
6846 	 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6847 	 Similarly for (A & C) == 0.  */
6848 
6849       /* If INNER is a right shift of a constant and it plus BITNUM does
6850 	 not overflow, adjust BITNUM and INNER.  */
6851       if (TREE_CODE (inner) == RSHIFT_EXPR
6852 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6853 	  && bitnum < TYPE_PRECISION (type)
6854 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6855 			TYPE_PRECISION (type) - bitnum))
6856 	{
6857 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6858 	  inner = TREE_OPERAND (inner, 0);
6859 	}
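      /* For example, in ((A >> 4) & 8) != 0 the bit actually tested is
	 bit 7 of A, so BITNUM becomes 7 and INNER becomes A.  */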
6860 
6861       /* If we are going to be able to omit the AND below, we must do our
6862 	 operations as unsigned.  If we must use the AND, we have a choice.
6863 	 Normally unsigned is faster, but for some machines signed is.  */
6864       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6865 		      && !flag_syntax_only) ? 0 : 1;
6866 
6867       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6868       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6869       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6870       inner = fold_convert_loc (loc, intermediate_type, inner);
6871 
6872       if (bitnum != 0)
6873 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6874 			inner, size_int (bitnum));
6875 
6876       one = build_int_cst (intermediate_type, 1);
6877 
6878       if (code == EQ_EXPR)
6879 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6880 
6881       /* Put the AND last so it can combine with more things.  */
6882       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6883 
6884       /* Make sure to return the proper type.  */
6885       inner = fold_convert_loc (loc, result_type, inner);
6886 
6887       return inner;
6888     }
6889   return NULL_TREE;
6890 }
6891 
6892 /* Test whether it is preferable to swap two operands, ARG0 and
6893    ARG1, for example because ARG0 is an integer constant and ARG1
6894    isn't.  */
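/* Callers use the result to canonicalize commutative operands; for
   example, it is what rewrites 5 + X as X + 5, so that later pattern
   matching only has to look for constants on one side.  */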
6895 
6896 bool
6897 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6898 {
6899   if (CONSTANT_CLASS_P (arg1))
6900     return 0;
6901   if (CONSTANT_CLASS_P (arg0))
6902     return 1;
6903 
6904   STRIP_NOPS (arg0);
6905   STRIP_NOPS (arg1);
6906 
6907   if (TREE_CONSTANT (arg1))
6908     return 0;
6909   if (TREE_CONSTANT (arg0))
6910     return 1;
6911 
6912   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6913      for commutative and comparison operators.  Ensuring a canonical
6914      form allows the optimizers to find additional redundancies without
6915      having to explicitly check for both orderings.  */
6916   if (TREE_CODE (arg0) == SSA_NAME
6917       && TREE_CODE (arg1) == SSA_NAME
6918       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6919     return 1;
6920 
6921   /* Put SSA_NAMEs last.  */
6922   if (TREE_CODE (arg1) == SSA_NAME)
6923     return 0;
6924   if (TREE_CODE (arg0) == SSA_NAME)
6925     return 1;
6926 
6927   /* Put variables last.  */
6928   if (DECL_P (arg1))
6929     return 0;
6930   if (DECL_P (arg0))
6931     return 1;
6932 
6933   return 0;
6934 }
6935 
6936 
6937 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6938    means A >= Y && A != MAX, but in this case we know that
6939    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
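/* Concretely, with BOUND = A < X and INEQ = Y < A + 1, the code below
   verifies that (A + 1) - A folds to 1 and then returns A >= Y.  */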
6940 
6941 static tree
6942 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6943 {
6944   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6945 
6946   if (TREE_CODE (bound) == LT_EXPR)
6947     a = TREE_OPERAND (bound, 0);
6948   else if (TREE_CODE (bound) == GT_EXPR)
6949     a = TREE_OPERAND (bound, 1);
6950   else
6951     return NULL_TREE;
6952 
6953   typea = TREE_TYPE (a);
6954   if (!INTEGRAL_TYPE_P (typea)
6955       && !POINTER_TYPE_P (typea))
6956     return NULL_TREE;
6957 
6958   if (TREE_CODE (ineq) == LT_EXPR)
6959     {
6960       a1 = TREE_OPERAND (ineq, 1);
6961       y = TREE_OPERAND (ineq, 0);
6962     }
6963   else if (TREE_CODE (ineq) == GT_EXPR)
6964     {
6965       a1 = TREE_OPERAND (ineq, 0);
6966       y = TREE_OPERAND (ineq, 1);
6967     }
6968   else
6969     return NULL_TREE;
6970 
6971   if (TREE_TYPE (a1) != typea)
6972     return NULL_TREE;
6973 
6974   if (POINTER_TYPE_P (typea))
6975     {
6976       /* Convert the pointer types into integers before taking the difference.  */
6977       tree ta = fold_convert_loc (loc, ssizetype, a);
6978       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6979       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6980     }
6981   else
6982     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6983 
6984   if (!diff || !integer_onep (diff))
6985    return NULL_TREE;
6986 
6987   return fold_build2_loc (loc, GE_EXPR, type, a, y);
6988 }
6989 
6990 /* Fold a sum or difference of at least one multiplication.
6991    Returns the folded tree or NULL if no simplification could be made.  */
6992 
6993 static tree
6994 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6995 			  tree arg0, tree arg1)
6996 {
6997   tree arg00, arg01, arg10, arg11;
6998   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6999 
7000   /* (A * C) +- (B * C) -> (A+-B) * C.
7001      (A * C) +- A -> A * (C+-1).
7002      We are most concerned about the case where C is a constant,
7003      but other combinations show up during loop reduction.  Since
7004      it is not difficult, try all four possibilities.  */
7005 
7006   if (TREE_CODE (arg0) == MULT_EXPR)
7007     {
7008       arg00 = TREE_OPERAND (arg0, 0);
7009       arg01 = TREE_OPERAND (arg0, 1);
7010     }
7011   else if (TREE_CODE (arg0) == INTEGER_CST)
7012     {
7013       arg00 = build_one_cst (type);
7014       arg01 = arg0;
7015     }
7016   else
7017     {
7018       /* We cannot generate constant 1 for fract.  */
7019       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7020 	return NULL_TREE;
7021       arg00 = arg0;
7022       arg01 = build_one_cst (type);
7023     }
7024   if (TREE_CODE (arg1) == MULT_EXPR)
7025     {
7026       arg10 = TREE_OPERAND (arg1, 0);
7027       arg11 = TREE_OPERAND (arg1, 1);
7028     }
7029   else if (TREE_CODE (arg1) == INTEGER_CST)
7030     {
7031       arg10 = build_one_cst (type);
7032       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7033 	 the purpose of this canonicalization.  */
7034       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7035 	  && negate_expr_p (arg1)
7036 	  && code == PLUS_EXPR)
7037 	{
7038 	  arg11 = negate_expr (arg1);
7039 	  code = MINUS_EXPR;
7040 	}
7041       else
7042 	arg11 = arg1;
7043     }
7044   else
7045     {
7046       /* We cannot generate constant 1 for fract.  */
7047       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7048 	return NULL_TREE;
7049       arg10 = arg1;
7050       arg11 = build_one_cst (type);
7051     }
7052   same = NULL_TREE;
7053 
7054   /* Prefer factoring a common non-constant.  */
7055   if (operand_equal_p (arg00, arg10, 0))
7056     same = arg00, alt0 = arg01, alt1 = arg11;
7057   else if (operand_equal_p (arg01, arg11, 0))
7058     same = arg01, alt0 = arg00, alt1 = arg10;
7059   else if (operand_equal_p (arg00, arg11, 0))
7060     same = arg00, alt0 = arg01, alt1 = arg10;
7061   else if (operand_equal_p (arg01, arg10, 0))
7062     same = arg01, alt0 = arg00, alt1 = arg11;
7063 
7064   /* No identical multiplicands; see if we can find a common
7065      power-of-two factor in non-power-of-two multiplies.  This
7066      can help in multi-dimensional array access.  */
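  /* For example, i * 12 + j * 4 is refactored here into (i * 3 + j) * 4.  */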
7067   else if (tree_fits_shwi_p (arg01)
7068 	   && tree_fits_shwi_p (arg11))
7069     {
7070       HOST_WIDE_INT int01, int11, tmp;
7071       bool swap = false;
7072       tree maybe_same;
7073       int01 = tree_to_shwi (arg01);
7074       int11 = tree_to_shwi (arg11);
7075 
7076       /* Move min of absolute values to int11.  */
7077       if (absu_hwi (int01) < absu_hwi (int11))
7078         {
7079 	  tmp = int01, int01 = int11, int11 = tmp;
7080 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7081 	  maybe_same = arg01;
7082 	  swap = true;
7083 	}
7084       else
7085 	maybe_same = arg11;
7086 
7087       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7088 	  /* The remainder should not be a constant, otherwise we
7089 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7090 	     increase the number of multiplications necessary.  */
7091 	  && TREE_CODE (arg10) != INTEGER_CST)
7092         {
7093 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7094 			      build_int_cst (TREE_TYPE (arg00),
7095 					     int01 / int11));
7096 	  alt1 = arg10;
7097 	  same = maybe_same;
7098 	  if (swap)
7099 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7100 	}
7101     }
7102 
7103   if (!same)
7104     return NULL_TREE;
7105 
7106   if (! INTEGRAL_TYPE_P (type)
7107       || TYPE_OVERFLOW_WRAPS (type)
7108       /* We are neither factoring zero nor minus one.  */
7109       || TREE_CODE (same) == INTEGER_CST)
7110     return fold_build2_loc (loc, MULT_EXPR, type,
7111 			fold_build2_loc (loc, code, type,
7112 				     fold_convert_loc (loc, type, alt0),
7113 				     fold_convert_loc (loc, type, alt1)),
7114 			fold_convert_loc (loc, type, same));
7115 
7116   /* SAME may be zero and thus the operation CODE may overflow.  Likewise
7117      SAME may be minus one and thus the multiplication may overflow.  Perform
7118      the sum operation in an unsigned type.  */
7119   tree utype = unsigned_type_for (type);
7120   tree tem = fold_build2_loc (loc, code, utype,
7121 			      fold_convert_loc (loc, utype, alt0),
7122 			      fold_convert_loc (loc, utype, alt1));
7123   /* If the sum evaluated to a constant that is not -INF, the multiplication
7124      cannot overflow.  */
7125   if (TREE_CODE (tem) == INTEGER_CST
7126       && (wi::to_wide (tem)
7127 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7128     return fold_build2_loc (loc, MULT_EXPR, type,
7129 			    fold_convert (type, tem), same);
7130 
7131   /* Do not resort to unsigned multiplication because
7132      we lose the no-overflow property of the expression.  */
7133   return NULL_TREE;
7134 }
7135 
7136 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7137    specified by EXPR into the buffer PTR of length LEN bytes.
7138    Return the number of bytes placed in the buffer, or zero
7139    upon failure.  */
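/* For example, encoding the 32-bit constant 0x11223344 stores the bytes
   44 33 22 11 for a little-endian target and 11 22 33 44 for a
   big-endian one.  */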
7140 
7141 static int
7142 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7143 {
7144   tree type = TREE_TYPE (expr);
7145   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7146   int byte, offset, word, words;
7147   unsigned char value;
7148 
7149   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7150     return 0;
7151   if (off == -1)
7152     off = 0;
7153 
7154   if (ptr == NULL)
7155     /* Dry run.  */
7156     return MIN (len, total_bytes - off);
7157 
7158   words = total_bytes / UNITS_PER_WORD;
7159 
7160   for (byte = 0; byte < total_bytes; byte++)
7161     {
7162       int bitpos = byte * BITS_PER_UNIT;
7163       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7164 	 number of bytes.  */
7165       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7166 
7167       if (total_bytes > UNITS_PER_WORD)
7168 	{
7169 	  word = byte / UNITS_PER_WORD;
7170 	  if (WORDS_BIG_ENDIAN)
7171 	    word = (words - 1) - word;
7172 	  offset = word * UNITS_PER_WORD;
7173 	  if (BYTES_BIG_ENDIAN)
7174 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7175 	  else
7176 	    offset += byte % UNITS_PER_WORD;
7177 	}
7178       else
7179 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7180       if (offset >= off && offset - off < len)
7181 	ptr[offset - off] = value;
7182     }
7183   return MIN (len, total_bytes - off);
7184 }
7185 
7186 
7187 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7188    specified by EXPR into the buffer PTR of length LEN bytes.
7189    Return the number of bytes placed in the buffer, or zero
7190    upon failure.  */
7191 
7192 static int
7193 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7194 {
7195   tree type = TREE_TYPE (expr);
7196   scalar_mode mode = SCALAR_TYPE_MODE (type);
7197   int total_bytes = GET_MODE_SIZE (mode);
7198   FIXED_VALUE_TYPE value;
7199   tree i_value, i_type;
7200 
7201   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7202     return 0;
7203 
7204   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7205 
7206   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7207     return 0;
7208 
7209   value = TREE_FIXED_CST (expr);
7210   i_value = double_int_to_tree (i_type, value.data);
7211 
7212   return native_encode_int (i_value, ptr, len, off);
7213 }
7214 
7215 
7216 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7217    specified by EXPR into the buffer PTR of length LEN bytes.
7218    Return the number of bytes placed in the buffer, or zero
7219    upon failure.  */
7220 
7221 static int
7222 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7223 {
7224   tree type = TREE_TYPE (expr);
7225   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7226   int byte, offset, word, words, bitpos;
7227   unsigned char value;
7228 
7229   /* There are always 32 bits in each long, no matter the size of
7230      the host's long.  We handle floating point representations with
7231      up to 192 bits.  */
7232   long tmp[6];
7233 
7234   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7235     return 0;
7236   if (off == -1)
7237     off = 0;
7238 
7239   if (ptr == NULL)
7240     /* Dry run.  */
7241     return MIN (len, total_bytes - off);
7242 
7243   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7244 
7245   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7246 
7247   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7248        bitpos += BITS_PER_UNIT)
7249     {
7250       byte = (bitpos / BITS_PER_UNIT) & 3;
7251       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7252 
7253       if (UNITS_PER_WORD < 4)
7254 	{
7255 	  word = byte / UNITS_PER_WORD;
7256 	  if (WORDS_BIG_ENDIAN)
7257 	    word = (words - 1) - word;
7258 	  offset = word * UNITS_PER_WORD;
7259 	  if (BYTES_BIG_ENDIAN)
7260 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7261 	  else
7262 	    offset += byte % UNITS_PER_WORD;
7263 	}
7264       else
7265 	{
7266 	  offset = byte;
7267 	  if (BYTES_BIG_ENDIAN)
7268 	    {
7269 	      /* Reverse bytes within each long, or within the entire float
7270 		 if it's smaller than a long (for HFmode).  */
7271 	      offset = MIN (3, total_bytes - 1) - offset;
7272 	      gcc_assert (offset >= 0);
7273 	    }
7274 	}
7275       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7276       if (offset >= off
7277 	  && offset - off < len)
7278 	ptr[offset - off] = value;
7279     }
7280   return MIN (len, total_bytes - off);
7281 }
7282 
7283 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7284    specified by EXPR into the buffer PTR of length LEN bytes.
7285    Return the number of bytes placed in the buffer, or zero
7286    upon failure.  */
7287 
7288 static int
7289 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7290 {
7291   int rsize, isize;
7292   tree part;
7293 
7294   part = TREE_REALPART (expr);
7295   rsize = native_encode_expr (part, ptr, len, off);
7296   if (off == -1 && rsize == 0)
7297     return 0;
7298   part = TREE_IMAGPART (expr);
7299   if (off != -1)
7300     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7301   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7302 			      len - rsize, off);
7303   if (off == -1 && isize != rsize)
7304     return 0;
7305   return rsize + isize;
7306 }
7307 
7308 
7309 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7310    specified by EXPR into the buffer PTR of length LEN bytes.
7311    Return the number of bytes placed in the buffer, or zero
7312    upon failure.  */
7313 
7314 static int
7315 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7316 {
7317   unsigned HOST_WIDE_INT i, count;
7318   int size, offset;
7319   tree itype, elem;
7320 
7321   offset = 0;
7322   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7323     return 0;
7324   itype = TREE_TYPE (TREE_TYPE (expr));
7325   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7326   for (i = 0; i < count; i++)
7327     {
7328       if (off >= size)
7329 	{
7330 	  off -= size;
7331 	  continue;
7332 	}
7333       elem = VECTOR_CST_ELT (expr, i);
7334       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7335 				    len - offset, off);
7336       if ((off == -1 && res != size) || res == 0)
7337 	return 0;
7338       offset += res;
7339       if (offset >= len)
7340 	return (off == -1 && i < count - 1) ? 0 : offset;
7341       if (off != -1)
7342 	off = 0;
7343     }
7344   return offset;
7345 }
7346 
7347 
7348 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7349    specified by EXPR into the buffer PTR of length LEN bytes.
7350    Return the number of bytes placed in the buffer, or zero
7351    upon failure.  */
7352 
7353 static int
7354 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7355 {
7356   tree type = TREE_TYPE (expr);
7357 
7358   /* Wide-char strings are encoded in target byte order, so natively
7359      encoding them is trivial.  */
7360   if (BITS_PER_UNIT != CHAR_BIT
7361       || TREE_CODE (type) != ARRAY_TYPE
7362       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7363       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7364     return 0;
7365 
7366   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7367   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7368     return 0;
7369   if (off == -1)
7370     off = 0;
7371   if (ptr == NULL)
7372     /* Dry run.  */;
7373   else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7374     {
7375       int written = 0;
7376       if (off < TREE_STRING_LENGTH (expr))
7377 	{
7378 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7379 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7380 	}
7381       memset (ptr + written, 0,
7382 	      MIN (total_bytes - written, len - written));
7383     }
7384   else
7385     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7386   return MIN (total_bytes - off, len);
7387 }
7388 
7389 
7390 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7391    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7392    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7393    anything, just do a dry run.  If OFF is not -1 then start
7394    the encoding at byte offset OFF and encode at most LEN bytes.
7395    Return the number of bytes placed in the buffer, or zero upon failure.  */
7396 
7397 int
7398 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7399 {
7400   /* We don't support starting at a negative offset, and -1 is special.  */
7401   if (off < -1)
7402     return 0;
7403 
7404   switch (TREE_CODE (expr))
7405     {
7406     case INTEGER_CST:
7407       return native_encode_int (expr, ptr, len, off);
7408 
7409     case REAL_CST:
7410       return native_encode_real (expr, ptr, len, off);
7411 
7412     case FIXED_CST:
7413       return native_encode_fixed (expr, ptr, len, off);
7414 
7415     case COMPLEX_CST:
7416       return native_encode_complex (expr, ptr, len, off);
7417 
7418     case VECTOR_CST:
7419       return native_encode_vector (expr, ptr, len, off);
7420 
7421     case STRING_CST:
7422       return native_encode_string (expr, ptr, len, off);
7423 
7424     default:
7425       return 0;
7426     }
7427 }
7428 
7429 
7430 /* Subroutine of native_interpret_expr.  Interpret the contents of
7431    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7432    If the buffer cannot be interpreted, return NULL_TREE.  */
7433 
7434 static tree
7435 native_interpret_int (tree type, const unsigned char *ptr, int len)
7436 {
7437   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7438 
7439   if (total_bytes > len
7440       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7441     return NULL_TREE;
7442 
7443   wide_int result = wi::from_buffer (ptr, total_bytes);
7444 
7445   return wide_int_to_tree (type, result);
7446 }
7447 
7448 
7449 /* Subroutine of native_interpret_expr.  Interpret the contents of
7450    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7451    If the buffer cannot be interpreted, return NULL_TREE.  */
7452 
7453 static tree
7454 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7455 {
7456   scalar_mode mode = SCALAR_TYPE_MODE (type);
7457   int total_bytes = GET_MODE_SIZE (mode);
7458   double_int result;
7459   FIXED_VALUE_TYPE fixed_value;
7460 
7461   if (total_bytes > len
7462       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7463     return NULL_TREE;
7464 
7465   result = double_int::from_buffer (ptr, total_bytes);
7466   fixed_value = fixed_from_double_int (result, mode);
7467 
7468   return build_fixed (type, fixed_value);
7469 }
7470 
7471 
7472 /* Subroutine of native_interpret_expr.  Interpret the contents of
7473    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7474    If the buffer cannot be interpreted, return NULL_TREE.  */
7475 
7476 static tree
7477 native_interpret_real (tree type, const unsigned char *ptr, int len)
7478 {
7479   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7480   int total_bytes = GET_MODE_SIZE (mode);
7481   unsigned char value;
7482   /* There are always 32 bits in each long, no matter the size of
7483      the host's long.  We handle floating point representations with
7484      up to 192 bits.  */
7485   REAL_VALUE_TYPE r;
7486   long tmp[6];
7487 
7488   if (total_bytes > len || total_bytes > 24)
7489     return NULL_TREE;
7490   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7491 
7492   memset (tmp, 0, sizeof (tmp));
7493   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7494        bitpos += BITS_PER_UNIT)
7495     {
7496       /* Both OFFSET and BYTE index within a long;
7497 	 bitpos indexes the whole float.  */
7498       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7499       if (UNITS_PER_WORD < 4)
7500 	{
7501 	  int word = byte / UNITS_PER_WORD;
7502 	  if (WORDS_BIG_ENDIAN)
7503 	    word = (words - 1) - word;
7504 	  offset = word * UNITS_PER_WORD;
7505 	  if (BYTES_BIG_ENDIAN)
7506 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7507 	  else
7508 	    offset += byte % UNITS_PER_WORD;
7509 	}
7510       else
7511 	{
7512 	  offset = byte;
7513 	  if (BYTES_BIG_ENDIAN)
7514 	    {
7515 	      /* Reverse bytes within each long, or within the entire float
7516 		 if it's smaller than a long (for HFmode).  */
7517 	      offset = MIN (3, total_bytes - 1) - offset;
7518 	      gcc_assert (offset >= 0);
7519 	    }
7520 	}
7521       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7522 
7523       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7524     }
7525 
7526   real_from_target (&r, tmp, mode);
7527   return build_real (type, r);
7528 }
7529 
7530 
7531 /* Subroutine of native_interpret_expr.  Interpret the contents of
7532    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7533    If the buffer cannot be interpreted, return NULL_TREE.  */
7534 
7535 static tree
7536 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7537 {
7538   tree etype, rpart, ipart;
7539   int size;
7540 
7541   etype = TREE_TYPE (type);
7542   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7543   if (size * 2 > len)
7544     return NULL_TREE;
7545   rpart = native_interpret_expr (etype, ptr, size);
7546   if (!rpart)
7547     return NULL_TREE;
7548   ipart = native_interpret_expr (etype, ptr+size, size);
7549   if (!ipart)
7550     return NULL_TREE;
7551   return build_complex (type, rpart, ipart);
7552 }
7553 
7554 
7555 /* Subroutine of native_interpret_expr.  Interpret the contents of
7556    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7557    If the buffer cannot be interpreted, return NULL_TREE.  */
7558 
7559 static tree
7560 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7561 {
7562   tree etype, elem;
7563   unsigned int i, size;
7564   unsigned HOST_WIDE_INT count;
7565 
7566   etype = TREE_TYPE (type);
7567   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7568   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7569       || size * count > len)
7570     return NULL_TREE;
7571 
7572   tree_vector_builder elements (type, count, 1);
7573   for (i = 0; i < count; ++i)
7574     {
7575       elem = native_interpret_expr (etype, ptr+(i*size), size);
7576       if (!elem)
7577 	return NULL_TREE;
7578       elements.quick_push (elem);
7579     }
7580   return elements.build ();
7581 }
7582 
7583 
7584 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7585    the buffer PTR of length LEN as a constant of type TYPE.  For
7586    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7587    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7588    return NULL_TREE.  */
7589 
7590 tree
7591 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7592 {
7593   switch (TREE_CODE (type))
7594     {
7595     case INTEGER_TYPE:
7596     case ENUMERAL_TYPE:
7597     case BOOLEAN_TYPE:
7598     case POINTER_TYPE:
7599     case REFERENCE_TYPE:
7600       return native_interpret_int (type, ptr, len);
7601 
7602     case REAL_TYPE:
7603       return native_interpret_real (type, ptr, len);
7604 
7605     case FIXED_POINT_TYPE:
7606       return native_interpret_fixed (type, ptr, len);
7607 
7608     case COMPLEX_TYPE:
7609       return native_interpret_complex (type, ptr, len);
7610 
7611     case VECTOR_TYPE:
7612       return native_interpret_vector (type, ptr, len);
7613 
7614     default:
7615       return NULL_TREE;
7616     }
7617 }
7618 
7619 /* Returns true if we can interpret the contents of a native encoding
7620    as TYPE.  */
7621 
7622 static bool
7623 can_native_interpret_type_p (tree type)
7624 {
7625   switch (TREE_CODE (type))
7626     {
7627     case INTEGER_TYPE:
7628     case ENUMERAL_TYPE:
7629     case BOOLEAN_TYPE:
7630     case POINTER_TYPE:
7631     case REFERENCE_TYPE:
7632     case FIXED_POINT_TYPE:
7633     case REAL_TYPE:
7634     case COMPLEX_TYPE:
7635     case VECTOR_TYPE:
7636       return true;
7637     default:
7638       return false;
7639     }
7640 }
7641 
7642 
7643 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7644    TYPE at compile-time.  If we're unable to perform the conversion
7645    return NULL_TREE.  */
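/* For example, on a target with IEEE single-precision floats, a
   VIEW_CONVERT_EXPR of the REAL_CST 1.0f to a 32-bit integer type is
   folded this way to the INTEGER_CST 0x3f800000.  */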
7646 
7647 static tree
7648 fold_view_convert_expr (tree type, tree expr)
7649 {
7650   /* We support up to 512-bit values (for V8DFmode).  */
7651   unsigned char buffer[64];
7652   int len;
7653 
7654   /* Check that the host and target are sane.  */
7655   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7656     return NULL_TREE;
7657 
7658   len = native_encode_expr (expr, buffer, sizeof (buffer));
7659   if (len == 0)
7660     return NULL_TREE;
7661 
7662   return native_interpret_expr (type, buffer, len);
7663 }
7664 
7665 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7666    to avoid confusing the gimplify process.  */
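/* For example, &*P folds to P (with a conversion to PTRTYPE if needed),
   and the address of a zero-offset MEM_REF folds back to its base
   pointer, rather than building a new ADDR_EXPR.  */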
7667 
7668 tree
7669 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7670 {
7671   /* The size of the object is not relevant when talking about its address.  */
7672   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7673     t = TREE_OPERAND (t, 0);
7674 
7675   if (TREE_CODE (t) == INDIRECT_REF)
7676     {
7677       t = TREE_OPERAND (t, 0);
7678 
7679       if (TREE_TYPE (t) != ptrtype)
7680 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7681     }
7682   else if (TREE_CODE (t) == MEM_REF
7683 	   && integer_zerop (TREE_OPERAND (t, 1)))
7684     return TREE_OPERAND (t, 0);
7685   else if (TREE_CODE (t) == MEM_REF
7686 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7687     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7688 			TREE_OPERAND (t, 0),
7689 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7690   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7691     {
7692       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7693 
7694       if (TREE_TYPE (t) != ptrtype)
7695 	t = fold_convert_loc (loc, ptrtype, t);
7696     }
7697   else
7698     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7699 
7700   return t;
7701 }
7702 
7703 /* Build an expression for the address of T.  */
7704 
7705 tree
7706 build_fold_addr_expr_loc (location_t loc, tree t)
7707 {
7708   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7709 
7710   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7711 }
7712 
7713 /* Fold a unary expression of code CODE and type TYPE with operand
7714    OP0.  Return the folded expression if folding is successful.
7715    Otherwise, return NULL_TREE.  */
7716 
7717 tree
7718 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7719 {
7720   tree tem;
7721   tree arg0;
7722   enum tree_code_class kind = TREE_CODE_CLASS (code);
7723 
7724   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7725 	      && TREE_CODE_LENGTH (code) == 1);
7726 
7727   arg0 = op0;
7728   if (arg0)
7729     {
7730       if (CONVERT_EXPR_CODE_P (code)
7731 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7732 	{
7733 	  /* Don't use STRIP_NOPS, because signedness of argument type
7734 	     matters.  */
7735 	  STRIP_SIGN_NOPS (arg0);
7736 	}
7737       else
7738 	{
7739 	  /* Strip any conversions that don't change the mode.  This
7740 	     is safe for every expression, except for a comparison
7741 	     expression because its signedness is derived from its
7742 	     operands.
7743 
7744 	     Note that this is done as an internal manipulation within
7745 	     the constant folder, in order to find the simplest
7746 	     representation of the arguments so that their form can be
7747 	     studied.  In any cases, the appropriate type conversions
7748 	     should be put back in the tree that will get out of the
7749 	     constant folder.  */
7750 	  STRIP_NOPS (arg0);
7751 	}
7752 
7753       if (CONSTANT_CLASS_P (arg0))
7754 	{
7755 	  tree tem = const_unop (code, type, arg0);
7756 	  if (tem)
7757 	    {
7758 	      if (TREE_TYPE (tem) != type)
7759 		tem = fold_convert_loc (loc, type, tem);
7760 	      return tem;
7761 	    }
7762 	}
7763     }
7764 
7765   tem = generic_simplify (loc, code, type, op0);
7766   if (tem)
7767     return tem;
7768 
7769   if (TREE_CODE_CLASS (code) == tcc_unary)
7770     {
7771       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7772 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7773 		       fold_build1_loc (loc, code, type,
7774 				    fold_convert_loc (loc, TREE_TYPE (op0),
7775 						      TREE_OPERAND (arg0, 1))));
7776       else if (TREE_CODE (arg0) == COND_EXPR)
7777 	{
7778 	  tree arg01 = TREE_OPERAND (arg0, 1);
7779 	  tree arg02 = TREE_OPERAND (arg0, 2);
7780 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7781 	    arg01 = fold_build1_loc (loc, code, type,
7782 				 fold_convert_loc (loc,
7783 						   TREE_TYPE (op0), arg01));
7784 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7785 	    arg02 = fold_build1_loc (loc, code, type,
7786 				 fold_convert_loc (loc,
7787 						   TREE_TYPE (op0), arg02));
7788 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7789 			     arg01, arg02);
7790 
7791 	  /* If this was a conversion, and all we did was to move into
7792 	     inside the COND_EXPR, bring it back out.  But leave it if
7793 	     it is a conversion from integer to integer and the
7794 	     result precision is no wider than a word since such a
7795 	     conversion is cheap and may be optimized away by combine,
7796 	     while it couldn't if it were outside the COND_EXPR.  Then return
7797 	     so we don't get into an infinite recursion loop taking the
7798 	     conversion out and then back in.  */
7799 
7800 	  if ((CONVERT_EXPR_CODE_P (code)
7801 	       || code == NON_LVALUE_EXPR)
7802 	      && TREE_CODE (tem) == COND_EXPR
7803 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7804 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7805 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7806 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7807 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7808 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7809 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7810 		     && (INTEGRAL_TYPE_P
7811 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7812 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7813 		  || flag_syntax_only))
7814 	    tem = build1_loc (loc, code, type,
7815 			      build3 (COND_EXPR,
7816 				      TREE_TYPE (TREE_OPERAND
7817 						 (TREE_OPERAND (tem, 1), 0)),
7818 				      TREE_OPERAND (tem, 0),
7819 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7820 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7821 						    0)));
7822 	  return tem;
7823 	}
7824    }
7825 
7826   switch (code)
7827     {
7828     case NON_LVALUE_EXPR:
7829       if (!maybe_lvalue_p (op0))
7830 	return fold_convert_loc (loc, type, op0);
7831       return NULL_TREE;
7832 
7833     CASE_CONVERT:
7834     case FLOAT_EXPR:
7835     case FIX_TRUNC_EXPR:
7836       if (COMPARISON_CLASS_P (op0))
7837 	{
7838 	  /* If we have (type) (a CMP b) and type is an integral type, return
7839 	     new expression involving the new type.  Canonicalize
7840 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7841 	     non-integral type.
7842 	     Do not fold the result as that would not simplify further, also
7843 	     folding again results in recursions.  */
7844 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7845 	    return build2_loc (loc, TREE_CODE (op0), type,
7846 			       TREE_OPERAND (op0, 0),
7847 			       TREE_OPERAND (op0, 1));
7848 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7849 		   && TREE_CODE (type) != VECTOR_TYPE)
7850 	    return build3_loc (loc, COND_EXPR, type, op0,
7851 			       constant_boolean_node (true, type),
7852 			       constant_boolean_node (false, type));
7853 	}
7854 
7855       /* Handle (T *)&A.B.C for A being of type T and B and C
7856 	 living at offset zero.  This occurs frequently in
7857 	 C++ upcasting and then accessing the base.  */
7858       if (TREE_CODE (op0) == ADDR_EXPR
7859 	  && POINTER_TYPE_P (type)
7860 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7861         {
7862 	  poly_int64 bitsize, bitpos;
7863 	  tree offset;
7864 	  machine_mode mode;
7865 	  int unsignedp, reversep, volatilep;
7866 	  tree base
7867 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7868 				   &offset, &mode, &unsignedp, &reversep,
7869 				   &volatilep);
7870 	  /* If the reference was to a (constant) zero offset, we can use
7871 	     the address of the base if it has the same base type
7872 	     as the result type and the pointer type is unqualified.  */
7873 	  if (!offset
7874 	      && known_eq (bitpos, 0)
7875 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7876 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7877 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7878 	    return fold_convert_loc (loc, type,
7879 				     build_fold_addr_expr_loc (loc, base));
7880         }
7881 
7882       if (TREE_CODE (op0) == MODIFY_EXPR
7883 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7884 	  /* Detect assigning a bitfield.  */
7885 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7886 	       && DECL_BIT_FIELD
7887 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7888 	{
7889 	  /* Don't leave an assignment inside a conversion
7890 	     unless assigning a bitfield.  */
7891 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7892 	  /* First do the assignment, then return converted constant.  */
7893 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7894 	  TREE_NO_WARNING (tem) = 1;
7895 	  TREE_USED (tem) = 1;
7896 	  return tem;
7897 	}
7898 
7899       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7900 	 constants (if x has signed type, the sign bit cannot be set
7901 	 in c).  This folds extension into the BIT_AND_EXPR.
7902 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7903 	 very likely don't have maximal range for their precision and this
7904 	 transformation effectively doesn't preserve non-maximal ranges.  */
7905       if (TREE_CODE (type) == INTEGER_TYPE
7906 	  && TREE_CODE (op0) == BIT_AND_EXPR
7907 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7908 	{
7909 	  tree and_expr = op0;
7910 	  tree and0 = TREE_OPERAND (and_expr, 0);
7911 	  tree and1 = TREE_OPERAND (and_expr, 1);
7912 	  int change = 0;
7913 
7914 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7915 	      || (TYPE_PRECISION (type)
7916 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7917 	    change = 1;
7918 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7919 		   <= HOST_BITS_PER_WIDE_INT
7920 		   && tree_fits_uhwi_p (and1))
7921 	    {
7922 	      unsigned HOST_WIDE_INT cst;
7923 
7924 	      cst = tree_to_uhwi (and1);
7925 	      cst &= HOST_WIDE_INT_M1U
7926 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7927 	      change = (cst == 0);
7928 	      if (change
7929 		  && !flag_syntax_only
7930 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7931 		      == ZERO_EXTEND))
7932 		{
7933 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7934 		  and0 = fold_convert_loc (loc, uns, and0);
7935 		  and1 = fold_convert_loc (loc, uns, and1);
7936 		}
7937 	    }
7938 	  if (change)
7939 	    {
7940 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
7941 				    TREE_OVERFLOW (and1));
7942 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7943 				      fold_convert_loc (loc, type, and0), tem);
7944 	    }
7945 	}
7946 
7947       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7948 	 cast (T1)X will fold away.  We assume that this happens when X itself
7949 	 is a cast.  */
7950       if (POINTER_TYPE_P (type)
7951 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7952 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7953 	{
7954 	  tree arg00 = TREE_OPERAND (arg0, 0);
7955 	  tree arg01 = TREE_OPERAND (arg0, 1);
7956 
7957 	  return fold_build_pointer_plus_loc
7958 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7959 	}
7960 
7961       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7962 	 of the same precision, and X is an integer type not narrower than
7963 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7964       if (INTEGRAL_TYPE_P (type)
7965 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7966 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7967 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7968 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7969 	{
7970 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7971 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7972 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7973 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7974 				fold_convert_loc (loc, type, tem));
7975 	}
7976 
7977       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7978 	 type of X and Y (integer types only).  */
7979       if (INTEGRAL_TYPE_P (type)
7980 	  && TREE_CODE (op0) == MULT_EXPR
7981 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7982 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7983 	{
7984 	  /* Be careful not to introduce new overflows.  */
7985 	  tree mult_type;
7986           if (TYPE_OVERFLOW_WRAPS (type))
7987 	    mult_type = type;
7988 	  else
7989 	    mult_type = unsigned_type_for (type);
7990 
7991 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7992 	    {
7993 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7994 				 fold_convert_loc (loc, mult_type,
7995 						   TREE_OPERAND (op0, 0)),
7996 				 fold_convert_loc (loc, mult_type,
7997 						   TREE_OPERAND (op0, 1)));
7998 	      return fold_convert_loc (loc, type, tem);
7999 	    }
8000 	}
8001 
8002       return NULL_TREE;
8003 
8004     case VIEW_CONVERT_EXPR:
8005       if (TREE_CODE (op0) == MEM_REF)
8006         {
8007 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8008 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8009 	  tem = fold_build2_loc (loc, MEM_REF, type,
8010 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8011 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8012 	  return tem;
8013 	}
8014 
8015       return NULL_TREE;
8016 
8017     case NEGATE_EXPR:
8018       tem = fold_negate_expr (loc, arg0);
8019       if (tem)
8020 	return fold_convert_loc (loc, type, tem);
8021       return NULL_TREE;
8022 
8023     case ABS_EXPR:
8024       /* Convert fabs((double)float) into (double)fabsf(float).  */
8025       if (TREE_CODE (arg0) == NOP_EXPR
8026 	  && TREE_CODE (type) == REAL_TYPE)
8027 	{
8028 	  tree targ0 = strip_float_extensions (arg0);
8029 	  if (targ0 != arg0)
8030 	    return fold_convert_loc (loc, type,
8031 				     fold_build1_loc (loc, ABS_EXPR,
8032 						  TREE_TYPE (targ0),
8033 						  targ0));
8034 	}
8035       return NULL_TREE;
8036 
8037     case BIT_NOT_EXPR:
8038       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8039       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8040 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8041 				    fold_convert_loc (loc, type,
8042 						      TREE_OPERAND (arg0, 0)))))
8043 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8044 				fold_convert_loc (loc, type,
8045 						  TREE_OPERAND (arg0, 1)));
8046       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8047 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8048 			       	     fold_convert_loc (loc, type,
8049 						       TREE_OPERAND (arg0, 1)))))
8050 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8051 			    fold_convert_loc (loc, type,
8052 					      TREE_OPERAND (arg0, 0)), tem);
8053 
8054       return NULL_TREE;
8055 
8056     case TRUTH_NOT_EXPR:
8057       /* Note that the operand of this must be an int
8058 	 and its values must be 0 or 1.
8059 	 ("true" is a fixed value perhaps depending on the language,
8060 	 but we don't handle values other than 1 correctly yet.)  */
8061       tem = fold_truth_not_expr (loc, arg0);
8062       if (!tem)
8063 	return NULL_TREE;
8064       return fold_convert_loc (loc, type, tem);
8065 
8066     case INDIRECT_REF:
8067       /* Fold *&X to X if X is an lvalue.  */
8068       if (TREE_CODE (op0) == ADDR_EXPR)
8069 	{
8070 	  tree op00 = TREE_OPERAND (op0, 0);
8071 	  if ((VAR_P (op00)
8072 	       || TREE_CODE (op00) == PARM_DECL
8073 	       || TREE_CODE (op00) == RESULT_DECL)
8074 	      && !TREE_READONLY (op00))
8075 	    return op00;
8076 	}
8077       return NULL_TREE;
8078 
8079     default:
8080       return NULL_TREE;
8081     } /* switch (code) */
8082 }
8083 
8084 
8085 /* If the operation was a conversion do _not_ mark a resulting constant
8086    with TREE_OVERFLOW if the original constant was not.  These conversions
8087    have implementation defined behavior and retaining the TREE_OVERFLOW
8088    flag here would confuse later passes such as VRP.  */
8089 tree
fold_unary_ignore_overflow_loc(location_t loc,enum tree_code code,tree type,tree op0)8090 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8091 				tree type, tree op0)
8092 {
8093   tree res = fold_unary_loc (loc, code, type, op0);
8094   if (res
8095       && TREE_CODE (res) == INTEGER_CST
8096       && TREE_CODE (op0) == INTEGER_CST
8097       && CONVERT_EXPR_CODE_P (code))
8098     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8099 
8100   return res;
8101 }
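
/* For example, folding (int) 4294967295u yields the INTEGER_CST -1 on
   a target with 32-bit int.  The value does not fit, so plain folding
   may mark the result with TREE_OVERFLOW; since the conversion is
   merely implementation-defined, the wrapper above copies the (unset)
   flag from the operand instead.  */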

/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
			    fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
			    fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
			    fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
			    fold_build2_loc (loc, code, type, a00, a10),
			    a01);
    }

  /* See if we can build a range comparison.  */
  if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && (tem = fold_truth_andor_1 (loc, code, type,
				    TREE_OPERAND (arg0, 1), arg1)) != 0)
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
    logical_op_non_short_circuit
      = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
  if (logical_op_non_short_circuit
      && !flag_sanitize_coverage
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves into a non-IF
	 AND/OR expression.
	 If the tree code of the left-hand operand isn't an AND/OR-IF
	 code and isn't equal to IF-CODE, then we don't want to add the
	 right-hand operand.  If the inner right-hand side of the
	 left-hand operand has side-effects, or isn't simple, then we
	 can't add to it, as otherwise we might destroy the
	 if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	  && simple_operand_p_2 (arg0)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
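
/* For example, '(a || b) && (a || c)' is factored to 'a || (b && c)'
   when b has no side effects; and when branch-free evaluation is
   enabled (LOGICAL_OP_NON_SHORT_CIRCUIT), 'a > 0 && b > 0' with
   simple, non-trapping operands is rewritten from TRUTH_ANDIF_EXPR to
   the branchless TRUTH_AND_EXPR.  */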

/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
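
/* Worked example: for signed X where overflow is undefined, 'X + 2 > Y'
   is canonicalized to 'X + 1 >= Y': relaxing GT_EXPR to GE_EXPR lets
   the magnitude of the constant drop from 2 to 1.  */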

/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (maybe_lt (bitpos, 0))
    return true;

  poly_wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = wi::to_poly_wide (offset);

  bool overflow;
  poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
				  precision);
  poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  poly_uint64 total_hwi, size;
  if (!total.to_uhwi (&total_hwi)
      || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
			   &size)
      || known_eq (size, 0U))
    return true;

  if (known_le (total_hwi, size))
    return false;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR
      && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
			  &size)
      && maybe_ne (size, 0U)
      && known_le (total_hwi, size))
    return false;

  return true;
}

/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.
   For function-local objects, always return a positive integer.  */
static int
maybe_nonzero_address (tree decl)
{
  if (DECL_P (decl) && decl_in_symtab_p (decl))
    if (struct symtab_node *symbol = symtab_node::get_create (decl))
      return symbol->nonzero_address ();

  /* Function-local objects are never NULL.  */
  if (DECL_P (decl)
      && (DECL_CONTEXT (decl)
      && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
      && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
    return 1;

  return -1;
}

/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should go through fold_binary rather than calling
   this function directly.  Fold a comparison with tree code CODE
   and type TYPE with operands OP0 and OP1.  Return the folded
   comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0
	    = get_inner_reference (TREE_OPERAND (arg0, 0),
				   &bitsize, &bitpos0, &offset0, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0
		= get_inner_reference (TREE_OPERAND (base0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base0) == INDIRECT_REF)
		base0 = TREE_OPERAND (base0, 0);
	      else
		indirect_base0 = true;
	    }
	  if (offset0 == NULL_TREE || integer_zerop (offset0))
	    offset0 = TREE_OPERAND (arg0, 1);
	  else
	    offset0 = size_binop (PLUS_EXPR, offset0,
				  TREE_OPERAND (arg0, 1));
	  if (poly_int_tree_p (offset0))
	    {
	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
					      TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos0;
	      if (tem.to_shwi (&bitpos0))
		offset0 = NULL_TREE;
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1
	    = get_inner_reference (TREE_OPERAND (arg1, 0),
				   &bitsize, &bitpos1, &offset1, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1
		= get_inner_reference (TREE_OPERAND (base1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base1) == INDIRECT_REF)
		base1 = TREE_OPERAND (base1, 0);
	      else
		indirect_base1 = true;
	    }
	  if (offset1 == NULL_TREE || integer_zerop (offset1))
	    offset1 = TREE_OPERAND (arg1, 1);
	  else
	    offset1 = size_binop (PLUS_EXPR, offset1,
				  TREE_OPERAND (arg1, 1));
	  if (poly_int_tree_p (offset1))
	    {
	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
					      TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos1;
	      if (tem.to_shwi (&bitpos1))
		offset1 = NULL_TREE;
	    }
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1,
			      indirect_base0 ? OEP_ADDRESS_OF : 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0
		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	    {
	      if (!equality_code
		  && maybe_ne (bitpos0, bitpos1)
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  if (known_eq (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_ne (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case NE_EXPR:
		  if (known_ne (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_eq (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case LT_EXPR:
		  if (known_lt (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_ge (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case LE_EXPR:
		  if (known_le (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_gt (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case GE_EXPR:
		  if (known_ge (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_lt (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case GT_EXPR:
		  if (known_gt (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_le (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (known_eq (bitpos0, bitpos1)
		   && (equality_code
		       || (indirect_base0
			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
	         arithmetic which operates on unsigned pointer types of size
	         type size and ARRAY_REF offsets which are properly sign or
	         zero extended from their type in case it is narrower than
	         sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (known_eq (bitpos0, bitpos1)
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
      /* Comparison between an ordinary (non-weak) symbol and a null
	 pointer can be eliminated since such symbols must have a
	 non-null address.  In C, relational expressions between pointers
	 to objects and null pointers are undefined.  The results
	 below follow the C++ rules with the additional property that
	 every object pointer compares greater than a null pointer.  */
      else if (((DECL_P (base0)
		 && maybe_nonzero_address (base0) > 0
		 /* Avoid folding references to struct members at offset 0 to
		    prevent tests like '&ptr->firstmember == 0' from getting
		    eliminated.  When ptr is null, although the -> expression
		    is strictly speaking invalid, GCC retains it as a matter
		    of QoI.  See PR c/44555. */
		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
		|| CONSTANT_CLASS_P (base0))
	       && indirect_base0
	       /* The caller guarantees that when one of the arguments is
		  constant (i.e., null in this case) it is second.  */
	       && integer_zerop (arg1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (false, type);
	    case GE_EXPR:
	    case GT_EXPR:
	    case NE_EXPR:
	      return constant_boolean_node (true, type);
	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of the same sign as before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  return NULL_TREE;
}


/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
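
/* For example, with z = a + b*i, z * conj(z) expands to
   (a + b*i) * (a - b*i) = a*a + b*b, so the result is built as
   COMPLEX_EXPR <a*a + b*b, 0>, with save_exprs guarding against
   evaluating a and b twice.  */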


/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
   true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (TREE_CODE (arg) == VECTOR_CST
      && VECTOR_CST_NELTS (arg).is_constant (&nunits))
    {
      for (i = 0; i < nunits; ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}

/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
{
  unsigned int i;
  unsigned HOST_WIDE_INT nelts;
  bool need_ctor = false;

  if (!sel.length ().is_constant (&nelts))
    return NULL_TREE;
  gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  tree *in_elts = XALLOCAVEC (tree, nelts * 2);
  if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
      || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
    return NULL_TREE;

  tree_vector_builder out_elts (type, nelts, 1);
  for (i = 0; i < nelts; i++)
    {
      HOST_WIDE_INT index;
      if (!sel[i].is_constant (&index))
	return NULL_TREE;
      if (!CONSTANT_CLASS_P (in_elts[index]))
	need_ctor = true;
      out_elts.quick_push (unshare_expr (in_elts[index]));
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
      return build_constructor (type, v);
    }
  else
    return out_elts.build ();
}
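
/* For example, permuting the constant vectors {1,2,3,4} and {5,6,7,8}
   with the selector {0,4,1,5} yields {1,5,2,6}: selector elements 0-3
   index into ARG0 and elements 4-7 into ARG1, which IN_ELTS holds back
   to back.  */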

/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1,
				   bool use_pointer_diff)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
						use_pointer_diff)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
	        = use_pointer_diff
		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
				     TREE_OPERAND (base0, 0),
				     TREE_OPERAND (base1, 0))
		  : fold_binary_loc (loc, MINUS_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (base0, 0)),
				     fold_convert (type,
						   TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
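
/* For example, given 'int a[10]' on a target with 4-byte int, the byte
   difference &a[i] - &a[j] folds to (i - j) * 4: the bases are equal,
   so BASE_OFFSET is zero and the index difference is scaled by
   array_ref_element_size.  */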

/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type;
  machine_mode mode;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      {
	unit_type = TREE_TYPE (type);
	mode = TYPE_MODE (unit_type);

	tree_vector_builder elts;
	if (!elts.new_unary_operation (type, cst, false))
	  return NULL_TREE;
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	    if (!exact_real_inverse (mode, &r))
	      return NULL_TREE;
	    elts.quick_push (build_real (unit_type, r));
	  }

	return elts.build ();
      }

    default:
      return NULL_TREE;
    }
}
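
/* For example, exact_inverse returns 4.0 for a REAL_CST of 0.25, since
   1/0.25 is exactly representable, but returns NULL_TREE for 0.1,
   whose reciprocal cannot be represented exactly in binary floating
   point.  */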

/*  Mask out the tz least significant bits of X of type TYPE where
    tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
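
/* Worked example: for y = 24 (binary 11000) we have tz = 3, so the
   three least significant bits of x are cleared; x = 13 (binary 1101)
   becomes 8 (binary 1000).  */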

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					      strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					       TREE_OPERAND (t, 0),
					       TREE_OPERAND (t, 1),
					       strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					      strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					       TREE_OPERAND (t, 0),
					       TREE_OPERAND (t, 1),
					       strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl)
	  return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
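/* For example (illustrative): the address of a declared object such as
   &some_var is accepted through the ADDR_EXPR path above, while for a
   signed X known to be nonnegative, X + 1 counts as nonzero only under
   the undefined-overflow assumption, which sets *STRICT_OVERFLOW_P and
   triggers the fold_overflow_warning call above.  */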

/* Return true if T is known not to be equal to an integer W.  */

bool
expr_not_equal_to (tree t, const wide_int &w)
{
  wide_int min, max, nz;
  value_range_type rtype;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t) != w;

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return false;
      rtype = get_range_info (t, &min, &max);
      if (rtype == VR_RANGE)
	{
	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
	    return true;
	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
	    return true;
	}
      else if (rtype == VR_ANTI_RANGE
	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
	return true;
      /* If T has some known zero bits and W has any of those bits set,
	 then T is known not to be equal to W.  */
      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
			      TYPE_PRECISION (TREE_TYPE (t))), 0))
	return true;
      return false;

    default:
      return false;
    }
}
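/* For instance (illustrative): an SSA name with value range [5, 10]
   is known not to equal 0, one with anti-range ~[3, 3] is known not
   to equal 3, and one whose nonzero-bits mask is 0xfc (low two bits
   known zero) cannot equal 3 either.  */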

/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;
  unsigned int prec;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of the
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
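
  /* An illustrative reason for STRIP_SIGN_NOPS above: with unsigned U,
     folding the signed comparison (int) U > -1 as if it were
     U > (unsigned) -1 would change its meaning, so sign-changing
     conversions must survive for comparisons (and MIN/MAX).  */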

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
    {
      tem = const_binop (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* Likewise if this is a comparison, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (kind == tcc_comparison
      && tree_swap_operands_p (arg0, arg1))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  tem = generic_simplify (loc, code, type, op0, op1);
  if (tem)
    return tem;

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a truth value and the other is a truth
     value or a BIT_AND_EXPR with the constant 1.  In that case, the code
     below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && !VECTOR_TYPE_P (TREE_TYPE (arg0))
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert_loc (loc, boolean_type_node, arg0),
			 fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
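
  /* For instance (illustrative): (a < b) & (c < d) becomes a
     TRUTH_AND_EXPR of the two comparisons, and (a < b) == (c < d)
     becomes the inversion of their TRUTH_XOR_EXPR.  */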

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
			     fold_convert_loc (loc, TREE_TYPE (op0),
					       TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type, op0,
			     fold_convert_loc (loc, TREE_TYPE (op1),
					       TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}
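      /* For instance (illustrative offsets): MEM[&MEM[p, 4], 8]
	 becomes MEM[p, 12].  */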

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;

    case POINTER_PLUS_EXPR:
      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
					      fold_convert_loc (loc, sizetype,
								arg1),
					      fold_convert_loc (loc, sizetype,
								arg0)));

      return NULL_TREE;

    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
				      cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
						      TREE_TYPE (arg0), arg0,
						      cst0));
	    }
	}
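      /* For instance (illustrative): X + (X / 5) * -5 folds to X % 5,
	 since truncating division satisfies X == (X / 5) * 5 + X % 5.  */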

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (ANY_INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
				    fold_build2_loc (loc, PLUS_EXPR, type,
						 fold_convert_loc (loc, type,
								   parg0),
						 fold_convert_loc (loc, type,
								   marg)),
				    fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
			       fold_convert_loc (loc, type, parg0),
			       fold_build2_loc (loc, pcode, type,
					    fold_convert_loc (loc, type, marg),
					    fold_convert_loc (loc, type,
							      parg1)));
	    }
	}
      else
	{
	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (element_mode (arg0))
	      && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				  : build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				  : build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				  : build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				  : build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
	 unsigned and C1 + C2 is the size of A.  */
      /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
	 unsigned and Z is the size of A.
	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
	 though in this case CODE must be | and not + or ^, otherwise
	 it doesn't return A when B is 0.  */
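      /* For instance (illustrative, 32-bit unsigned A):
	 (A << 3) + (A >> 29) folds to A rotated left by 3, and
	 (A << B) | (A >> (-B & 31)) folds to A rotated left by B.  */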
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
			        TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
	        TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    tree orig_tree01, orig_tree11;
	    enum tree_code code01, code11;

	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code11 != MINUS_EXPR
		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
	      {
		std::swap (code0, code1);
		std::swap (code01, code11);
		std::swap (tree01, tree11);
		std::swap (orig_tree01, orig_tree11);
	      }
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
		    == element_precision (rtype)))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  rtype, TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR
				  ? orig_tree01 : orig_tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && compare_tree_int (tree110,
					 element_precision (rtype)) == 0
		    && operand_equal_p (tree01, tree111, 0))
		  {
		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
					    ? LROTATE_EXPR : RROTATE_EXPR),
				      rtype, TREE_OPERAND (arg0, 0),
				      orig_tree01);
		    return fold_convert_loc (loc, type, tem);
		  }
	      }
	    else if (code == BIT_IOR_EXPR
		     && code11 == BIT_AND_EXPR
		     && pow2p_hwi (element_precision (rtype)))
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == NEGATE_EXPR
		    && TREE_CODE (tree111) == INTEGER_CST
		    && compare_tree_int (tree111,
					 element_precision (rtype) - 1) == 0
		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
		  {
		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
					    ? LROTATE_EXPR : RROTATE_EXPR),
				      rtype, TREE_OPERAND (arg0, 0),
				      orig_tree01);
		    return fold_convert_loc (loc, type, tem);
		  }
	      }
	  }
      }

    associate:
      /* In most languages, we can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
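	  /* For instance (illustrative, unsigned X and Y so that
	     overflow wraps): (X + 1) + (Y + 2) splits into variables
	     X, Y and literals 1, 2, and is rebuilt below as
	     (X + Y) + 3.  */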
	  var0 = split_tree (arg0, type, code,
			     &minus_var0, &con0, &minus_con0,
			     &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, type, code,
			     &minus_var1, &con1, &minus_con1,
			     &lit1, &minus_lit1, code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
	      && !TYPE_OVERFLOW_WRAPS (type))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
	      && !TYPE_OVERFLOW_WRAPS (atype))
	    {
	      if ((var0 && var1) || (minus_var0 && minus_var1))
		{
		  /* ???  If split_tree would handle NEGATE_EXPR we could
		     simply reject these cases and the allowed cases would
		     be the var0/minus_var1 ones.  */
		  tree tmp0 = var0 ? var0 : minus_var0;
		  tree tmp1 = var1 ? var1 : minus_var1;
		  bool one_neg = false;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    {
		      tmp0 = TREE_OPERAND (tmp0, 0);
		      one_neg = !one_neg;
		    }
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    {
		      tmp1 = TREE_OPERAND (tmp1, 0);
		      one_neg = !one_neg;
		    }
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they cancel out.  */
		  if (!one_neg
		      || !operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	      else if ((var0 && minus_var1
			&& ! operand_equal_p (var0, minus_var1, 0))
		       || (minus_var0 && var1
			   && ! operand_equal_p (minus_var0, var1, 0)))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && ((var0 != 0) + (var1 != 0)
		  + (minus_var0 != 0) + (minus_var1 != 0)
		  + (con0 != 0) + (con1 != 0)
		  + (minus_con0 != 0) + (minus_con1 != 0)
		  + (lit0 != 0) + (lit1 != 0)
		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
	    {
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
					    code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
					    code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      if (minus_var0 && var0)
		{
		  var0 = associate_trees (loc, var0, minus_var0,
					  MINUS_EXPR, atype);
		  minus_var0 = 0;
		}
	      if (minus_con0 && con0)
		{
		  con0 = associate_trees (loc, con0, minus_con0,
					  MINUS_EXPR, atype);
		  minus_con0 = 0;
		}

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0)
		      /* But avoid ending up with only negated parts.  */
		      && (var0 || con0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if ((lit0 && TREE_OVERFLOW_P (lit0))
		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
		return NULL_TREE;

	      /* Eliminate lit0 and minus_lit0 to con0 and minus_con0.  */
	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      lit0 = 0;
	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
					    code, atype);
	      minus_lit0 = 0;

	      /* Eliminate minus_con0.  */
	      if (minus_con0)
		{
		  if (con0)
		    con0 = associate_trees (loc, con0, minus_con0,
					    MINUS_EXPR, atype);
		  else if (var0)
		    var0 = associate_trees (loc, var0, minus_con0,
					    MINUS_EXPR, atype);
		  else
		    gcc_unreachable ();
		  minus_con0 = 0;
		}

	      /* Eliminate minus_var0.  */
	      if (minus_var0)
		{
		  if (con0)
		    con0 = associate_trees (loc, con0, minus_var0,
					    MINUS_EXPR, atype);
		  else
		    gcc_unreachable ();
		  minus_var0 = 0;
		}

	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;

    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0),
							code
							== POINTER_DIFF_EXPR);
	  if (tem)
	    return tem;
	}

      /* Further transformations are not for pointers.  */
      if (code == POINTER_DIFF_EXPR)
	return NULL_TREE;

      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (op1)
	  /* If arg0 is e.g. unsigned int and type is int, then this could
	     introduce UB, because if A is INT_MIN at runtime, the original
	     expression can be well defined while the latter is not.
	     See PR83269.  */
	  && !(ANY_INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_UNDEFINED (type)
	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
			        fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (element_mode (arg0))
	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
	        {
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					 arg1r ? arg1r
					 : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
	        {
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					 arg1i ? arg1i
					 : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (op1)
	  && ! TYPE_OVERFLOW_SANITIZED (type)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (op1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				negate_expr (op1));

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      if (! FLOAT_TYPE_P (type))
	{
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && tree_int_cst_sgn (op1) == -1
	      && negate_expr_p (op0)
	      && negate_expr_p (op1)
	      && (tem = negate_expr (op1)) != op1
	      && ! TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (op0)), tem);

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					&strict_overflow_p)) != 0)
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (arg0)
	      && !HONOR_SIGNED_ZEROS (element_mode (arg0))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
							     rtype, arg0)),
			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
							     rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      goto associate;

    case BIT_IOR_EXPR:
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int width = TYPE_PRECISION (type), w;
	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
	  wide_int c2 = wi::to_wide (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  wide_int msk = wi::mask (width, false,
				   TYPE_PRECISION (TREE_TYPE (arg1)));

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (wi::bit_and_not (msk, c1 | c2) == 0)
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
	    }

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  wide_int c3 = wi::bit_and_not (c1, c2);
	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
	    {
	      wide_int mask = wi::mask (w, false,
					TYPE_PRECISION (type));
	      if (((c1 | c2) & mask) == mask
		  && wi::bit_and_not (c1, mask) == 0)
		{
		  c3 = mask;
		  break;
		}
	    }

	  if (c3 != c1)
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
				     wide_int_to_tree (type, c3));
	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
	    }
	}
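      /* For instance (illustrative, in an 8-bit unsigned type):
	 (X & 0x0f) | 0xff is just 0xff; (X & 0xf0) | 0x0f becomes
	 X | 0x0f; and (X & 0x0f) | 0x06 becomes (X & 0x09) | 0x06.  */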

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
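      /* For instance (illustrative): (X * 8) & -8 folds to X * 8, since
	 any multiple of 8 already has its low three bits clear.  */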

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
	  wide_int masked
	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && pow2p_hwi (pop)
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
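      /* For instance (illustrative): (X * 8) & 7 is 0, and (X * 4) & 7
	 drops the known-zero bits to become (X * 4) & 4.  */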

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
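      /* For instance (illustrative, M == 7): ((A & 0xff) + B) & 7 and
	 ((A | 8) + B) & 7 both fold to (A + B) & 7.  */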
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & wi::to_wide (pmop[which])) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}

      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
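      /* For instance (illustrative, unsigned A): A / (8 << N) becomes
	 A >> (N + 3).  */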
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (wi::to_wide (sval)));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0), sh_cnt);
	    }
	}

      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (op0) == NEGATE_EXPR
	  && negate_expr_p (op1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  negate_expr (op1));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (op0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  negate_expr (op0),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, but it's not clear if
	 they do after the last round of changes to the DIV code in
	 expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
				fold_convert (type, arg0),
				fold_convert (type, arg1));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
				    &strict_overflow_p)) != 0)
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
				    &strict_overflow_p)) != 0)
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* Since a negative shift count is not well-defined, don't try to
	 compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
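      /* For instance (illustrative, 16-bit X): (X & 0xf0f0) ror 8
	 becomes (X ror 8) & (0xf0f0 ror 8), and the rotated constant
	 folds right back to 0xf0f0.  */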
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
				  fold_build2_loc (loc, code, type,
						   arg00, arg1),
				  fold_build2_loc (loc, code, type,
						   arg01, arg1));
	}

      /* Two consecutive rotates adding up to some integer multiple of
	 the precision of the type can be ignored.  */
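      /* For instance (illustrative, 32-bit X): (X ror 5) ror 27 folds
	 back to X, since 5 + 27 is a multiple of the precision.  */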
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::to_wide (arg1)
			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

      return NULL_TREE;

    case MIN_EXPR:
    case MAX_EXPR:
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
      /* FALLTHRU */
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
10592 
10593     case TRUTH_ORIF_EXPR:
10594       /* Note that the operands of this must be ints
10595 	 and their values must be 0 or true.
10596 	 ("true" is a fixed value perhaps depending on the language.)  */
10597       /* If first arg is constant true, return it.  */
10598       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10599 	return fold_convert_loc (loc, type, arg0);
10600       /* FALLTHRU */
10601     case TRUTH_OR_EXPR:
10602       /* If either arg is constant zero, drop it.  */
10603       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10604 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10605       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10606 	  /* Preserve sequence points.  */
10607 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10608 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10609       /* If second arg is constant true, result is true, but we must
10610 	 evaluate first arg.  */
10611       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10612 	return omit_one_operand_loc (loc, type, arg1, arg0);
10613       /* Likewise for first arg, but note this only occurs here for
10614 	 TRUTH_OR_EXPR.  */
10615       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10616 	return omit_one_operand_loc (loc, type, arg0, arg1);
10617 
10618       /* !X || X is always true.  */
10619       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10620 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10621 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10622       /* X || !X is always true.  */
10623       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10624 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10625 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10626 
10627       /* (X && !Y) || (!X && Y) is X ^ Y */
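      /* E.g. exactly one disjunct can hold: X true with Y false fires
	 the left one, X false with Y true the right one, never both,
	 which is precisely TRUTH_XOR_EXPR of X and Y.  */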
10628       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10629 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10630         {
10631 	  tree a0, a1, l0, l1, n0, n1;
10632 
10633 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10634 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10635 
10636 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10637 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10638 
10639 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10640 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10641 
10642 	  if ((operand_equal_p (n0, a0, 0)
10643 	       && operand_equal_p (n1, a1, 0))
10644 	      || (operand_equal_p (n0, a1, 0)
10645 		  && operand_equal_p (n1, a0, 0)))
10646 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10647 	}
10648 
10649       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10650           != NULL_TREE)
10651         return tem;
10652 
10653       return NULL_TREE;
10654 
10655     case TRUTH_XOR_EXPR:
10656       /* If the second arg is constant zero, drop it.  */
10657       if (integer_zerop (arg1))
10658 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10659       /* If the second arg is constant true, this is a logical inversion.  */
10660       if (integer_onep (arg1))
10661 	{
10662 	  tem = invert_truthvalue_loc (loc, arg0);
10663 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10664 	}
10665       /* Identical arguments cancel to zero.  */
10666       if (operand_equal_p (arg0, arg1, 0))
10667 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10668 
10669       /* !X ^ X is always true.  */
10670       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10671 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10672 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10673 
10674       /* X ^ !X is always true.  */
10675       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10676 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10677 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10678 
10679       return NULL_TREE;
10680 
10681     case EQ_EXPR:
10682     case NE_EXPR:
10683       STRIP_NOPS (arg0);
10684       STRIP_NOPS (arg1);
10685 
10686       tem = fold_comparison (loc, code, type, op0, op1);
10687       if (tem != NULL_TREE)
10688 	return tem;
10689 
10690       /* bool_var != 1 becomes !bool_var. */
10691       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10692           && code == NE_EXPR)
10693         return fold_convert_loc (loc, type,
10694 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10695 						  TREE_TYPE (arg0), arg0));
10696 
10697       /* bool_var == 0 becomes !bool_var. */
10698       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10699           && code == EQ_EXPR)
10700         return fold_convert_loc (loc, type,
10701 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10702 						  TREE_TYPE (arg0), arg0));
10703 
10704       /* !exp != 0 becomes !exp */
10705       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10706 	  && code == NE_EXPR)
10707         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10708 
10709       /* If this is an EQ or NE comparison with zero and ARG0 is
10710 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10711 	 two operations, but the latter can be done in one less insn
10712 	 on machines that have only two-operand insns or on which a
10713 	 constant cannot be the first operand.  */
10714       if (TREE_CODE (arg0) == BIT_AND_EXPR
10715 	  && integer_zerop (arg1))
10716 	{
10717 	  tree arg00 = TREE_OPERAND (arg0, 0);
10718 	  tree arg01 = TREE_OPERAND (arg0, 1);
10719 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10720 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10721 	    {
10722 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10723 				      arg01, TREE_OPERAND (arg00, 1));
10724 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10725 				 build_int_cst (TREE_TYPE (arg0), 1));
10726 	      return fold_build2_loc (loc, code, type,
10727 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10728 				  arg1);
10729 	    }
10730 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10731 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10732 	    {
10733 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10734 				      arg00, TREE_OPERAND (arg01, 1));
10735 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10736 				 build_int_cst (TREE_TYPE (arg0), 1));
10737 	      return fold_build2_loc (loc, code, type,
10738 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10739 				  arg1);
10740 	    }
10741 	}
10742 
10743       /* If this is an NE or EQ comparison of zero against the result of a
10744 	 signed MOD operation whose second operand is a power of 2, make
10745 	 the MOD operation unsigned since it is simpler and equivalent.  */
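      /* A small sketch (modulus chosen for illustration): for signed
	 int X, "X % 8 == 0" and "(unsigned) X % 8 == 0" agree for every
	 X, because a power-of-two modulus inspects only the low-order
	 bits of the two's complement representation.  */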
10746       if (integer_zerop (arg1)
10747 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10748 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10749 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10750 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10751 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10752 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10753 	{
10754 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10755 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10756 				     fold_convert_loc (loc, newtype,
10757 						       TREE_OPERAND (arg0, 0)),
10758 				     fold_convert_loc (loc, newtype,
10759 						       TREE_OPERAND (arg0, 1)));
10760 
10761 	  return fold_build2_loc (loc, code, type, newmod,
10762 			      fold_convert_loc (loc, newtype, arg1));
10763 	}
10764 
10765       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10766 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10767 	 a single bit.  */
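      /* For example (C1 and C2 assumed): with C1 == 2 and C2 == 4,
	 "((X >> 2) & 4) != 0" tests bit 4 of X, so whenever 4 << 2
	 still fits in the precision it becomes "(X & 16) != 0".  */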
10768       if (TREE_CODE (arg0) == BIT_AND_EXPR
10769 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10770 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10771 	     == INTEGER_CST
10772 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10773 	  && integer_zerop (arg1))
10774 	{
10775 	  tree itype = TREE_TYPE (arg0);
10776 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10777 	  prec = TYPE_PRECISION (itype);
10778 
10779 	  /* Check for a valid shift count.  */
10780 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
10781 	    {
10782 	      tree arg01 = TREE_OPERAND (arg0, 1);
10783 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10784 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10785 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10786 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10787 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10788 		{
10789 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10790 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10791 		  return fold_build2_loc (loc, code, type, tem,
10792 					  fold_convert_loc (loc, itype, arg1));
10793 		}
10794 	      /* Otherwise, for signed (arithmetic) shifts,
10795 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10796 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10797 	      else if (!TYPE_UNSIGNED (itype))
10798 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10799 				    arg000, build_int_cst (itype, 0));
10800 	      /* Otherwise, for unsigned (logical) shifts,
10801 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10802 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10803 	      else
10804 		return omit_one_operand_loc (loc, type,
10805 					 code == EQ_EXPR ? integer_one_node
10806 							 : integer_zero_node,
10807 					 arg000);
10808 	    }
10809 	}
10810 
10811       /* If this is a comparison of a field, we may be able to simplify it.  */
10812       if ((TREE_CODE (arg0) == COMPONENT_REF
10813 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10814 	  /* Handle the constant case even without -O
10815 	     to make sure the warnings are given.  */
10816 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10817 	{
10818 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10819 	  if (t1)
10820 	    return t1;
10821 	}
10822 
10823       /* Optimize comparisons of strlen vs zero to a compare of the
10824 	 first character of the string vs zero.  To wit,
10825 		strlen(ptr) == 0   =>  *ptr == 0
10826 		strlen(ptr) != 0   =>  *ptr != 0
10827 	 Other cases should reduce to one of these two (or a constant)
10828 	 due to the return value of strlen being unsigned.  */
10829       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
10830 	{
10831 	  tree fndecl = get_callee_fndecl (arg0);
10832 
10833 	  if (fndecl
10834 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10835 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10836 	      && call_expr_nargs (arg0) == 1
10837 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
10838 		  == POINTER_TYPE))
10839 	    {
10840 	      tree ptrtype
10841 		= build_pointer_type (build_qualified_type (char_type_node,
10842 							    TYPE_QUAL_CONST));
10843 	      tree ptr = fold_convert_loc (loc, ptrtype,
10844 					   CALL_EXPR_ARG (arg0, 0));
10845 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
10846 	      return fold_build2_loc (loc, code, type, iref,
10847 				      build_int_cst (TREE_TYPE (iref), 0));
10848 	    }
10849 	}
10850 
10851       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10852 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
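      /* E.g. (width assumed): for 32-bit int X, "(X >> 31) != 0" is the
	 sign-bit test "X < 0", and "(X >> 31) == 0" is "X >= 0"; an
	 unsigned X is first converted to the corresponding signed type
	 so that the comparison reads the sign bit.  */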
10853       if (TREE_CODE (arg0) == RSHIFT_EXPR
10854 	  && integer_zerop (arg1)
10855 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10856 	{
10857 	  tree arg00 = TREE_OPERAND (arg0, 0);
10858 	  tree arg01 = TREE_OPERAND (arg0, 1);
10859 	  tree itype = TREE_TYPE (arg00);
10860 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
10861 	    {
10862 	      if (TYPE_UNSIGNED (itype))
10863 		{
10864 		  itype = signed_type_for (itype);
10865 		  arg00 = fold_convert_loc (loc, itype, arg00);
10866 		}
10867 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10868 				  type, arg00, build_zero_cst (itype));
10869 	    }
10870 	}
10871 
10872       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10873 	 (X & C) == 0 when C is a single bit.  */
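      /* Sketch with an assumed constant: for C == 8 (a single bit),
	 "(~X & 8) == 0" holds exactly when bit 3 of X is set, i.e.
	 "(X & 8) != 0", and likewise with == and != exchanged.  */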
10874       if (TREE_CODE (arg0) == BIT_AND_EXPR
10875 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10876 	  && integer_zerop (arg1)
10877 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10878 	{
10879 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10880 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10881 				 TREE_OPERAND (arg0, 1));
10882 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10883 				  type, tem,
10884 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10885 						    arg1));
10886 	}
10887 
10888       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10889 	 constant C is a power of two, i.e. a single bit.  */
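      /* For instance (constant assumed): with C == 4,
	 "((X & 4) ^ 4) == 0" says bit 2 survived the AND, which is just
	 "(X & 4) != 0".  */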
10890       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10891 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10892 	  && integer_zerop (arg1)
10893 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10894 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10895 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10896 	{
10897 	  tree arg00 = TREE_OPERAND (arg0, 0);
10898 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10899 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10900 	}
10901 
10902       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10903 	 when C is a power of two, i.e. a single bit.  */
10904       if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10906 	  && integer_zerop (arg1)
10907 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10908 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10909 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10910 	{
10911 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10912 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10913 			     arg000, TREE_OPERAND (arg0, 1));
10914 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10915 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10916 	}
10917 
10918       if (integer_zerop (arg1)
10919 	  && tree_expr_nonzero_p (arg0))
10920         {
10921 	  tree res = constant_boolean_node (code == NE_EXPR, type);
10922 	  return omit_one_operand_loc (loc, type, res, arg0);
10923 	}
10924 
10925       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
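      /* E.g. (constant chosen for illustration): "(X & 7) == (Y & 7)"
	 holds exactly when X and Y agree in their low three bits, i.e.
	 "((X ^ Y) & 7) == 0".  */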
10926       if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10928 	{
10929 	  tree arg00 = TREE_OPERAND (arg0, 0);
10930 	  tree arg01 = TREE_OPERAND (arg0, 1);
10931 	  tree arg10 = TREE_OPERAND (arg1, 0);
10932 	  tree arg11 = TREE_OPERAND (arg1, 1);
10933 	  tree itype = TREE_TYPE (arg0);
10934 
10935 	  if (operand_equal_p (arg01, arg11, 0))
10936 	    {
10937 	      tem = fold_convert_loc (loc, itype, arg10);
10938 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10939 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10940 	      return fold_build2_loc (loc, code, type, tem,
10941 				      build_zero_cst (itype));
10942 	    }
10943 	  if (operand_equal_p (arg01, arg10, 0))
10944 	    {
10945 	      tem = fold_convert_loc (loc, itype, arg11);
10946 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10947 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10948 	      return fold_build2_loc (loc, code, type, tem,
10949 				      build_zero_cst (itype));
10950 	    }
10951 	  if (operand_equal_p (arg00, arg11, 0))
10952 	    {
10953 	      tem = fold_convert_loc (loc, itype, arg10);
10954 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10955 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10956 	      return fold_build2_loc (loc, code, type, tem,
10957 				      build_zero_cst (itype));
10958 	    }
10959 	  if (operand_equal_p (arg00, arg10, 0))
10960 	    {
10961 	      tem = fold_convert_loc (loc, itype, arg11);
10962 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10963 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10964 	      return fold_build2_loc (loc, code, type, tem,
10965 				      build_zero_cst (itype));
10966 	    }
10967 	}
10968 
10969       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10970 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10971 	{
10972 	  tree arg00 = TREE_OPERAND (arg0, 0);
10973 	  tree arg01 = TREE_OPERAND (arg0, 1);
10974 	  tree arg10 = TREE_OPERAND (arg1, 0);
10975 	  tree arg11 = TREE_OPERAND (arg1, 1);
10976 	  tree itype = TREE_TYPE (arg0);
10977 
10978 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10979 	     operand_equal_p guarantees no side-effects so we don't need
10980 	     to use omit_one_operand on Z.  */
10981 	  if (operand_equal_p (arg01, arg11, 0))
10982 	    return fold_build2_loc (loc, code, type, arg00,
10983 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10984 						      arg10));
10985 	  if (operand_equal_p (arg01, arg10, 0))
10986 	    return fold_build2_loc (loc, code, type, arg00,
10987 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10988 						      arg11));
10989 	  if (operand_equal_p (arg00, arg11, 0))
10990 	    return fold_build2_loc (loc, code, type, arg01,
10991 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10992 						      arg10));
10993 	  if (operand_equal_p (arg00, arg10, 0))
10994 	    return fold_build2_loc (loc, code, type, arg01,
10995 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10996 						      arg11));
10997 
10998 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
10999 	  if (TREE_CODE (arg01) == INTEGER_CST
11000 	      && TREE_CODE (arg11) == INTEGER_CST)
11001 	    {
11002 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11003 				     fold_convert_loc (loc, itype, arg11));
11004 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11005 	      return fold_build2_loc (loc, code, type, tem,
11006 				      fold_convert_loc (loc, itype, arg10));
11007 	    }
11008 	}
11009 
11010       /* Attempt to simplify equality/inequality comparisons of complex
11011 	 values.  Only lower the comparison if the result is known or
11012 	 can be simplified to a single scalar comparison.  */
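      /* A hypothetical lowering: "(a + bi) == (c + di)" reduces to the
	 single scalar compare "a == c" once "b == d" folds to constant
	 true, e.g. when both imaginary parts are the literal 0.  */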
11013       if ((TREE_CODE (arg0) == COMPLEX_EXPR
11014 	   || TREE_CODE (arg0) == COMPLEX_CST)
11015 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
11016 	      || TREE_CODE (arg1) == COMPLEX_CST))
11017 	{
11018 	  tree real0, imag0, real1, imag1;
11019 	  tree rcond, icond;
11020 
11021 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
11022 	    {
11023 	      real0 = TREE_OPERAND (arg0, 0);
11024 	      imag0 = TREE_OPERAND (arg0, 1);
11025 	    }
11026 	  else
11027 	    {
11028 	      real0 = TREE_REALPART (arg0);
11029 	      imag0 = TREE_IMAGPART (arg0);
11030 	    }
11031 
11032 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
11033 	    {
11034 	      real1 = TREE_OPERAND (arg1, 0);
11035 	      imag1 = TREE_OPERAND (arg1, 1);
11036 	    }
11037 	  else
11038 	    {
11039 	      real1 = TREE_REALPART (arg1);
11040 	      imag1 = TREE_IMAGPART (arg1);
11041 	    }
11042 
11043 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
11044 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11045 	    {
11046 	      if (integer_zerop (rcond))
11047 		{
11048 		  if (code == EQ_EXPR)
11049 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11050 					      imag0, imag1);
11051 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11052 		}
11053 	      else
11054 		{
11055 		  if (code == NE_EXPR)
11056 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11057 					      imag0, imag1);
11058 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11059 		}
11060 	    }
11061 
11062 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11063 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11064 	    {
11065 	      if (integer_zerop (icond))
11066 		{
11067 		  if (code == EQ_EXPR)
11068 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11069 					      real0, real1);
11070 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11071 		}
11072 	      else
11073 		{
11074 		  if (code == NE_EXPR)
11075 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11076 					      real0, real1);
11077 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11078 		}
11079 	    }
11080 	}
11081 
11082       return NULL_TREE;
11083 
11084     case LT_EXPR:
11085     case GT_EXPR:
11086     case LE_EXPR:
11087     case GE_EXPR:
11088       tem = fold_comparison (loc, code, type, op0, op1);
11089       if (tem != NULL_TREE)
11090 	return tem;
11091 
11092       /* Transform comparisons of the form X +- C CMP X.  */
11093       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11094 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11095 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11096 	  && !HONOR_SNANS (arg0))
11097 	{
11098 	  tree arg01 = TREE_OPERAND (arg0, 1);
11099 	  enum tree_code code0 = TREE_CODE (arg0);
11100 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11101 
11102 	  /* (X - c) > X becomes false.  */
11103 	  if (code == GT_EXPR
11104 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11105 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11106 	    return constant_boolean_node (0, type);
11107 
11108 	  /* Likewise (X + c) < X becomes false.  */
11109 	  if (code == LT_EXPR
11110 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11111 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11112 	    return constant_boolean_node (0, type);
11113 
11114 	  /* Convert (X - c) <= X to true.  */
11115 	  if (!HONOR_NANS (arg1)
11116 	      && code == LE_EXPR
11117 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11118 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11119 	    return constant_boolean_node (1, type);
11120 
11121 	  /* Convert (X + c) >= X to true.  */
11122 	  if (!HONOR_NANS (arg1)
11123 	      && code == GE_EXPR
11124 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11125 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11126 	    return constant_boolean_node (1, type);
11127 	}
11128 
11129       /* If we are comparing an ABS_EXPR with a constant, we can
11130 	 convert all the cases into explicit comparisons, but they may
11131 	 well not be faster than doing the ABS and one comparison.
11132 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11133 	 and a comparison, and is probably faster.  */
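      /* A small illustration (constant assumed): "ABS (X) <= 5" becomes
	 "X >= -5 && X <= 5", i.e. one negation and two comparisons in
	 place of an ABS plus a comparison.  */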
11134       if (code == LE_EXPR
11135 	  && TREE_CODE (arg1) == INTEGER_CST
11136 	  && TREE_CODE (arg0) == ABS_EXPR
11137 	  && ! TREE_SIDE_EFFECTS (arg0)
11138 	  && (tem = negate_expr (arg1)) != 0
11139 	  && TREE_CODE (tem) == INTEGER_CST
11140 	  && !TREE_OVERFLOW (tem))
11141 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11142 			    build2 (GE_EXPR, type,
11143 				    TREE_OPERAND (arg0, 0), tem),
11144 			    build2 (LE_EXPR, type,
11145 				    TREE_OPERAND (arg0, 0), arg1));
11146 
11147       /* Convert ABS_EXPR<x> >= 0 to true.  */
11148       strict_overflow_p = false;
11149       if (code == GE_EXPR
11150 	  && (integer_zerop (arg1)
11151 	      || (! HONOR_NANS (arg0)
11152 		  && real_zerop (arg1)))
11153 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11154 	{
11155 	  if (strict_overflow_p)
11156 	    fold_overflow_warning (("assuming signed overflow does not occur "
11157 				    "when simplifying comparison of "
11158 				    "absolute value and zero"),
11159 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11160 	  return omit_one_operand_loc (loc, type,
11161 				       constant_boolean_node (true, type),
11162 				       arg0);
11163 	}
11164 
11165       /* Convert ABS_EXPR<x> < 0 to false.  */
11166       strict_overflow_p = false;
11167       if (code == LT_EXPR
11168 	  && (integer_zerop (arg1) || real_zerop (arg1))
11169 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11170 	{
11171 	  if (strict_overflow_p)
11172 	    fold_overflow_warning (("assuming signed overflow does not occur "
11173 				    "when simplifying comparison of "
11174 				    "absolute value and zero"),
11175 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11176 	  return omit_one_operand_loc (loc, type,
11177 				       constant_boolean_node (false, type),
11178 				       arg0);
11179 	}
11180 
11181       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11182 	 and similarly for >= into !=.  */
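      /* E.g. (illustrative): for unsigned X, "X < (1 << Y)" holds
	 exactly when no bit of X at position Y or above is set, which
	 is "(X >> Y) == 0"; ">=" correspondingly becomes "!= 0".  */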
11183       if ((code == LT_EXPR || code == GE_EXPR)
11184 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11185 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11186 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11187 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11188 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11189 				   TREE_OPERAND (arg1, 1)),
11190 			   build_zero_cst (TREE_TYPE (arg0)));
11191 
11192       /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
11193 	 otherwise Y might be >= # of bits in X's type and thus e.g.
11194 	 (unsigned char) (1 << Y) for Y 15 might be 0.
11195 	 If the cast is widening, then 1 << Y should have unsigned type,
11196 	 otherwise if Y is number of bits in the signed shift type minus 1,
11197 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
11198 	 31 might be 0xffffffff80000000.  */
11199       if ((code == LT_EXPR || code == GE_EXPR)
11200 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11201 	  && CONVERT_EXPR_P (arg1)
11202 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11203 	  && (element_precision (TREE_TYPE (arg1))
11204 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11205 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11206 	      || (element_precision (TREE_TYPE (arg1))
11207 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11208 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11209 	{
11210 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11211 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11212 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11213 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11214 			     build_zero_cst (TREE_TYPE (arg0)));
11215 	}
11216 
11217       return NULL_TREE;
11218 
11219     case UNORDERED_EXPR:
11220     case ORDERED_EXPR:
11221     case UNLT_EXPR:
11222     case UNLE_EXPR:
11223     case UNGT_EXPR:
11224     case UNGE_EXPR:
11225     case UNEQ_EXPR:
11226     case LTGT_EXPR:
11227       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11228       {
11229 	tree targ0 = strip_float_extensions (arg0);
11230 	tree targ1 = strip_float_extensions (arg1);
11231 	tree newtype = TREE_TYPE (targ0);
11232 
11233 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11234 	  newtype = TREE_TYPE (targ1);
11235 
11236 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11237 	  return fold_build2_loc (loc, code, type,
11238 			      fold_convert_loc (loc, newtype, targ0),
11239 			      fold_convert_loc (loc, newtype, targ1));
11240       }
11241 
11242       return NULL_TREE;
11243 
11244     case COMPOUND_EXPR:
11245       /* When pedantic, a compound expression can be neither an lvalue
11246 	 nor an integer constant expression.  */
11247       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11248 	return NULL_TREE;
11249       /* Don't let (0, 0) be null pointer constant.  */
11250       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11251 				 : fold_convert_loc (loc, type, arg1);
11252       return pedantic_non_lvalue_loc (loc, tem);
11253 
11254     case ASSERT_EXPR:
11255       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11256       gcc_unreachable ();
11257 
11258     default:
11259       return NULL_TREE;
11260     } /* switch (code) */
11261 }
11262 
11263 /* Data shared by contains_label_1 and contains_label_p.  */
11264 
11265 struct contains_label_data
11266 {
11267   hash_set<tree> *pset;
11268   bool inside_switch_p;
11269 };
11270 
11271 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
11272    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11273    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
11274 
11275 static tree
11276 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11277 {
11278   contains_label_data *d = (contains_label_data *) data;
11279   switch (TREE_CODE (*tp))
11280     {
11281     case LABEL_EXPR:
11282       return *tp;
11283 
11284     case CASE_LABEL_EXPR:
11285       if (!d->inside_switch_p)
11286 	return *tp;
11287       return NULL_TREE;
11288 
11289     case SWITCH_EXPR:
11290       if (!d->inside_switch_p)
11291 	{
11292 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11293 	    return *tp;
11294 	  d->inside_switch_p = true;
11295 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11296 	    return *tp;
11297 	  d->inside_switch_p = false;
11298 	  *walk_subtrees = 0;
11299 	}
11300       return NULL_TREE;
11301 
11302     case GOTO_EXPR:
11303       *walk_subtrees = 0;
11304       return NULL_TREE;
11305 
11306     default:
11307       return NULL_TREE;
11308     }
11309 }
11310 
11311 /* Return whether the sub-tree ST contains a label which is accessible from
11312    outside the sub-tree.  */
11313 
11314 static bool
11315 contains_label_p (tree st)
11316 {
11317   hash_set<tree> pset;
11318   contains_label_data data = { &pset, false };
11319   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11320 }
11321 
11322 /* Fold a ternary expression of code CODE and type TYPE with operands
11323    OP0, OP1, and OP2.  Return the folded expression if folding is
11324    successful.  Otherwise, return NULL_TREE.  */
11325 
11326 tree
11327 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11328 		  tree op0, tree op1, tree op2)
11329 {
11330   tree tem;
11331   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11332   enum tree_code_class kind = TREE_CODE_CLASS (code);
11333 
11334   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11335 	      && TREE_CODE_LENGTH (code) == 3);
11336 
11337   /* If this is a commutative operation, and OP0 is a constant, move it
11338      to OP1 to reduce the number of tests below.  */
11339   if (commutative_ternary_tree_code (code)
11340       && tree_swap_operands_p (op0, op1))
11341     return fold_build3_loc (loc, code, type, op1, op0, op2);
11342 
11343   tem = generic_simplify (loc, code, type, op0, op1, op2);
11344   if (tem)
11345     return tem;
11346 
11347   /* Strip any conversions that don't change the mode.  This is safe
11348      for every expression, except for a comparison expression because
11349      its signedness is derived from its operands.  So, in the latter
11350      case, only strip conversions that don't change the signedness.
11351 
11352      Note that this is done as an internal manipulation within the
11353      constant folder, in order to find the simplest representation of
11354      the arguments so that their form can be studied.  In any cases,
11355      the appropriate type conversions should be put back in the tree
11356      that will get out of the constant folder.  */
11357   if (op0)
11358     {
11359       arg0 = op0;
11360       STRIP_NOPS (arg0);
11361     }
11362 
11363   if (op1)
11364     {
11365       arg1 = op1;
11366       STRIP_NOPS (arg1);
11367     }
11368 
11369   if (op2)
11370     {
11371       arg2 = op2;
11372       STRIP_NOPS (arg2);
11373     }
11374 
11375   switch (code)
11376     {
11377     case COMPONENT_REF:
11378       if (TREE_CODE (arg0) == CONSTRUCTOR
11379 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11380 	{
11381 	  unsigned HOST_WIDE_INT idx;
11382 	  tree field, value;
11383 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11384 	    if (field == arg1)
11385 	      return value;
11386 	}
11387       return NULL_TREE;
11388 
11389     case COND_EXPR:
11390     case VEC_COND_EXPR:
11391       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11392 	 so all simple results must be passed through pedantic_non_lvalue.  */
11393       if (TREE_CODE (arg0) == INTEGER_CST)
11394 	{
11395 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11396 	  tem = integer_zerop (arg0) ? op2 : op1;
11397 	  /* Only optimize constant conditions when the selected branch
11398 	     has the same type as the COND_EXPR.  This avoids optimizing
11399              away "c ? x : throw", where the throw has a void type.
11400              Also avoid throwing away an operand that contains a label.  */
11401           if ((!TREE_SIDE_EFFECTS (unused_op)
11402                || !contains_label_p (unused_op))
11403               && (! VOID_TYPE_P (TREE_TYPE (tem))
11404                   || VOID_TYPE_P (type)))
11405 	    return pedantic_non_lvalue_loc (loc, tem);
11406 	  return NULL_TREE;
11407 	}
11408       else if (TREE_CODE (arg0) == VECTOR_CST)
11409 	{
11410 	  unsigned HOST_WIDE_INT nelts;
11411 	  if ((TREE_CODE (arg1) == VECTOR_CST
11412 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11413 	      && (TREE_CODE (arg2) == VECTOR_CST
11414 		  || TREE_CODE (arg2) == CONSTRUCTOR)
11415 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11416 	    {
11417 	      vec_perm_builder sel (nelts, nelts, 1);
11418 	      for (unsigned int i = 0; i < nelts; i++)
11419 		{
11420 		  tree val = VECTOR_CST_ELT (arg0, i);
11421 		  if (integer_all_onesp (val))
11422 		    sel.quick_push (i);
11423 		  else if (integer_zerop (val))
11424 		    sel.quick_push (nelts + i);
11425 		  else /* Currently unreachable.  */
11426 		    return NULL_TREE;
11427 		}
11428 	      vec_perm_indices indices (sel, 2, nelts);
11429 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
11430 	      if (t != NULL_TREE)
11431 		return t;
11432 	    }
11433 	}
11434 
11435       /* If we have A op B ? A : C, we may be able to convert this to a
11436 	 simpler expression, depending on the operation and the values
11437 	 of B and C.  Signed zeros prevent all of these transformations,
11438 	 for reasons given above each one.
11439 
11440          Also try swapping the arguments and inverting the conditional.  */
11441       if (COMPARISON_CLASS_P (arg0)
11442 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11443 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11444 	{
11445 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11446 	  if (tem)
11447 	    return tem;
11448 	}
11449 
11450       if (COMPARISON_CLASS_P (arg0)
11451 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11452 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11453 	{
11454 	  location_t loc0 = expr_location_or (arg0, loc);
11455 	  tem = fold_invert_truthvalue (loc0, arg0);
11456 	  if (tem && COMPARISON_CLASS_P (tem))
11457 	    {
11458 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11459 	      if (tem)
11460 		return tem;
11461 	    }
11462 	}
11463 
11464       /* If the second operand is simpler than the third, swap them
11465 	 since that produces better jump optimization results.  */
11466       if (truth_value_p (TREE_CODE (arg0))
11467 	  && tree_swap_operands_p (op1, op2))
11468 	{
11469 	  location_t loc0 = expr_location_or (arg0, loc);
11470 	  /* See if this can be inverted.  If it can't, possibly because
11471 	     it was a floating-point inequality comparison, don't do
11472 	     anything.  */
11473 	  tem = fold_invert_truthvalue (loc0, arg0);
11474 	  if (tem)
11475 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11476 	}
11477 
11478       /* Convert A ? 1 : 0 to simply A.  */
11479       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11480 				 : (integer_onep (op1)
11481 				    && !VECTOR_TYPE_P (type)))
11482 	  && integer_zerop (op2)
11483 	  /* If we try to convert OP0 to our type, the
11484 	     call to fold will try to move the conversion inside
11485 	     a COND, which will recurse.  In that case, the COND_EXPR
11486 	     is probably the best choice, so leave it alone.  */
11487 	  && type == TREE_TYPE (arg0))
11488 	return pedantic_non_lvalue_loc (loc, arg0);
11489 
11490       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11491 	 over COND_EXPR in cases such as floating point comparisons.  */
11492       if (integer_zerop (op1)
11493 	  && code == COND_EXPR
11494 	  && integer_onep (op2)
11495 	  && !VECTOR_TYPE_P (type)
11496 	  && truth_value_p (TREE_CODE (arg0)))
11497 	return pedantic_non_lvalue_loc (loc,
11498 				    fold_convert_loc (loc, type,
11499 					      invert_truthvalue_loc (loc,
11500 								     arg0)));
11501 
11502       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
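      /* Illustrative case (width assumed): for 32-bit int A,
	 "A < 0 ? 0x80000000 : 0" yields the sign bit of A and nothing
	 else, which is exactly "A & 0x80000000".  */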
11503       if (TREE_CODE (arg0) == LT_EXPR
11504 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11505 	  && integer_zerop (op2)
11506 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11507 	{
11508 	  /* sign_bit_p looks through both zero and sign extensions,
11509 	     but for this optimization only sign extensions are
11510 	     usable.  */
11511 	  tree tem2 = TREE_OPERAND (arg0, 0);
11512 	  while (tem != tem2)
11513 	    {
11514 	      if (TREE_CODE (tem2) != NOP_EXPR
11515 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11516 		{
11517 		  tem = NULL_TREE;
11518 		  break;
11519 		}
11520 	      tem2 = TREE_OPERAND (tem2, 0);
11521 	    }
11522 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11523 	     If <sign bit of A> has wider type than A, bits outside
11524 	     of A's precision in <sign bit of A> need to be checked.
11525 	     If they are all 0, this optimization needs to be done
11526 	     in unsigned A's type, if they are all 1 in signed A's type,
11527 	     otherwise this can't be done.  */
11528 	  if (tem
11529 	      && TYPE_PRECISION (TREE_TYPE (tem))
11530 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11531 	      && TYPE_PRECISION (TREE_TYPE (tem))
11532 		 < TYPE_PRECISION (type))
11533 	    {
11534 	      int inner_width, outer_width;
11535 	      tree tem_type;
11536 
11537 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11538 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11539 	      if (outer_width > TYPE_PRECISION (type))
11540 		outer_width = TYPE_PRECISION (type);
11541 
11542 	      wide_int mask = wi::shifted_mask
11543 		(inner_width, outer_width - inner_width, false,
11544 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11545 
11546 	      wide_int common = mask & wi::to_wide (arg1);
11547 	      if (common == mask)
11548 		{
11549 		  tem_type = signed_type_for (TREE_TYPE (tem));
11550 		  tem = fold_convert_loc (loc, tem_type, tem);
11551 		}
11552 	      else if (common == 0)
11553 		{
11554 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11555 		  tem = fold_convert_loc (loc, tem_type, tem);
11556 		}
11557 	      else
11558 		tem = NULL;
11559 	    }
11560 
11561 	  if (tem)
11562 	    return
11563 	      fold_convert_loc (loc, type,
11564 				fold_build2_loc (loc, BIT_AND_EXPR,
11565 					     TREE_TYPE (tem), tem,
11566 					     fold_convert_loc (loc,
11567 							       TREE_TYPE (tem),
11568 							       arg1)));
11569 	}
11570 
11571       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11572 	 already handled above.  */
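      /* For example (N assumed): with N == 3, "(A >> 3) & 1 ? 8 : 0"
	 reads bit 3 of A and scales it back into place, which is simply
	 "A & 8".  */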
11573       if (TREE_CODE (arg0) == BIT_AND_EXPR
11574 	  && integer_onep (TREE_OPERAND (arg0, 1))
11575 	  && integer_zerop (op2)
11576 	  && integer_pow2p (arg1))
11577 	{
11578 	  tree tem = TREE_OPERAND (arg0, 0);
11579 	  STRIP_NOPS (tem);
11580 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11581 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11582               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11583 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11584 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11585 				    fold_convert_loc (loc, type,
11586 						      TREE_OPERAND (tem, 0)),
11587 				    op1);
11588 	}
11589 
11590       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11591 	 is probably obsolete because the first operand should be a
11592 	 truth value (that's why we have the two cases above), but let's
11593 	 leave it in until we can confirm this for all front-ends.  */
11594       if (integer_zerop (op2)
11595 	  && TREE_CODE (arg0) == NE_EXPR
11596 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11597 	  && integer_pow2p (arg1)
11598 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11599 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11600 			      arg1, OEP_ONLY_CONST)
11601 	  /* operand_equal_p compares just the value, not the precision, so
11602 	     e.g. arg1 could be an 8-bit -128 and a power of two, while the
11603 	     BIT_AND_EXPR's second operand is a 32-bit -128, which is not a
11604 	     power of two (or vice versa).  */
11605 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11606 	return pedantic_non_lvalue_loc (loc,
11607 					fold_convert_loc (loc, type,
11608 							  TREE_OPERAND (arg0,
11609 									0)));
11610 
11611       /* Disable the transformations below for vectors, since
11612 	 fold_binary_op_with_conditional_arg may undo them immediately,
11613 	 yielding an infinite loop.  */
11614       if (code == VEC_COND_EXPR)
11615 	return NULL_TREE;
11616 
11617       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11618       if (integer_zerop (op2)
11619 	  && truth_value_p (TREE_CODE (arg0))
11620 	  && truth_value_p (TREE_CODE (arg1))
11621 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11622 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11623 							   : TRUTH_ANDIF_EXPR,
11624 				type, fold_convert_loc (loc, type, arg0), op1);
11625 
11626       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11627       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11628 	  && truth_value_p (TREE_CODE (arg0))
11629 	  && truth_value_p (TREE_CODE (arg1))
11630 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11631 	{
11632 	  location_t loc0 = expr_location_or (arg0, loc);
11633 	  /* Only perform transformation if ARG0 is easily inverted.  */
11634 	  tem = fold_invert_truthvalue (loc0, arg0);
11635 	  if (tem)
11636 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11637 					 ? BIT_IOR_EXPR
11638 					 : TRUTH_ORIF_EXPR,
11639 				    type, fold_convert_loc (loc, type, tem),
11640 				    op1);
11641 	}
11642 
11643       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11644       if (integer_zerop (arg1)
11645 	  && truth_value_p (TREE_CODE (arg0))
11646 	  && truth_value_p (TREE_CODE (op2))
11647 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11648 	{
11649 	  location_t loc0 = expr_location_or (arg0, loc);
11650 	  /* Only perform transformation if ARG0 is easily inverted.  */
11651 	  tem = fold_invert_truthvalue (loc0, arg0);
11652 	  if (tem)
11653 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11654 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11655 				    type, fold_convert_loc (loc, type, tem),
11656 				    op2);
11657 	}
11658 
11659       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11660       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11661 	  && truth_value_p (TREE_CODE (arg0))
11662 	  && truth_value_p (TREE_CODE (op2))
11663 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11664 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11665 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11666 				type, fold_convert_loc (loc, type, arg0), op2);
11667 
11668       return NULL_TREE;
11669 
11670     case CALL_EXPR:
11671       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11672 	 of fold_ternary on them.  */
11673       gcc_unreachable ();
11674 
11675     case BIT_FIELD_REF:
11676       if (TREE_CODE (arg0) == VECTOR_CST
11677 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11678 	      || (VECTOR_TYPE_P (type)
11679 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11680 	  && tree_fits_uhwi_p (op1)
11681 	  && tree_fits_uhwi_p (op2))
11682 	{
11683 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11684 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11685 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11686 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11687 
11688 	  if (n != 0
11689 	      && (idx % width) == 0
11690 	      && (n % width) == 0
11691 	      && known_le ((idx + n) / width,
11692 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11693 	    {
11694 	      idx = idx / width;
11695 	      n = n / width;
11696 
11697 	      if (TREE_CODE (arg0) == VECTOR_CST)
11698 		{
11699 		  if (n == 1)
11700 		    {
11701 		      tem = VECTOR_CST_ELT (arg0, idx);
11702 		      if (VECTOR_TYPE_P (type))
11703 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11704 		      return tem;
11705 		    }
11706 
11707 		  tree_vector_builder vals (type, n, 1);
11708 		  for (unsigned i = 0; i < n; ++i)
11709 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11710 		  return vals.build ();
11711 		}
11712 	    }
11713 	}
11714 
11715       /* On constants we can use native encode/interpret to constant
11716          fold (nearly) all BIT_FIELD_REFs.  */
11717       if (CONSTANT_CLASS_P (arg0)
11718 	  && can_native_interpret_type_p (type)
11719 	  && BITS_PER_UNIT == 8
11720 	  && tree_fits_uhwi_p (op1)
11721 	  && tree_fits_uhwi_p (op2))
11722 	{
11723 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11724 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11725 	  /* Limit us to a reasonable amount of work.  To relax the
11726 	     other limitations we need bit-shifting of the buffer
11727 	     and rounding up the size.  */
11728 	  if (bitpos % BITS_PER_UNIT == 0
11729 	      && bitsize % BITS_PER_UNIT == 0
11730 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11731 	    {
11732 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11733 	      unsigned HOST_WIDE_INT len
11734 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11735 				      bitpos / BITS_PER_UNIT);
11736 	      if (len > 0
11737 		  && len * BITS_PER_UNIT >= bitsize)
11738 		{
11739 		  tree v = native_interpret_expr (type, b,
11740 						  bitsize / BITS_PER_UNIT);
11741 		  if (v)
11742 		    return v;
11743 		}
11744 	    }
11745 	}
11746 
11747       return NULL_TREE;
11748 
11749     case FMA_EXPR:
11750       /* For integers we can decompose the FMA if possible.  */
11751       if (TREE_CODE (arg0) == INTEGER_CST
11752 	  && TREE_CODE (arg1) == INTEGER_CST)
11753 	return fold_build2_loc (loc, PLUS_EXPR, type,
11754 				const_binop (MULT_EXPR, arg0, arg1), arg2);
11755       if (integer_zerop (arg2))
11756 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11757 
11758       return fold_fma (loc, type, arg0, arg1, arg2);
11759 
11760     case VEC_PERM_EXPR:
11761       if (TREE_CODE (arg2) == VECTOR_CST)
11762 	{
11763 	  /* Build a vector of integers from the tree mask.  */
11764 	  vec_perm_builder builder;
11765 	  if (!tree_to_vec_perm_builder (&builder, arg2))
11766 	    return NULL_TREE;
11767 
11768 	  /* Create a vec_perm_indices for the integer vector.  */
11769 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11770 	  bool single_arg = (op0 == op1);
11771 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11772 
11773 	  /* Check for cases that fold to OP0 or OP1 in their original
11774 	     element order.  */
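	  /* Illustrative selectors (4-element vectors assumed): a mask
	     of { 0, 1, 2, 3 } is the identity on OP0, while
	     { 4, 5, 6, 7 } selects OP1 unchanged.  */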
11775 	  if (sel.series_p (0, 1, 0, 1))
11776 	    return op0;
11777 	  if (sel.series_p (0, 1, nelts, 1))
11778 	    return op1;
11779 
11780 	  if (!single_arg)
11781 	    {
11782 	      if (sel.all_from_input_p (0))
11783 		op1 = op0;
11784 	      else if (sel.all_from_input_p (1))
11785 		{
11786 		  op0 = op1;
11787 		  sel.rotate_inputs (1);
11788 		}
11789 	    }
11790 
11791 	  if ((TREE_CODE (op0) == VECTOR_CST
11792 	       || TREE_CODE (op0) == CONSTRUCTOR)
11793 	      && (TREE_CODE (op1) == VECTOR_CST
11794 		  || TREE_CODE (op1) == CONSTRUCTOR))
11795 	    {
11796 	      tree t = fold_vec_perm (type, op0, op1, sel);
11797 	      if (t != NULL_TREE)
11798 		return t;
11799 	    }
11800 
11801 	  bool changed = (op0 == op1 && !single_arg);
11802 
11803 	  /* Generate a canonical form of the selector.  */
11804 	  if (arg2 == op2 && sel.encoding () != builder)
11805 	    {
11806 	      /* Some targets are deficient and fail to expand a single
11807 		 argument permutation while still allowing an equivalent
11808 		 2-argument version.  */
11809 	      if (sel.ninputs () == 2
11810 		  || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11811 		op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11812 	      else
11813 		{
11814 		  vec_perm_indices sel2 (builder, 2, nelts);
11815 		  if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11816 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11817 		  else
11818 		    /* Not directly supported with either encoding,
11819 		       so use the preferred form.  */
11820 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11821 		}
11822 	      changed = true;
11823 	    }
11824 
11825 	  if (changed)
11826 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11827 	}
11828       return NULL_TREE;
11829 
11830     case BIT_INSERT_EXPR:
11831       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
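      /* Sketch (operands assumed): inserting the 8-bit value 0xab at
	 bit position 8 of the 32-bit constant 0x12345678 clears bits
	 8-15 via the negated shifted mask and ORs in 0xab << 8, giving
	 0x1234ab78.  */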
11832       if (TREE_CODE (arg0) == INTEGER_CST
11833 	  && TREE_CODE (arg1) == INTEGER_CST)
11834 	{
11835 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11836 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11837 	  wide_int tem = (wi::to_wide (arg0)
11838 			  & wi::shifted_mask (bitpos, bitsize, true,
11839 					      TYPE_PRECISION (type)));
11840 	  wide_int tem2
11841 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11842 				    bitsize), bitpos);
11843 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11844 	}
11845       else if (TREE_CODE (arg0) == VECTOR_CST
11846 	       && CONSTANT_CLASS_P (arg1)
11847 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11848 				      TREE_TYPE (arg1)))
11849 	{
11850 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11851 	  unsigned HOST_WIDE_INT elsize
11852 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11853 	  if (bitpos % elsize == 0)
11854 	    {
11855 	      unsigned k = bitpos / elsize;
11856 	      unsigned HOST_WIDE_INT nelts;
11857 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11858 		return arg0;
11859 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11860 		{
11861 		  tree_vector_builder elts (type, nelts, 1);
11862 		  elts.quick_grow (nelts);
11863 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11864 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11865 		  return elts.build ();
11866 		}
11867 	    }
11868 	}
11869       return NULL_TREE;
11870 
11871     default:
11872       return NULL_TREE;
11873     } /* switch (code) */
11874 }
11875 
11876 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11877    of an array (or vector).  */
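/* A hypothetical use, for illustration only: for a CONSTRUCTOR built
   from "int a[4] = { 10, 20, 30, 40 };", an ACCESS_INDEX of 2 returns
   the INTEGER_CST 30; RANGE_EXPR indexes and elements without an
   explicit index are handled by the loop below.  */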
11878 
11879 tree
11880 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11881 {
11882   tree index_type = NULL_TREE;
11883   offset_int low_bound = 0;
11884 
11885   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11886     {
11887       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11888       if (domain_type && TYPE_MIN_VALUE (domain_type))
11889 	{
11890 	  /* Static constructors for variably sized objects make no sense.  */
11891 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11892 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11893 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11894 	}
11895     }
11896 
11897   if (index_type)
11898     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11899 			    TYPE_SIGN (index_type));
11900 
11901   offset_int index = low_bound - 1;
11902   if (index_type)
11903     index = wi::ext (index, TYPE_PRECISION (index_type),
11904 		     TYPE_SIGN (index_type));
11905 
11906   offset_int max_index;
11907   unsigned HOST_WIDE_INT cnt;
11908   tree cfield, cval;
11909 
11910   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11911     {
11912       /* An array constructor might explicitly set the index, specify a
11913 	 range, or leave the index NULL, meaning that it is the next index
11914 	 after the previous one.  */
11915       if (cfield)
11916 	{
11917 	  if (TREE_CODE (cfield) == INTEGER_CST)
11918 	    max_index = index = wi::to_offset (cfield);
11919 	  else
11920 	    {
11921 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11922 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11923 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11924 	    }
11925 	}
11926       else
11927 	{
11928 	  index += 1;
11929 	  if (index_type)
11930 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11931 			     TYPE_SIGN (index_type));
11932 	  max_index = index;
11933 	}
11934 
11935     /* Do we have a match?  */
11936     if (wi::cmpu (access_index, index) >= 0
11937 	&& wi::cmpu (access_index, max_index) <= 0)
11938       return cval;
11939   }
11940   return NULL_TREE;
11941 }
11942 
11943 /* Perform constant folding and related simplification of EXPR.
11944    The related simplifications include x*1 => x, x*0 => 0, etc.,
11945    and application of the associative law.
11946    NOP_EXPR conversions may be removed freely (as long as we
11947    are careful not to change the type of the overall expression).
11948    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11949    but we can constant-fold them if they have constant operands.  */
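/* A hypothetical call, for illustration only (build2 and build_int_cst
   are the usual tree-construction entry points):

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 1),
			      build_int_cst (integer_type_node, 2)));

   would hand back the INTEGER_CST 3 rather than the PLUS_EXPR tree.  */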
11950 
11951 #ifdef ENABLE_FOLD_CHECKING
11952 # define fold(x) fold_1 (x)
11953 static tree fold_1 (tree);
11954 static
11955 #endif
11956 tree
11957 fold (tree expr)
11958 {
11959   const tree t = expr;
11960   enum tree_code code = TREE_CODE (t);
11961   enum tree_code_class kind = TREE_CODE_CLASS (code);
11962   tree tem;
11963   location_t loc = EXPR_LOCATION (expr);
11964 
11965   /* Return right away if a constant.  */
11966   if (kind == tcc_constant)
11967     return t;
11968 
11969   /* CALL_EXPR-like objects with variable numbers of operands are
11970      treated specially.  */
11971   if (kind == tcc_vl_exp)
11972     {
11973       if (code == CALL_EXPR)
11974 	{
11975 	  tem = fold_call_expr (loc, expr, false);
11976 	  return tem ? tem : expr;
11977 	}
11978       return expr;
11979     }
11980 
11981   if (IS_EXPR_CODE_CLASS (kind))
11982     {
11983       tree type = TREE_TYPE (t);
11984       tree op0, op1, op2;
11985 
11986       switch (TREE_CODE_LENGTH (code))
11987 	{
11988 	case 1:
11989 	  op0 = TREE_OPERAND (t, 0);
11990 	  tem = fold_unary_loc (loc, code, type, op0);
11991 	  return tem ? tem : expr;
11992 	case 2:
11993 	  op0 = TREE_OPERAND (t, 0);
11994 	  op1 = TREE_OPERAND (t, 1);
11995 	  tem = fold_binary_loc (loc, code, type, op0, op1);
11996 	  return tem ? tem : expr;
11997 	case 3:
11998 	  op0 = TREE_OPERAND (t, 0);
11999 	  op1 = TREE_OPERAND (t, 1);
12000 	  op2 = TREE_OPERAND (t, 2);
12001 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12002 	  return tem ? tem : expr;
12003 	default:
12004 	  break;
12005 	}
12006     }
12007 
12008   switch (code)
12009     {
12010     case ARRAY_REF:
12011       {
12012 	tree op0 = TREE_OPERAND (t, 0);
12013 	tree op1 = TREE_OPERAND (t, 1);
12014 
12015 	if (TREE_CODE (op1) == INTEGER_CST
12016 	    && TREE_CODE (op0) == CONSTRUCTOR
12017 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12018 	  {
12019 	    tree val = get_array_ctor_element_at_index (op0,
12020 							wi::to_offset (op1));
12021 	    if (val)
12022 	      return val;
12023 	  }
12024 
12025 	return t;
12026       }
12027 
12028       /* Return a VECTOR_CST if possible.  */
12029     case CONSTRUCTOR:
12030       {
12031 	tree type = TREE_TYPE (t);
12032 	if (TREE_CODE (type) != VECTOR_TYPE)
12033 	  return t;
12034 
12035 	unsigned i;
12036 	tree val;
12037 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12038 	  if (! CONSTANT_CLASS_P (val))
12039 	    return t;
12040 
12041 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12042       }
12043 
12044     case CONST_DECL:
12045       return fold (DECL_INITIAL (t));
12046 
12047     default:
12048       return t;
12049     } /* switch (code) */
12050 }
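
/* Illustrative sketch, not part of GCC itself: folding X * 1 yields X
   rather than a MULT_EXPR node.  Assuming X is some tree of type
   integer_type_node:

     tree one = build_int_cst (integer_type_node, 1);
     tree prod = fold_build2 (MULT_EXPR, integer_type_node, x, one);

   PROD is then X itself.  Likewise, per the CONSTRUCTOR case above, a
   constructor of vector type whose elements are all constants folds to
   a VECTOR_CST.  */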
12051 
12052 #ifdef ENABLE_FOLD_CHECKING
12053 #undef fold
12054 
12055 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12056 				hash_table<nofree_ptr_hash<const tree_node> > *);
12057 static void fold_check_failed (const_tree, const_tree);
12058 void print_fold_checksum (const_tree);
12059 
12060 /* When --enable-checking=fold, compute a digest of EXPR before
12061    and after the actual fold call, to verify that fold did not
12062    accidentally change the original EXPR.  */
12063 
12064 tree
12065 fold (tree expr)
12066 {
12067   tree ret;
12068   struct md5_ctx ctx;
12069   unsigned char checksum_before[16], checksum_after[16];
12070   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12071 
12072   md5_init_ctx (&ctx);
12073   fold_checksum_tree (expr, &ctx, &ht);
12074   md5_finish_ctx (&ctx, checksum_before);
12075   ht.empty ();
12076 
12077   ret = fold_1 (expr);
12078 
12079   md5_init_ctx (&ctx);
12080   fold_checksum_tree (expr, &ctx, &ht);
12081   md5_finish_ctx (&ctx, checksum_after);
12082 
12083   if (memcmp (checksum_before, checksum_after, 16))
12084     fold_check_failed (expr, ret);
12085 
12086   return ret;
12087 }
12088 
12089 void
12090 print_fold_checksum (const_tree expr)
12091 {
12092   struct md5_ctx ctx;
12093   unsigned char checksum[16], cnt;
12094   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12095 
12096   md5_init_ctx (&ctx);
12097   fold_checksum_tree (expr, &ctx, &ht);
12098   md5_finish_ctx (&ctx, checksum);
12099   for (cnt = 0; cnt < 16; ++cnt)
12100     fprintf (stderr, "%02x", checksum[cnt]);
12101   putc ('\n', stderr);
12102 }
12103 
12104 static void
12105 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12106 {
12107   internal_error ("fold check: original tree changed by fold");
12108 }
12109 
12110 static void
12111 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12112 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12113 {
12114   const tree_node **slot;
12115   enum tree_code code;
12116   union tree_node buf;
12117   int i, len;
12118 
12119  recursive_label:
12120   if (expr == NULL)
12121     return;
12122   slot = ht->find_slot (expr, INSERT);
12123   if (*slot != NULL)
12124     return;
12125   *slot = expr;
12126   code = TREE_CODE (expr);
12127   if (TREE_CODE_CLASS (code) == tcc_declaration
12128       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12129     {
12130       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12131       memcpy ((char *) &buf, expr, tree_size (expr));
12132       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12133       buf.decl_with_vis.symtab_node = NULL;
12134       expr = (tree) &buf;
12135     }
12136   else if (TREE_CODE_CLASS (code) == tcc_type
12137 	   && (TYPE_POINTER_TO (expr)
12138 	       || TYPE_REFERENCE_TO (expr)
12139 	       || TYPE_CACHED_VALUES_P (expr)
12140 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12141 	       || TYPE_NEXT_VARIANT (expr)
12142 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12143     {
12144       /* Allow these fields to be modified.  */
12145       tree tmp;
12146       memcpy ((char *) &buf, expr, tree_size (expr));
12147       expr = tmp = (tree) &buf;
12148       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12149       TYPE_POINTER_TO (tmp) = NULL;
12150       TYPE_REFERENCE_TO (tmp) = NULL;
12151       TYPE_NEXT_VARIANT (tmp) = NULL;
12152       TYPE_ALIAS_SET (tmp) = -1;
12153       if (TYPE_CACHED_VALUES_P (tmp))
12154 	{
12155 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12156 	  TYPE_CACHED_VALUES (tmp) = NULL;
12157 	}
12158     }
12159   md5_process_bytes (expr, tree_size (expr), ctx);
12160   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12161     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12162   if (TREE_CODE_CLASS (code) != tcc_type
12163       && TREE_CODE_CLASS (code) != tcc_declaration
12164       && code != TREE_LIST
12165       && code != SSA_NAME
12166       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12167     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12168   switch (TREE_CODE_CLASS (code))
12169     {
12170     case tcc_constant:
12171       switch (code)
12172 	{
12173 	case STRING_CST:
12174 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12175 			     TREE_STRING_LENGTH (expr), ctx);
12176 	  break;
12177 	case COMPLEX_CST:
12178 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12179 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12180 	  break;
12181 	case VECTOR_CST:
12182 	  len = vector_cst_encoded_nelts (expr);
12183 	  for (i = 0; i < len; ++i)
12184 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12185 	  break;
12186 	default:
12187 	  break;
12188 	}
12189       break;
12190     case tcc_exceptional:
12191       switch (code)
12192 	{
12193 	case TREE_LIST:
12194 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12195 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12196 	  expr = TREE_CHAIN (expr);
12197 	  goto recursive_label;
12198 	  break;
12199 	case TREE_VEC:
12200 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12201 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12202 	  break;
12203 	default:
12204 	  break;
12205 	}
12206       break;
12207     case tcc_expression:
12208     case tcc_reference:
12209     case tcc_comparison:
12210     case tcc_unary:
12211     case tcc_binary:
12212     case tcc_statement:
12213     case tcc_vl_exp:
12214       len = TREE_OPERAND_LENGTH (expr);
12215       for (i = 0; i < len; ++i)
12216 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12217       break;
12218     case tcc_declaration:
12219       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12220       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12221       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12222 	{
12223 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12224 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12225 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12226 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12227 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12228 	}
12229 
12230       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12231 	{
12232 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12233 	    {
12234 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12235 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12236 	    }
12237 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12238 	}
12239       break;
12240     case tcc_type:
12241       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12242         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12243       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12244       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12245       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12246       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12247       if (INTEGRAL_TYPE_P (expr)
12248           || SCALAR_FLOAT_TYPE_P (expr))
12249 	{
12250 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12251 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12252 	}
12253       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12254       if (TREE_CODE (expr) == RECORD_TYPE
12255 	  || TREE_CODE (expr) == UNION_TYPE
12256 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12257 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12258       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12259       break;
12260     default:
12261       break;
12262     }
12263 }
12264 
12265 /* Helper function for outputting the checksum of a tree T.  When
12266    debugging with gdb, you can "define mynext" to be "next" followed
12267    by "call debug_fold_checksum (op0)", then just trace down till the
12268    outputs differ.  */
12269 
12270 DEBUG_FUNCTION void
12271 debug_fold_checksum (const_tree t)
12272 {
12273   int i;
12274   unsigned char checksum[16];
12275   struct md5_ctx ctx;
12276   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12277 
12278   md5_init_ctx (&ctx);
12279   fold_checksum_tree (t, &ctx, &ht);
12280   md5_finish_ctx (&ctx, checksum);
12281   ht.empty ();
12282 
12283   for (i = 0; i < 16; i++)
12284     fprintf (stderr, "%d ", checksum[i]);
12285 
12286   fprintf (stderr, "\n");
12287 }
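
/* For example, following the recipe above, a gdb session might look like:

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end

   Stepping with "mynext" then prints the checksum after each statement,
   so the first step whose output differs pinpoints where OP0 changed.  */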
12288 
12289 #endif
12290 
12291 /* Fold a unary tree expression with code CODE of type TYPE with an
12292    operand OP0.  LOC is the location of the resulting expression.
12293    Return a folded expression if successful.  Otherwise, return a tree
12294    expression with code CODE of type TYPE with an operand OP0.  */
12295 
12296 tree
12297 fold_build1_loc (location_t loc,
12298 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12299 {
12300   tree tem;
12301 #ifdef ENABLE_FOLD_CHECKING
12302   unsigned char checksum_before[16], checksum_after[16];
12303   struct md5_ctx ctx;
12304   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12305 
12306   md5_init_ctx (&ctx);
12307   fold_checksum_tree (op0, &ctx, &ht);
12308   md5_finish_ctx (&ctx, checksum_before);
12309   ht.empty ();
12310 #endif
12311 
12312   tem = fold_unary_loc (loc, code, type, op0);
12313   if (!tem)
12314     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12315 
12316 #ifdef ENABLE_FOLD_CHECKING
12317   md5_init_ctx (&ctx);
12318   fold_checksum_tree (op0, &ctx, &ht);
12319   md5_finish_ctx (&ctx, checksum_after);
12320 
12321   if (memcmp (checksum_before, checksum_after, 16))
12322     fold_check_failed (op0, tem);
12323 #endif
12324   return tem;
12325 }
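
/* Sketch of a typical use: negating a constant folds all the way to a
   fresh INTEGER_CST rather than building a NEGATE_EXPR:

     tree five = build_int_cst (integer_type_node, 5);
     tree neg = fold_build1_loc (UNKNOWN_LOCATION, NEGATE_EXPR,
				 integer_type_node, five);

   NEG is then the INTEGER_CST -5.  */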
12326 
12327 /* Fold a binary tree expression with code CODE of type TYPE with
12328    operands OP0 and OP1.  LOC is the location of the resulting
12329    expression.  Return a folded expression if successful.  Otherwise,
12330    return a tree expression with code CODE of type TYPE with operands
12331    OP0 and OP1.  */
12332 
12333 tree
12334 fold_build2_loc (location_t loc,
12335 		      enum tree_code code, tree type, tree op0, tree op1
12336 		      MEM_STAT_DECL)
12337 {
12338   tree tem;
12339 #ifdef ENABLE_FOLD_CHECKING
12340   unsigned char checksum_before_op0[16],
12341                 checksum_before_op1[16],
12342 		checksum_after_op0[16],
12343 		checksum_after_op1[16];
12344   struct md5_ctx ctx;
12345   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12346 
12347   md5_init_ctx (&ctx);
12348   fold_checksum_tree (op0, &ctx, &ht);
12349   md5_finish_ctx (&ctx, checksum_before_op0);
12350   ht.empty ();
12351 
12352   md5_init_ctx (&ctx);
12353   fold_checksum_tree (op1, &ctx, &ht);
12354   md5_finish_ctx (&ctx, checksum_before_op1);
12355   ht.empty ();
12356 #endif
12357 
12358   tem = fold_binary_loc (loc, code, type, op0, op1);
12359   if (!tem)
12360     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12361 
12362 #ifdef ENABLE_FOLD_CHECKING
12363   md5_init_ctx (&ctx);
12364   fold_checksum_tree (op0, &ctx, &ht);
12365   md5_finish_ctx (&ctx, checksum_after_op0);
12366   ht.empty ();
12367 
12368   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12369     fold_check_failed (op0, tem);
12370 
12371   md5_init_ctx (&ctx);
12372   fold_checksum_tree (op1, &ctx, &ht);
12373   md5_finish_ctx (&ctx, checksum_after_op1);
12374 
12375   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12376     fold_check_failed (op1, tem);
12377 #endif
12378   return tem;
12379 }
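
/* Sketch: with two constant operands the result is itself a constant,
   e.g.

     tree sum = fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
				 integer_type_node,
				 build_int_cst (integer_type_node, 2),
				 build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5; only when no simplification applies does the
   function fall back to building a PLUS_EXPR via build2_loc.  */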
12380 
12381 /* Fold a ternary tree expression with code CODE of type TYPE with
12382    operands OP0, OP1, and OP2.  Return a folded expression if
12383    successful.  Otherwise, return a tree expression with code CODE of
12384    type TYPE with operands OP0, OP1, and OP2.  */
12385 
12386 tree
12387 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12388 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12389 {
12390   tree tem;
12391 #ifdef ENABLE_FOLD_CHECKING
12392   unsigned char checksum_before_op0[16],
12393                 checksum_before_op1[16],
12394                 checksum_before_op2[16],
12395 		checksum_after_op0[16],
12396 		checksum_after_op1[16],
12397 		checksum_after_op2[16];
12398   struct md5_ctx ctx;
12399   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12400 
12401   md5_init_ctx (&ctx);
12402   fold_checksum_tree (op0, &ctx, &ht);
12403   md5_finish_ctx (&ctx, checksum_before_op0);
12404   ht.empty ();
12405 
12406   md5_init_ctx (&ctx);
12407   fold_checksum_tree (op1, &ctx, &ht);
12408   md5_finish_ctx (&ctx, checksum_before_op1);
12409   ht.empty ();
12410 
12411   md5_init_ctx (&ctx);
12412   fold_checksum_tree (op2, &ctx, &ht);
12413   md5_finish_ctx (&ctx, checksum_before_op2);
12414   ht.empty ();
12415 #endif
12416 
12417   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12418   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12419   if (!tem)
12420     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12421 
12422 #ifdef ENABLE_FOLD_CHECKING
12423   md5_init_ctx (&ctx);
12424   fold_checksum_tree (op0, &ctx, &ht);
12425   md5_finish_ctx (&ctx, checksum_after_op0);
12426   ht.empty ();
12427 
12428   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12429     fold_check_failed (op0, tem);
12430 
12431   md5_init_ctx (&ctx);
12432   fold_checksum_tree (op1, &ctx, &ht);
12433   md5_finish_ctx (&ctx, checksum_after_op1);
12434   ht.empty ();
12435 
12436   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12437     fold_check_failed (op1, tem);
12438 
12439   md5_init_ctx (&ctx);
12440   fold_checksum_tree (op2, &ctx, &ht);
12441   md5_finish_ctx (&ctx, checksum_after_op2);
12442 
12443   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12444     fold_check_failed (op2, tem);
12445 #endif
12446   return tem;
12447 }
12448 
12449 /* Fold a CALL_EXPR expression of type TYPE with operand FN, NARGS
12450    arguments in ARGARRAY, and a null static chain.
12451    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12452    of type TYPE from the given operands as constructed by build_call_array.  */
12453 
12454 tree
12455 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12456 			   int nargs, tree *argarray)
12457 {
12458   tree tem;
12459 #ifdef ENABLE_FOLD_CHECKING
12460   unsigned char checksum_before_fn[16],
12461                 checksum_before_arglist[16],
12462 		checksum_after_fn[16],
12463 		checksum_after_arglist[16];
12464   struct md5_ctx ctx;
12465   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12466   int i;
12467 
12468   md5_init_ctx (&ctx);
12469   fold_checksum_tree (fn, &ctx, &ht);
12470   md5_finish_ctx (&ctx, checksum_before_fn);
12471   ht.empty ();
12472 
12473   md5_init_ctx (&ctx);
12474   for (i = 0; i < nargs; i++)
12475     fold_checksum_tree (argarray[i], &ctx, &ht);
12476   md5_finish_ctx (&ctx, checksum_before_arglist);
12477   ht.empty ();
12478 #endif
12479 
12480   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12481   if (!tem)
12482     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12483 
12484 #ifdef ENABLE_FOLD_CHECKING
12485   md5_init_ctx (&ctx);
12486   fold_checksum_tree (fn, &ctx, &ht);
12487   md5_finish_ctx (&ctx, checksum_after_fn);
12488   ht.empty ();
12489 
12490   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12491     fold_check_failed (fn, tem);
12492 
12493   md5_init_ctx (&ctx);
12494   for (i = 0; i < nargs; i++)
12495     fold_checksum_tree (argarray[i], &ctx, &ht);
12496   md5_finish_ctx (&ctx, checksum_after_arglist);
12497 
12498   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12499     fold_check_failed (NULL_TREE, tem);
12500 #endif
12501   return tem;
12502 }
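
/* Sketch, assuming the fabs builtin is available for the target:

     tree fndecl = builtin_decl_explicit (BUILT_IN_FABS);
     tree fn = build_fold_addr_expr (fndecl);
     tree arg[1] = { build_real (double_type_node, dconst1) };
     tree r = fold_build_call_array_loc (UNKNOWN_LOCATION, double_type_node,
					 fn, 1, arg);

   R may be folded to the REAL_CST 1.0 by fold_builtin_call_array;
   otherwise it is the CALL_EXPR built by build_call_array_loc.  */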
12503 
12504 /* Perform constant folding and related simplification of initializer
12505    expression EXPR.  These behave identically to "fold_buildN" but ignore
12506    potential run-time traps and exceptions that fold must preserve.  */
12507 
12508 #define START_FOLD_INIT \
12509   int saved_signaling_nans = flag_signaling_nans;\
12510   int saved_trapping_math = flag_trapping_math;\
12511   int saved_rounding_math = flag_rounding_math;\
12512   int saved_trapv = flag_trapv;\
12513   int saved_folding_initializer = folding_initializer;\
12514   flag_signaling_nans = 0;\
12515   flag_trapping_math = 0;\
12516   flag_rounding_math = 0;\
12517   flag_trapv = 0;\
12518   folding_initializer = 1;
12519 
12520 #define END_FOLD_INIT \
12521   flag_signaling_nans = saved_signaling_nans;\
12522   flag_trapping_math = saved_trapping_math;\
12523   flag_rounding_math = saved_rounding_math;\
12524   flag_trapv = saved_trapv;\
12525   folding_initializer = saved_folding_initializer;
12526 
12527 tree
12528 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12529 			     tree type, tree op)
12530 {
12531   tree result;
12532   START_FOLD_INIT;
12533 
12534   result = fold_build1_loc (loc, code, type, op);
12535 
12536   END_FOLD_INIT;
12537   return result;
12538 }
12539 
12540 tree
12541 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12542 			     tree type, tree op0, tree op1)
12543 {
12544   tree result;
12545   START_FOLD_INIT;
12546 
12547   result = fold_build2_loc (loc, code, type, op0, op1);
12548 
12549   END_FOLD_INIT;
12550   return result;
12551 }
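
/* Sketch: within an initializer, 1.0 / 0.0 may fold to +Inf even when
   -ftrapping-math is in effect, because START_FOLD_INIT clears
   flag_trapping_math around the fold (assuming the target's float mode
   has infinities):

     tree inf = fold_build2_initializer_loc
       (UNKNOWN_LOCATION, RDIV_EXPR, double_type_node,
	build_real (double_type_node, dconst1),
	build_real (double_type_node, dconst0));  */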
12552 
12553 tree
12554 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12555 				       int nargs, tree *argarray)
12556 {
12557   tree result;
12558   START_FOLD_INIT;
12559 
12560   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12561 
12562   END_FOLD_INIT;
12563   return result;
12564 }
12565 
12566 #undef START_FOLD_INIT
12567 #undef END_FOLD_INIT
12568 
12569 /* Determine if the first argument is a multiple of the second argument.
12570    Return 0 if it is not, or if we cannot easily determine it to be.
12571 
12572    An example of the sort of thing we care about (at this point; this routine
12573    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12574    fold cases do now) is discovering that
12575 
12576      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12577 
12578    is a multiple of
12579 
12580      SAVE_EXPR (J * 8)
12581 
12582    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12583 
12584    This code also handles discovering that
12585 
12586      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12587 
12588    is a multiple of 8 so we don't have to worry about dealing with a
12589    possible remainder.
12590 
12591    Note that we *look* inside a SAVE_EXPR only to determine how it was
12592    calculated; it is not safe for fold to do much of anything else with the
12593    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12594    at run time.  For example, the latter example above *cannot* be implemented
12595    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12596    evaluation time of the original SAVE_EXPR is not necessarily the same at
12597    the time the new expression is evaluated.  The only optimization of this
12598    sort that would be valid is changing
12599 
12600      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12601 
12602    divided by 8 to
12603 
12604      SAVE_EXPR (I) * SAVE_EXPR (J)
12605 
12606    (where the same SAVE_EXPR (J) is used in the original and the
12607    transformed version).  */
12608 
12609 int
12610 multiple_of_p (tree type, const_tree top, const_tree bottom)
12611 {
12612   gimple *stmt;
12613   tree t1, op1, op2;
12614 
12615   if (operand_equal_p (top, bottom, 0))
12616     return 1;
12617 
12618   if (TREE_CODE (type) != INTEGER_TYPE)
12619     return 0;
12620 
12621   switch (TREE_CODE (top))
12622     {
12623     case BIT_AND_EXPR:
12624       /* Bitwise and provides a power of two multiple.  If the mask is
12625 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12626       if (!integer_pow2p (bottom))
12627 	return 0;
12628       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12629 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12630 
12631     case MULT_EXPR:
12632       if (TREE_CODE (bottom) == INTEGER_CST)
12633 	{
12634 	  op1 = TREE_OPERAND (top, 0);
12635 	  op2 = TREE_OPERAND (top, 1);
12636 	  if (TREE_CODE (op1) == INTEGER_CST)
12637 	    std::swap (op1, op2);
12638 	  if (TREE_CODE (op2) == INTEGER_CST)
12639 	    {
12640 	      if (multiple_of_p (type, op2, bottom))
12641 		return 1;
12642 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
12643 	      if (multiple_of_p (type, bottom, op2))
12644 		{
12645 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12646 						 wi::to_widest (op2));
12647 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12648 		    {
12649 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12650 		      return multiple_of_p (type, op1, op2);
12651 		    }
12652 		}
12653 	      return multiple_of_p (type, op1, bottom);
12654 	    }
12655 	}
12656       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12657 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12658 
12659     case MINUS_EXPR:
12660       /* It is impossible to prove precisely whether op0 - op1 is a
12661 	 multiple of bottom, so be conservative here and check whether
12662 	 both op0 and op1 are multiples of bottom.  Note we check the
12663 	 second operand first since it's usually simpler.  */
12664       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12665 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12666 
12667     case PLUS_EXPR:
12668       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12669 	 as op0 - 3 if the expression has unsigned type.  For example,
12670 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12671       op1 = TREE_OPERAND (top, 1);
12672       if (TYPE_UNSIGNED (type)
12673 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12674 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12675       return (multiple_of_p (type, op1, bottom)
12676 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12677 
12678     case LSHIFT_EXPR:
12679       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12680 	{
12681 	  op1 = TREE_OPERAND (top, 1);
12682 	  /* const_binop may not detect overflow correctly,
12683 	     so check for it explicitly here.  */
12684 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12685 			 wi::to_wide (op1))
12686 	      && (t1 = fold_convert (type,
12687 				     const_binop (LSHIFT_EXPR, size_one_node,
12688 						  op1))) != 0
12689 	      && !TREE_OVERFLOW (t1))
12690 	    return multiple_of_p (type, t1, bottom);
12691 	}
12692       return 0;
12693 
12694     case NOP_EXPR:
12695       /* Can't handle conversions from non-integral or wider integral type.  */
12696       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12697 	  || (TYPE_PRECISION (type)
12698 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12699 	return 0;
12700 
12701       /* fall through */
12702 
12703     case SAVE_EXPR:
12704       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12705 
12706     case COND_EXPR:
12707       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12708 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12709 
12710     case INTEGER_CST:
12711       if (TREE_CODE (bottom) != INTEGER_CST
12712 	  || integer_zerop (bottom)
12713 	  || (TYPE_UNSIGNED (type)
12714 	      && (tree_int_cst_sgn (top) < 0
12715 		  || tree_int_cst_sgn (bottom) < 0)))
12716 	return 0;
12717       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12718 				SIGNED);
12719 
12720     case SSA_NAME:
12721       if (TREE_CODE (bottom) == INTEGER_CST
12722 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12723 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12724 	{
12725 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12726 
12727 	  /* Check for special cases to see if top is defined as a multiple
12728 	     of bottom:
12729 
12730 	       top = (X & ~(bottom - 1)) ; bottom is power of 2
12731 
12732 	     or
12733 
12734 	       Y = X % bottom
12735 	       top = X - Y.  */
12736 	  if (code == BIT_AND_EXPR
12737 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12738 	      && TREE_CODE (op2) == INTEGER_CST
12739 	      && integer_pow2p (bottom)
12740 	      && wi::multiple_of_p (wi::to_widest (op2),
12741 				    wi::to_widest (bottom), UNSIGNED))
12742 	    return 1;
12743 
12744 	  op1 = gimple_assign_rhs1 (stmt);
12745 	  if (code == MINUS_EXPR
12746 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12747 	      && TREE_CODE (op2) == SSA_NAME
12748 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12749 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12750 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12751 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12752 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12753 	    return 1;
12754 	}
12755 
12756       /* fall through */
12757 
12758     default:
12759       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12760 	return multiple_p (wi::to_poly_widest (top),
12761 			   wi::to_poly_widest (bottom));
12762 
12763       return 0;
12764     }
12765 }
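
/* Sketch: multiple_of_p (sizetype, build_int_cst (sizetype, 24),
   build_int_cst (sizetype, 8)) returns 1 through the INTEGER_CST case,
   while asking whether I * (J * 8) is a multiple of 8 succeeds through
   the MULT_EXPR case with no knowledge of I or J.  */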
12766 
12767 #define tree_expr_nonnegative_warnv_p(X, Y) \
12768   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12769 
12770 #define RECURSE(X) \
12771   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
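
/* A direct call to tree_expr_nonnegative_warnv_p between here and the
   matching #undef below is therefore a compile-time error; RECURSE
   defeats the poisoning macro by parenthesizing the function name, and
   threads STRICT_OVERFLOW_P plus an incremented DEPTH through
   automatically.  */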
12772 
12773 /* Return true if CODE or TYPE is known to be non-negative. */
12774 
12775 static bool
12776 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12777 {
12778   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12779       && truth_value_p (code))
12780     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12781        have a signed:1 type (where the values are -1 and 0).  */
12782     return true;
12783   return false;
12784 }
12785 
12786 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12787    value is based on the assumption that signed overflow is undefined,
12788    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12789    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12790 
12791 bool
12792 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12793 				bool *strict_overflow_p, int depth)
12794 {
12795   if (TYPE_UNSIGNED (type))
12796     return true;
12797 
12798   switch (code)
12799     {
12800     case ABS_EXPR:
12801       /* We can't return 1 if flag_wrapv is set because
12802 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12803       if (!ANY_INTEGRAL_TYPE_P (type))
12804 	return true;
12805       if (TYPE_OVERFLOW_UNDEFINED (type))
12806 	{
12807 	  *strict_overflow_p = true;
12808 	  return true;
12809 	}
12810       break;
12811 
12812     case NON_LVALUE_EXPR:
12813     case FLOAT_EXPR:
12814     case FIX_TRUNC_EXPR:
12815       return RECURSE (op0);
12816 
12817     CASE_CONVERT:
12818       {
12819 	tree inner_type = TREE_TYPE (op0);
12820 	tree outer_type = type;
12821 
12822 	if (TREE_CODE (outer_type) == REAL_TYPE)
12823 	  {
12824 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12825 	      return RECURSE (op0);
12826 	    if (INTEGRAL_TYPE_P (inner_type))
12827 	      {
12828 		if (TYPE_UNSIGNED (inner_type))
12829 		  return true;
12830 		return RECURSE (op0);
12831 	      }
12832 	  }
12833 	else if (INTEGRAL_TYPE_P (outer_type))
12834 	  {
12835 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12836 	      return RECURSE (op0);
12837 	    if (INTEGRAL_TYPE_P (inner_type))
12838 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12839 		      && TYPE_UNSIGNED (inner_type);
12840 	  }
12841       }
12842       break;
12843 
12844     default:
12845       return tree_simple_nonnegative_warnv_p (code, type);
12846     }
12847 
12848   /* We don't know the sign of `t', so be conservative and return false.  */
12849   return false;
12850 }
12851 
12852 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12853    value is based on the assumption that signed overflow is undefined,
12854    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12855    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12856 
12857 bool
12858 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12859 				 tree op1, bool *strict_overflow_p,
12860 				 int depth)
12861 {
12862   if (TYPE_UNSIGNED (type))
12863     return true;
12864 
12865   switch (code)
12866     {
12867     case POINTER_PLUS_EXPR:
12868     case PLUS_EXPR:
12869       if (FLOAT_TYPE_P (type))
12870 	return RECURSE (op0) && RECURSE (op1);
12871 
12872       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12873 	 both unsigned and at least 2 bits shorter than the result.  */
12874       if (TREE_CODE (type) == INTEGER_TYPE
12875 	  && TREE_CODE (op0) == NOP_EXPR
12876 	  && TREE_CODE (op1) == NOP_EXPR)
12877 	{
12878 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12879 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12880 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12881 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12882 	    {
12883 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12884 				       TYPE_PRECISION (inner2)) + 1;
12885 	      return prec < TYPE_PRECISION (type);
12886 	    }
12887 	}
12888       break;
12889 
12890     case MULT_EXPR:
12891       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12892 	{
12893 	  /* x * x is always non-negative for floating point x
12894 	     or without overflow.  */
12895 	  if (operand_equal_p (op0, op1, 0)
12896 	      || (RECURSE (op0) && RECURSE (op1)))
12897 	    {
12898 	      if (ANY_INTEGRAL_TYPE_P (type)
12899 		  && TYPE_OVERFLOW_UNDEFINED (type))
12900 		*strict_overflow_p = true;
12901 	      return true;
12902 	    }
12903 	}
12904 
12905       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12906 	 both unsigned and their combined precision is less than that of the result.  */
12907       if (TREE_CODE (type) == INTEGER_TYPE
12908 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12909 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12910 	{
12911 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12912 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12913 	    : TREE_TYPE (op0);
12914 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12915 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12916 	    : TREE_TYPE (op1);
12917 
12918 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12919 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12920 
12921 	  if (TREE_CODE (op0) == INTEGER_CST)
12922 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12923 
12924 	  if (TREE_CODE (op1) == INTEGER_CST)
12925 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12926 
12927 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12928 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12929 	    {
12930 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12931 		? tree_int_cst_min_precision (op0, UNSIGNED)
12932 		: TYPE_PRECISION (inner0);
12933 
12934 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12935 		? tree_int_cst_min_precision (op1, UNSIGNED)
12936 		: TYPE_PRECISION (inner1);
12937 
12938 	      return precision0 + precision1 < TYPE_PRECISION (type);
12939 	    }
12940 	}
12941       return false;
12942 
12943     case BIT_AND_EXPR:
12944     case MAX_EXPR:
12945       return RECURSE (op0) || RECURSE (op1);
12946 
12947     case BIT_IOR_EXPR:
12948     case BIT_XOR_EXPR:
12949     case MIN_EXPR:
12950     case RDIV_EXPR:
12951     case TRUNC_DIV_EXPR:
12952     case CEIL_DIV_EXPR:
12953     case FLOOR_DIV_EXPR:
12954     case ROUND_DIV_EXPR:
12955       return RECURSE (op0) && RECURSE (op1);
12956 
12957     case TRUNC_MOD_EXPR:
12958       return RECURSE (op0);
12959 
12960     case FLOOR_MOD_EXPR:
12961       return RECURSE (op1);
12962 
12963     case CEIL_MOD_EXPR:
12964     case ROUND_MOD_EXPR:
12965     default:
12966       return tree_simple_nonnegative_warnv_p (code, type);
12967     }
12968 
12969   /* We don't know the sign of `t', so be conservative and return false.  */
12970   return false;
12971 }
12972 
12973 /* Return true if T is known to be non-negative.  If the return
12974    value is based on the assumption that signed overflow is undefined,
12975    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12976    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12977 
12978 bool
12979 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12980 {
12981   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12982     return true;
12983 
12984   switch (TREE_CODE (t))
12985     {
12986     case INTEGER_CST:
12987       return tree_int_cst_sgn (t) >= 0;
12988 
12989     case REAL_CST:
12990       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12991 
12992     case FIXED_CST:
12993       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12994 
12995     case COND_EXPR:
12996       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12997 
12998     case SSA_NAME:
12999       /* Limit the depth of recursion to avoid quadratic behavior.
13000 	 This is expected to catch almost all occurrences in practice.
13001 	 If this code misses important cases that unbounded recursion
13002 	 would not, passes that need this information could be revised
13003 	 to provide it through dataflow propagation.  */
13004       return (!name_registered_for_update_p (t)
13005 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13006 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13007 						  strict_overflow_p, depth));
13008 
13009     default:
13010       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13011     }
13012 }
13013 
13014 /* Return true if a call to FN with arguments ARG0 and ARG1 is known to
13015    be non-negative.  If the return value is based on the assumption that
13016    signed overflow is undefined, set *STRICT_OVERFLOW_P to true; otherwise,
13017    don't change *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13018 
13019 bool
13020 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13021 			       bool *strict_overflow_p, int depth)
13022 {
13023   switch (fn)
13024     {
13025     CASE_CFN_ACOS:
13026     CASE_CFN_ACOSH:
13027     CASE_CFN_CABS:
13028     CASE_CFN_COSH:
13029     CASE_CFN_ERFC:
13030     CASE_CFN_EXP:
13031     CASE_CFN_EXP10:
13032     CASE_CFN_EXP2:
13033     CASE_CFN_FABS:
13034     CASE_CFN_FDIM:
13035     CASE_CFN_HYPOT:
13036     CASE_CFN_POW10:
13037     CASE_CFN_FFS:
13038     CASE_CFN_PARITY:
13039     CASE_CFN_POPCOUNT:
13040     CASE_CFN_CLZ:
13041     CASE_CFN_CLRSB:
13042     case CFN_BUILT_IN_BSWAP32:
13043     case CFN_BUILT_IN_BSWAP64:
13044       /* Always true.  */
13045       return true;
13046 
13047     CASE_CFN_SQRT:
13048     CASE_CFN_SQRT_FN:
13049       /* sqrt(-0.0) is -0.0.  */
13050       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13051 	return true;
13052       return RECURSE (arg0);
13053 
13054     CASE_CFN_ASINH:
13055     CASE_CFN_ATAN:
13056     CASE_CFN_ATANH:
13057     CASE_CFN_CBRT:
13058     CASE_CFN_CEIL:
13059     CASE_CFN_CEIL_FN:
13060     CASE_CFN_ERF:
13061     CASE_CFN_EXPM1:
13062     CASE_CFN_FLOOR:
13063     CASE_CFN_FLOOR_FN:
13064     CASE_CFN_FMOD:
13065     CASE_CFN_FREXP:
13066     CASE_CFN_ICEIL:
13067     CASE_CFN_IFLOOR:
13068     CASE_CFN_IRINT:
13069     CASE_CFN_IROUND:
13070     CASE_CFN_LCEIL:
13071     CASE_CFN_LDEXP:
13072     CASE_CFN_LFLOOR:
13073     CASE_CFN_LLCEIL:
13074     CASE_CFN_LLFLOOR:
13075     CASE_CFN_LLRINT:
13076     CASE_CFN_LLROUND:
13077     CASE_CFN_LRINT:
13078     CASE_CFN_LROUND:
13079     CASE_CFN_MODF:
13080     CASE_CFN_NEARBYINT:
13081     CASE_CFN_NEARBYINT_FN:
13082     CASE_CFN_RINT:
13083     CASE_CFN_RINT_FN:
13084     CASE_CFN_ROUND:
13085     CASE_CFN_ROUND_FN:
13086     CASE_CFN_SCALB:
13087     CASE_CFN_SCALBLN:
13088     CASE_CFN_SCALBN:
13089     CASE_CFN_SIGNBIT:
13090     CASE_CFN_SIGNIFICAND:
13091     CASE_CFN_SINH:
13092     CASE_CFN_TANH:
13093     CASE_CFN_TRUNC:
13094     CASE_CFN_TRUNC_FN:
13095       /* True if the 1st argument is nonnegative.  */
13096       return RECURSE (arg0);
13097 
13098     CASE_CFN_FMAX:
13099     CASE_CFN_FMAX_FN:
13100       /* True if the 1st OR the 2nd argument is nonnegative.  */
13101       return RECURSE (arg0) || RECURSE (arg1);
13102 
13103     CASE_CFN_FMIN:
13104     CASE_CFN_FMIN_FN:
13105       /* True if the 1st AND 2nd arguments are nonnegative.  */
13106       return RECURSE (arg0) && RECURSE (arg1);
13107 
13108     CASE_CFN_COPYSIGN:
13109     CASE_CFN_COPYSIGN_FN:
13110       /* True if the 2nd argument is nonnegative.  */
13111       return RECURSE (arg1);
13112 
13113     CASE_CFN_POWI:
13114       /* True if the 1st argument is nonnegative or the second
13115 	 argument is an even integer.  */
13116       if (TREE_CODE (arg1) == INTEGER_CST
13117 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13118 	return true;
13119       return RECURSE (arg0);
13120 
13121     CASE_CFN_POW:
13122       /* True if the 1st argument is nonnegative or the second
13123 	 argument is an even integer valued real.  */
13124       if (TREE_CODE (arg1) == REAL_CST)
13125 	{
13126 	  REAL_VALUE_TYPE c;
13127 	  HOST_WIDE_INT n;
13128 
13129 	  c = TREE_REAL_CST (arg1);
13130 	  n = real_to_integer (&c);
13131 	  if ((n & 1) == 0)
13132 	    {
13133 	      REAL_VALUE_TYPE cint;
13134 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13135 	      if (real_identical (&c, &cint))
13136 		return true;
13137 	    }
13138 	}
13139       return RECURSE (arg0);
13140 
13141     default:
13142       break;
13143     }
13144   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13145 }
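
/* Sketch: for a call such as pow (x, 2.0) the function above returns
   true regardless of the sign of X, because 2.0 is an even
   integer-valued REAL_CST; pow (x, 3.0) instead falls back to asking
   whether X itself is nonnegative.  */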
13146 
13147 /* Return true if T is known to be non-negative.  If the return
13148    value is based on the assumption that signed overflow is undefined,
13149    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13150    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13151 
13152 static bool
13153 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13154 {
13155   enum tree_code code = TREE_CODE (t);
13156   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13157     return true;
13158 
13159   switch (code)
13160     {
13161     case TARGET_EXPR:
13162       {
13163 	tree temp = TARGET_EXPR_SLOT (t);
13164 	t = TARGET_EXPR_INITIAL (t);
13165 
13166 	/* If the initializer is non-void, then it's a normal expression
13167 	   that will be assigned to the slot.  */
13168 	if (!VOID_TYPE_P (t))
13169 	  return RECURSE (t);
13170 
13171 	/* Otherwise, the initializer sets the slot in some way.  One common
13172 	   way is an assignment statement at the end of the initializer.  */
13173 	while (1)
13174 	  {
13175 	    if (TREE_CODE (t) == BIND_EXPR)
13176 	      t = expr_last (BIND_EXPR_BODY (t));
13177 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13178 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13179 	      t = expr_last (TREE_OPERAND (t, 0));
13180 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13181 	      t = expr_last (t);
13182 	    else
13183 	      break;
13184 	  }
13185 	if (TREE_CODE (t) == MODIFY_EXPR
13186 	    && TREE_OPERAND (t, 0) == temp)
13187 	  return RECURSE (TREE_OPERAND (t, 1));
13188 
13189 	return false;
13190       }
13191 
13192     case CALL_EXPR:
13193       {
13194 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
13195 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
13196 
13197 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13198 					      get_call_combined_fn (t),
13199 					      arg0,
13200 					      arg1,
13201 					      strict_overflow_p, depth);
13202       }
13203     case COMPOUND_EXPR:
13204     case MODIFY_EXPR:
13205       return RECURSE (TREE_OPERAND (t, 1));
13206 
13207     case BIND_EXPR:
13208       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13209 
13210     case SAVE_EXPR:
13211       return RECURSE (TREE_OPERAND (t, 0));
13212 
13213     default:
13214       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13215     }
13216 }
13217 
13218 #undef RECURSE
13219 #undef tree_expr_nonnegative_warnv_p
13220 
13221 /* Return true if T is known to be non-negative.  If the return
13222    value is based on the assumption that signed overflow is undefined,
13223    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13224    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13225 
13226 bool
13227 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13228 {
13229   enum tree_code code;
13230   if (t == error_mark_node)
13231     return false;
13232 
13233   code = TREE_CODE (t);
13234   switch (TREE_CODE_CLASS (code))
13235     {
13236     case tcc_binary:
13237     case tcc_comparison:
13238       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13239 					      TREE_TYPE (t),
13240 					      TREE_OPERAND (t, 0),
13241 					      TREE_OPERAND (t, 1),
13242 					      strict_overflow_p, depth);
13243 
13244     case tcc_unary:
13245       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13246 					     TREE_TYPE (t),
13247 					     TREE_OPERAND (t, 0),
13248 					     strict_overflow_p, depth);
13249 
13250     case tcc_constant:
13251     case tcc_declaration:
13252     case tcc_reference:
13253       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13254 
13255     default:
13256       break;
13257     }
13258 
13259   switch (code)
13260     {
13261     case TRUTH_AND_EXPR:
13262     case TRUTH_OR_EXPR:
13263     case TRUTH_XOR_EXPR:
13264       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13265 					      TREE_TYPE (t),
13266 					      TREE_OPERAND (t, 0),
13267 					      TREE_OPERAND (t, 1),
13268 					      strict_overflow_p, depth);
13269     case TRUTH_NOT_EXPR:
13270       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13271 					     TREE_TYPE (t),
13272 					     TREE_OPERAND (t, 0),
13273 					     strict_overflow_p, depth);
13274 
13275     case COND_EXPR:
13276     case CONSTRUCTOR:
13277     case OBJ_TYPE_REF:
13278     case ASSERT_EXPR:
13279     case ADDR_EXPR:
13280     case WITH_SIZE_EXPR:
13281     case SSA_NAME:
13282       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13283 
13284     default:
13285       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13286     }
13287 }
13288 
13289 /* Return true if `t' is known to be non-negative.  Handle warnings
13290    about undefined signed overflow.  */
13291 
13292 bool
13293 tree_expr_nonnegative_p (tree t)
13294 {
13295   bool ret, strict_overflow_p;
13296 
13297   strict_overflow_p = false;
13298   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13299   if (strict_overflow_p)
13300     fold_overflow_warning (("assuming signed overflow does not occur when "
13301 			    "determining that expression is always "
13302 			    "non-negative"),
13303 			   WARN_STRICT_OVERFLOW_MISC);
13304   return ret;
13305 }
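
/* Sketch: tree_expr_nonnegative_p is trivially true for an unsigned
   expression or a nonnegative INTEGER_CST.  For signed X, an expression
   such as X * X is known nonnegative only by assuming signed overflow
   is undefined, in which case the call above may emit a
   -Wstrict-overflow warning through fold_overflow_warning.  */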
13306 
13307 
13308 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13309    For floating point we further ensure that T is not denormal.
13310    Similar logic is present in nonzero_address in rtlanal.h.
13311 
13312    If the return value is based on the assumption that signed overflow
13313    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13314    change *STRICT_OVERFLOW_P.  */
13315 
13316 bool
13317 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13318 				 bool *strict_overflow_p)
13319 {
13320   switch (code)
13321     {
13322     case ABS_EXPR:
13323       return tree_expr_nonzero_warnv_p (op0,
13324 					strict_overflow_p);
13325 
13326     case NOP_EXPR:
13327       {
13328 	tree inner_type = TREE_TYPE (op0);
13329 	tree outer_type = type;
13330 
13331 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13332 		&& tree_expr_nonzero_warnv_p (op0,
13333 					      strict_overflow_p));
13334       }
13335       break;
13336 
13337     case NON_LVALUE_EXPR:
13338       return tree_expr_nonzero_warnv_p (op0,
13339 					strict_overflow_p);
13340 
13341     default:
13342       break;
13343   }
13344 
13345   return false;
13346 }
13347 
13348 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13349    For floating point we further ensure that T is not denormal.
13350    Similar logic is present in nonzero_address in rtlanal.h.
13351 
13352    If the return value is based on the assumption that signed overflow
13353    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13354    change *STRICT_OVERFLOW_P.  */
13355 
13356 bool
13357 tree_binary_nonzero_warnv_p (enum tree_code code,
13358 			     tree type,
13359 			     tree op0,
13360 			     tree op1, bool *strict_overflow_p)
13361 {
13362   bool sub_strict_overflow_p;
13363   switch (code)
13364     {
13365     case POINTER_PLUS_EXPR:
13366     case PLUS_EXPR:
13367       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13368 	{
13369 	  /* In the presence of negative values it is hard
13370 	     to say anything definite.  */
13371 	  sub_strict_overflow_p = false;
13372 	  if (!tree_expr_nonnegative_warnv_p (op0,
13373 					      &sub_strict_overflow_p)
13374 	      || !tree_expr_nonnegative_warnv_p (op1,
13375 						 &sub_strict_overflow_p))
13376 	    return false;
13377 	  /* One of the operands must be positive and the other non-negative.  */
13378 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13379 	     overflows, on a twos-complement machine the sum of two
13380 	     nonnegative numbers can never be zero.  */
13381 	  return (tree_expr_nonzero_warnv_p (op0,
13382 					     strict_overflow_p)
13383 		  || tree_expr_nonzero_warnv_p (op1,
13384 						strict_overflow_p));
13385 	}
13386       break;
13387 
13388     case MULT_EXPR:
13389       if (TYPE_OVERFLOW_UNDEFINED (type))
13390 	{
13391 	  if (tree_expr_nonzero_warnv_p (op0,
13392 					 strict_overflow_p)
13393 	      && tree_expr_nonzero_warnv_p (op1,
13394 					    strict_overflow_p))
13395 	    {
13396 	      *strict_overflow_p = true;
13397 	      return true;
13398 	    }
13399 	}
13400       break;
13401 
13402     case MIN_EXPR:
13403       sub_strict_overflow_p = false;
13404       if (tree_expr_nonzero_warnv_p (op0,
13405 				     &sub_strict_overflow_p)
13406 	  && tree_expr_nonzero_warnv_p (op1,
13407 					&sub_strict_overflow_p))
13408 	{
13409 	  if (sub_strict_overflow_p)
13410 	    *strict_overflow_p = true;
13411 	}
13412       break;
13413 
13414     case MAX_EXPR:
13415       sub_strict_overflow_p = false;
13416       if (tree_expr_nonzero_warnv_p (op0,
13417 				     &sub_strict_overflow_p))
13418 	{
13419 	  if (sub_strict_overflow_p)
13420 	    *strict_overflow_p = true;
13421 
13422 	  /* When both operands are nonzero, then MAX must be too.  */
13423 	  if (tree_expr_nonzero_warnv_p (op1,
13424 					 strict_overflow_p))
13425 	    return true;
13426 
13427 	  /* MAX where operand 0 is positive is positive.  */
13428 	  return tree_expr_nonnegative_warnv_p (op0,
13429 					       strict_overflow_p);
13430 	}
13431       /* MAX where operand 1 is positive is positive.  */
13432       else if (tree_expr_nonzero_warnv_p (op1,
13433 					  &sub_strict_overflow_p)
13434 	       && tree_expr_nonnegative_warnv_p (op1,
13435 						 &sub_strict_overflow_p))
13436 	{
13437 	  if (sub_strict_overflow_p)
13438 	    *strict_overflow_p = true;
13439 	  return true;
13440 	}
13441       break;
13442 
13443     case BIT_IOR_EXPR:
13444       return (tree_expr_nonzero_warnv_p (op1,
13445 					 strict_overflow_p)
13446 	      || tree_expr_nonzero_warnv_p (op0,
13447 					    strict_overflow_p));
13448 
13449     default:
13450       break;
13451   }
13452 
13453   return false;
13454 }
13455 
13456 /* Return true when T is an address and is known to be nonzero.
13457    For floating point we further ensure that T is not denormal.
13458    Similar logic is present in nonzero_address in rtlanal.h.
13459 
13460    If the return value is based on the assumption that signed overflow
13461    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13462    change *STRICT_OVERFLOW_P.  */
13463 
13464 bool
13465 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13466 {
13467   bool sub_strict_overflow_p;
13468   switch (TREE_CODE (t))
13469     {
13470     case INTEGER_CST:
13471       return !integer_zerop (t);
13472 
13473     case ADDR_EXPR:
13474       {
13475 	tree base = TREE_OPERAND (t, 0);
13476 
13477 	if (!DECL_P (base))
13478 	  base = get_base_address (base);
13479 
13480 	if (base && TREE_CODE (base) == TARGET_EXPR)
13481 	  base = TARGET_EXPR_SLOT (base);
13482 
13483 	if (!base)
13484 	  return false;
13485 
13486 	/* For objects in symbol table check if we know they are non-zero.
13487 	   Don't do anything for variables and functions before symtab is built;
13488 	   it is quite possible that they will be declared weak later.  */
13489 	int nonzero_addr = maybe_nonzero_address (base);
13490 	if (nonzero_addr >= 0)
13491 	  return nonzero_addr;
13492 
13493 	/* Constants are never weak.  */
13494 	if (CONSTANT_CLASS_P (base))
13495 	  return true;
13496 
13497 	return false;
13498       }
13499 
13500     case COND_EXPR:
13501       sub_strict_overflow_p = false;
13502       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13503 				     &sub_strict_overflow_p)
13504 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13505 					&sub_strict_overflow_p))
13506 	{
13507 	  if (sub_strict_overflow_p)
13508 	    *strict_overflow_p = true;
13509 	  return true;
13510 	}
13511       break;
13512 
13513     case SSA_NAME:
13514       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13515 	break;
13516       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13517 
13518     default:
13519       break;
13520     }
13521   return false;
13522 }
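
/* Sketch: the ADDR_EXPR case above answers true for the address of an
   ordinary local or non-weak static object and for constants such as
   string literals, but conservatively answers false for a symbol that
   might still be declared weak before the symbol table is built.  */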
13523 
13524 #define integer_valued_real_p(X) \
13525   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13526 
13527 #define RECURSE(X) \
13528   ((integer_valued_real_p) (X, depth + 1))
13529 
13530 /* Return true if the floating point result of (CODE OP0) has an
13531    integer value.  We also allow +Inf, -Inf and NaN to be considered
13532    integer values. Return false for signaling NaN.
13533 
13534    DEPTH is the current nesting depth of the query.  */
13535 
13536 bool
13537 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13538 {
13539   switch (code)
13540     {
13541     case FLOAT_EXPR:
13542       return true;
13543 
13544     case ABS_EXPR:
13545       return RECURSE (op0);
13546 
13547     CASE_CONVERT:
13548       {
13549 	tree type = TREE_TYPE (op0);
13550 	if (TREE_CODE (type) == INTEGER_TYPE)
13551 	  return true;
13552 	if (TREE_CODE (type) == REAL_TYPE)
13553 	  return RECURSE (op0);
13554 	break;
13555       }
13556 
13557     default:
13558       break;
13559     }
13560   return false;
13561 }
13562 
13563 /* Return true if the floating point result of (CODE OP0 OP1) has an
13564    integer value.  We also allow +Inf, -Inf and NaN to be considered
13565    integer values. Return false for signaling NaN.
13566 
13567    DEPTH is the current nesting depth of the query.  */
13568 
13569 bool
13570 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13571 {
13572   switch (code)
13573     {
13574     case PLUS_EXPR:
13575     case MINUS_EXPR:
13576     case MULT_EXPR:
13577     case MIN_EXPR:
13578     case MAX_EXPR:
13579       return RECURSE (op0) && RECURSE (op1);
13580 
13581     default:
13582       break;
13583     }
13584   return false;
13585 }
13586 
13587 /* Return true if the floating point result of calling FN with arguments
13588    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to
13589    be considered integer values.  Return false for signaling NaN.  If FN
13590    takes fewer than 2 arguments, the remaining ARGn are null.
13591 
13592    DEPTH is the current nesting depth of the query.  */
13593 
13594 bool
13595 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13596 {
13597   switch (fn)
13598     {
13599     CASE_CFN_CEIL:
13600     CASE_CFN_CEIL_FN:
13601     CASE_CFN_FLOOR:
13602     CASE_CFN_FLOOR_FN:
13603     CASE_CFN_NEARBYINT:
13604     CASE_CFN_NEARBYINT_FN:
13605     CASE_CFN_RINT:
13606     CASE_CFN_RINT_FN:
13607     CASE_CFN_ROUND:
13608     CASE_CFN_ROUND_FN:
13609     CASE_CFN_TRUNC:
13610     CASE_CFN_TRUNC_FN:
13611       return true;
13612 
13613     CASE_CFN_FMIN:
13614     CASE_CFN_FMIN_FN:
13615     CASE_CFN_FMAX:
13616     CASE_CFN_FMAX_FN:
13617       return RECURSE (arg0) && RECURSE (arg1);
13618 
13619     default:
13620       break;
13621     }
13622   return false;
13623 }
13624 
13625 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13626    has an integer value.  We also allow +Inf, -Inf and NaN to be
13627    considered integer values. Return false for signaling NaN.
13628 
13629    DEPTH is the current nesting depth of the query.  */
13630 
13631 bool
13632 integer_valued_real_single_p (tree t, int depth)
13633 {
13634   switch (TREE_CODE (t))
13635     {
13636     case REAL_CST:
13637       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13638 
13639     case COND_EXPR:
13640       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13641 
13642     case SSA_NAME:
13643       /* Limit the depth of recursion to avoid quadratic behavior.
13644 	 This is expected to catch almost all occurrences in practice.
13645 	 If this code misses important cases that unbounded recursion
13646 	 would not, passes that need this information could be revised
13647 	 to provide it through dataflow propagation.  */
13648       return (!name_registered_for_update_p (t)
13649 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13650 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13651 						    depth));
13652 
13653     default:
13654       break;
13655     }
13656   return false;
13657 }
13658 
13659 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13660    has an integer value.  We also allow +Inf, -Inf and NaN to be
13661    considered integer values.  Return false for signaling NaN.
13662 
13663    DEPTH is the current nesting depth of the query.  */
13664 
13665 static bool
13666 integer_valued_real_invalid_p (tree t, int depth)
13667 {
13668   switch (TREE_CODE (t))
13669     {
13670     case COMPOUND_EXPR:
13671     case MODIFY_EXPR:
13672     case BIND_EXPR:
13673       return RECURSE (TREE_OPERAND (t, 1));
13674 
13675     case SAVE_EXPR:
13676       return RECURSE (TREE_OPERAND (t, 0));
13677 
13678     default:
13679       break;
13680     }
13681   return false;
13682 }
13683 
13684 #undef RECURSE
13685 #undef integer_valued_real_p
13686 
13687 /* Return true if the floating point expression T has an integer value.
13688    We also allow +Inf, -Inf and NaN to be considered integer values.
13689    Return false for signaling NaN.
13690 
13691    DEPTH is the current nesting depth of the query.  */
13692 
13693 bool
13694 integer_valued_real_p (tree t, int depth)
13695 {
13696   if (t == error_mark_node)
13697     return false;
13698 
13699   tree_code code = TREE_CODE (t);
13700   switch (TREE_CODE_CLASS (code))
13701     {
13702     case tcc_binary:
13703     case tcc_comparison:
13704       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13705 					   TREE_OPERAND (t, 1), depth);
13706 
13707     case tcc_unary:
13708       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13709 
13710     case tcc_constant:
13711     case tcc_declaration:
13712     case tcc_reference:
13713       return integer_valued_real_single_p (t, depth);
13714 
13715     default:
13716       break;
13717     }
13718 
13719   switch (code)
13720     {
13721     case COND_EXPR:
13722     case SSA_NAME:
13723       return integer_valued_real_single_p (t, depth);
13724 
13725     case CALL_EXPR:
13726       {
13727 	tree arg0 = (call_expr_nargs (t) > 0
13728 		     ? CALL_EXPR_ARG (t, 0)
13729 		     : NULL_TREE);
13730 	tree arg1 = (call_expr_nargs (t) > 1
13731 		     ? CALL_EXPR_ARG (t, 1)
13732 		     : NULL_TREE);
13733 	return integer_valued_real_call_p (get_call_combined_fn (t),
13734 					   arg0, arg1, depth);
13735       }
13736 
13737     default:
13738       return integer_valued_real_invalid_p (t, depth);
13739     }
13740 }
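
/* A usage sketch (hypothetical caller; assumes the usual global type
   nodes are initialized and DEPTH starts at 0):

     tree i = create_tmp_var_raw (integer_type_node, "i");
     tree d = fold_build1 (FLOAT_EXPR, double_type_node, i);
     bool ok = integer_valued_real_p (d, 0);

   OK is true: (double) i always has an integer value, which is what
   lets simplifications such as trunc ((double) i) -> (double) i fire.  */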
13741 
13742 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13743    attempt to fold the expression to a constant without modifying TYPE,
13744    OP0 or OP1.
13745 
13746    If the expression could be simplified to a constant, then return
13747    the constant.  If the expression would not be simplified to a
13748    constant, then return NULL_TREE.  */
13749 
13750 tree
13751 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13752 {
13753   tree tem = fold_binary (code, type, op0, op1);
13754   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13755 }
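
/* A minimal usage sketch (hypothetical caller):

     tree one = build_int_cst (integer_type_node, 1);
     tree two = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 one, one);

   TWO is the INTEGER_CST 2.  With a non-constant operand such as a
   VAR_DECL the fold may still simplify, but the result would not be
   TREE_CONSTANT, so NULL_TREE is returned instead.  */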
13756 
13757 /* Given the components of a unary expression CODE, TYPE and OP0,
13758    attempt to fold the expression to a constant without modifying
13759    TYPE or OP0.
13760 
13761    If the expression could be simplified to a constant, then return
13762    the constant.  If the expression would not be simplified to a
13763    constant, then return NULL_TREE.  */
13764 
13765 tree
13766 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13767 {
13768   tree tem = fold_unary (code, type, op0);
13769   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13770 }
13771 
13772 /* If EXP represents referencing an element in a constant string
13773    (either via pointer arithmetic or array indexing), return the
13774    tree representing the value accessed, otherwise return NULL.  */
13775 
13776 tree
13777 fold_read_from_constant_string (tree exp)
13778 {
13779   if ((TREE_CODE (exp) == INDIRECT_REF
13780        || TREE_CODE (exp) == ARRAY_REF)
13781       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13782     {
13783       tree exp1 = TREE_OPERAND (exp, 0);
13784       tree index;
13785       tree string;
13786       location_t loc = EXPR_LOCATION (exp);
13787 
13788       if (TREE_CODE (exp) == INDIRECT_REF)
13789 	string = string_constant (exp1, &index);
13790       else
13791 	{
13792 	  tree low_bound = array_ref_low_bound (exp);
13793 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13794 
13795 	  /* Optimize the special-case of a zero lower bound.
13796 
13797 	     We convert the low_bound to sizetype to avoid some problems
13798 	     with constant folding.  (E.g. suppose the lower bound is 1,
13799 	     and its mode is QI.  Without the conversion, (ARRAY
13800 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13801 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13802 	  if (! integer_zerop (low_bound))
13803 	    index = size_diffop_loc (loc, index,
13804 				 fold_convert_loc (loc, sizetype, low_bound));
13805 
13806 	  string = exp1;
13807 	}
13808 
13809       scalar_int_mode char_mode;
13810       if (string
13811 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13812 	  && TREE_CODE (string) == STRING_CST
13813 	  && TREE_CODE (index) == INTEGER_CST
13814 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13815 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13816 			  &char_mode)
13817 	  && GET_MODE_SIZE (char_mode) == 1)
13818 	return build_int_cst_type (TREE_TYPE (exp),
13819 				   (TREE_STRING_POINTER (string)
13820 				    [TREE_INT_CST_LOW (index)]));
13821     }
13822   return NULL;
13823 }
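
/* For example (a sketch): given the ARRAY_REF "abc"[1] with a
   byte-sized character type, the checks above succeed (constant
   string, constant in-bounds index) and the INTEGER_CST 'b' is
   returned; an index at or past TREE_STRING_LENGTH yields NULL.  */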
13824 
13825 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13826    an integer constant, real, or fixed-point constant.
13827 
13828    TYPE is the type of the result.  */
13829 
13830 static tree
13831 fold_negate_const (tree arg0, tree type)
13832 {
13833   tree t = NULL_TREE;
13834 
13835   switch (TREE_CODE (arg0))
13836     {
13837     case REAL_CST:
13838       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13839       break;
13840 
13841     case FIXED_CST:
13842       {
13843         FIXED_VALUE_TYPE f;
13844         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13845 					    &(TREE_FIXED_CST (arg0)), NULL,
13846 					    TYPE_SATURATING (type));
13847 	t = build_fixed (type, f);
13848 	/* Propagate overflow flags.  */
13849 	if (overflow_p | TREE_OVERFLOW (arg0))
13850 	  TREE_OVERFLOW (t) = 1;
13851 	break;
13852       }
13853 
13854     default:
13855       if (poly_int_tree_p (arg0))
13856 	{
13857 	  bool overflow;
13858 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13859 	  t = force_fit_type (type, res, 1,
13860 			      (overflow && ! TYPE_UNSIGNED (type))
13861 			      || TREE_OVERFLOW (arg0));
13862 	  break;
13863 	}
13864 
13865       gcc_unreachable ();
13866     }
13867 
13868   return t;
13869 }
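
/* Overflow note, by way of example: negating the INTEGER_CST INT_MIN
   in a signed type wraps back to INT_MIN; wi::neg reports the
   overflow, so force_fit_type returns an INT_MIN constant with
   TREE_OVERFLOW set.  */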
13870 
13871 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13872    an integer constant or real constant.
13873 
13874    TYPE is the type of the result.  */
13875 
13876 tree
13877 fold_abs_const (tree arg0, tree type)
13878 {
13879   tree t = NULL_TREE;
13880 
13881   switch (TREE_CODE (arg0))
13882     {
13883     case INTEGER_CST:
13884       {
13885         /* If the value is unsigned or non-negative, then the absolute value
13886 	   is the same as the ordinary value.  */
13887 	if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13888 	  t = arg0;
13889 
13890 	/* If the value is negative, then the absolute value is
13891 	   its negation.  */
13892 	else
13893 	  {
13894 	    bool overflow;
13895 	    wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13896 	    t = force_fit_type (type, val, -1,
13897 				overflow | TREE_OVERFLOW (arg0));
13898 	  }
13899       }
13900       break;
13901 
13902     case REAL_CST:
13903       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13904 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13905       else
13906 	t = arg0;
13907       break;
13908 
13909     default:
13910       gcc_unreachable ();
13911     }
13912 
13913   return t;
13914 }
13915 
13916 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13917    constant.  TYPE is the type of the result.  */
13918 
13919 static tree
13920 fold_not_const (const_tree arg0, tree type)
13921 {
13922   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13923 
13924   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13925 }
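
/* For example: with a 32-bit signed TYPE, ~5 folds to the INTEGER_CST
   -6; with an 8-bit unsigned TYPE (and a matching ARG0), force_fit_type
   truncates the complement to the type's precision, so ~5 folds
   to 250.  */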
13926 
13927 /* Given CODE, a relational operator, the target type, TYPE and two
13928    constant operands OP0 and OP1, return the result of the
13929    relational operation.  If the result is not a compile time
13930    constant, then return NULL_TREE.  */
13931 
13932 static tree
13933 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13934 {
13935   int result, invert;
13936 
13937   /* From here on, the only cases we handle are when the result is
13938      known to be a constant.  */
13939 
13940   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13941     {
13942       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13943       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13944 
13945       /* Handle the cases where either operand is a NaN.  */
13946       if (real_isnan (c0) || real_isnan (c1))
13947 	{
13948 	  switch (code)
13949 	    {
13950 	    case EQ_EXPR:
13951 	    case ORDERED_EXPR:
13952 	      result = 0;
13953 	      break;
13954 
13955 	    case NE_EXPR:
13956 	    case UNORDERED_EXPR:
13957 	    case UNLT_EXPR:
13958 	    case UNLE_EXPR:
13959 	    case UNGT_EXPR:
13960 	    case UNGE_EXPR:
13961 	    case UNEQ_EXPR:
13962               result = 1;
13963 	      break;
13964 
13965 	    case LT_EXPR:
13966 	    case LE_EXPR:
13967 	    case GT_EXPR:
13968 	    case GE_EXPR:
13969 	    case LTGT_EXPR:
13970 	      if (flag_trapping_math)
13971 		return NULL_TREE;
13972 	      result = 0;
13973 	      break;
13974 
13975 	    default:
13976 	      gcc_unreachable ();
13977 	    }
13978 
13979 	  return constant_boolean_node (result, type);
13980 	}
13981 
13982       return constant_boolean_node (real_compare (code, c0, c1), type);
13983     }
13984 
13985   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13986     {
13987       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13988       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13989       return constant_boolean_node (fixed_compare (code, c0, c1), type);
13990     }
13991 
13992   /* Handle equality/inequality of complex constants.  */
13993   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13994     {
13995       tree rcond = fold_relational_const (code, type,
13996 					  TREE_REALPART (op0),
13997 					  TREE_REALPART (op1));
13998       tree icond = fold_relational_const (code, type,
13999 					  TREE_IMAGPART (op0),
14000 					  TREE_IMAGPART (op1));
14001       if (code == EQ_EXPR)
14002 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14003       else if (code == NE_EXPR)
14004 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14005       else
14006 	return NULL_TREE;
14007     }
14008 
14009   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14010     {
14011       if (!VECTOR_TYPE_P (type))
14012 	{
14013 	  /* Have vector comparison with scalar boolean result.  */
14014 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14015 		      && known_eq (VECTOR_CST_NELTS (op0),
14016 				   VECTOR_CST_NELTS (op1)));
14017 	  unsigned HOST_WIDE_INT nunits;
14018 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14019 	    return NULL_TREE;
14020 	  for (unsigned i = 0; i < nunits; i++)
14021 	    {
14022 	      tree elem0 = VECTOR_CST_ELT (op0, i);
14023 	      tree elem1 = VECTOR_CST_ELT (op1, i);
14024 	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14025 	      if (tmp == NULL_TREE)
14026 		return NULL_TREE;
14027 	      if (integer_zerop (tmp))
14028 		return constant_boolean_node (code == NE_EXPR, type);
14029 	    }
14030 	  return constant_boolean_node (code == EQ_EXPR, type);
14031 	}
14032       tree_vector_builder elts;
14033       if (!elts.new_binary_operation (type, op0, op1, false))
14034 	return NULL_TREE;
14035       unsigned int count = elts.encoded_nelts ();
14036       for (unsigned i = 0; i < count; i++)
14037 	{
14038 	  tree elem_type = TREE_TYPE (type);
14039 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14040 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14041 
14042 	  tree tem = fold_relational_const (code, elem_type,
14043 					    elem0, elem1);
14044 
14045 	  if (tem == NULL_TREE)
14046 	    return NULL_TREE;
14047 
14048 	  elts.quick_push (build_int_cst (elem_type,
14049 					  integer_zerop (tem) ? 0 : -1));
14050 	}
14051 
14052       return elts.build ();
14053     }
14054 
14055   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14056 
14057      To compute GT, swap the arguments and do LT.
14058      To compute GE, do LT and invert the result.
14059      To compute LE, swap the arguments, do LT and invert the result.
14060      To compute NE, do EQ and invert the result.
14061 
14062      Therefore, the code below must handle only EQ and LT.  */
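
  /* For example, 3 > 2 is computed by swapping to 2 < 3 (true), and
     3 >= 2 by inverting 3 < 2 (false), giving true.  */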
14063 
14064   if (code == LE_EXPR || code == GT_EXPR)
14065     {
14066       std::swap (op0, op1);
14067       code = swap_tree_comparison (code);
14068     }
14069 
14070   /* Note that it is safe to invert for real values here because we
14071      have already handled the one case where it matters.  */
14072 
14073   invert = 0;
14074   if (code == NE_EXPR || code == GE_EXPR)
14075     {
14076       invert = 1;
14077       code = invert_tree_comparison (code, false);
14078     }
14079 
14080   /* Compute a result for LT or EQ if args permit;
14081      otherwise return NULL_TREE.  */
14082   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14083     {
14084       if (code == EQ_EXPR)
14085 	result = tree_int_cst_equal (op0, op1);
14086       else
14087 	result = tree_int_cst_lt (op0, op1);
14088     }
14089   else
14090     return NULL_TREE;
14091 
14092   if (invert)
14093     result ^= 1;
14094   return constant_boolean_node (result, type);
14095 }
14096 
14097 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14098    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14099    itself.  */
14100 
14101 tree
14102 fold_build_cleanup_point_expr (tree type, tree expr)
14103 {
14104   /* If the expression does not have side effects then we don't have to wrap
14105      it with a cleanup point expression.  */
14106   if (!TREE_SIDE_EFFECTS (expr))
14107     return expr;
14108 
14109   /* If the expression is a return, check whether the expression inside
14110      the return, or the right hand side of a modify expression inside
14111      the return, has side effects.  If it doesn't, we don't need to wrap
14112      the expression in a cleanup point expression.  Note we don't check
14113      the left hand side of the modify, as it should always be a return decl.  */
14114   if (TREE_CODE (expr) == RETURN_EXPR)
14115     {
14116       tree op = TREE_OPERAND (expr, 0);
14117       if (!op || !TREE_SIDE_EFFECTS (op))
14118         return expr;
14119       op = TREE_OPERAND (op, 1);
14120       if (!TREE_SIDE_EFFECTS (op))
14121         return expr;
14122     }
14123 
14124   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14125 }
14126 
14127 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14128    of an indirection through OP0, or NULL_TREE if no simplification is
14129    possible.  */
14130 
14131 tree
14132 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14133 {
14134   tree sub = op0;
14135   tree subtype;
14136   poly_uint64 const_op01;
14137 
14138   STRIP_NOPS (sub);
14139   subtype = TREE_TYPE (sub);
14140   if (!POINTER_TYPE_P (subtype)
14141       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14142     return NULL_TREE;
14143 
14144   if (TREE_CODE (sub) == ADDR_EXPR)
14145     {
14146       tree op = TREE_OPERAND (sub, 0);
14147       tree optype = TREE_TYPE (op);
14148 
14149       /* *&CONST_DECL -> to the value of the const decl.  */
14150       if (TREE_CODE (op) == CONST_DECL)
14151 	return DECL_INITIAL (op);
14152       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14153       if (type == optype)
14154 	{
14155 	  tree fop = fold_read_from_constant_string (op);
14156 	  if (fop)
14157 	    return fop;
14158 	  else
14159 	    return op;
14160 	}
14161       /* *(foo *)&fooarray => fooarray[0] */
14162       else if (TREE_CODE (optype) == ARRAY_TYPE
14163 	       && type == TREE_TYPE (optype)
14164 	       && (!in_gimple_form
14165 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14166 	{
14167 	  tree type_domain = TYPE_DOMAIN (optype);
14168 	  tree min_val = size_zero_node;
14169 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14170 	    min_val = TYPE_MIN_VALUE (type_domain);
14171 	  if (in_gimple_form
14172 	      && TREE_CODE (min_val) != INTEGER_CST)
14173 	    return NULL_TREE;
14174 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14175 			     NULL_TREE, NULL_TREE);
14176 	}
14177       /* *(foo *)&complexfoo => __real__ complexfoo */
14178       else if (TREE_CODE (optype) == COMPLEX_TYPE
14179 	       && type == TREE_TYPE (optype))
14180 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14181       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14182       else if (VECTOR_TYPE_P (optype)
14183 	       && type == TREE_TYPE (optype))
14184 	{
14185 	  tree part_width = TYPE_SIZE (type);
14186 	  tree index = bitsize_int (0);
14187 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14188 				  index);
14189 	}
14190     }
14191 
14192   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14193       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14194     {
14195       tree op00 = TREE_OPERAND (sub, 0);
14196       tree op01 = TREE_OPERAND (sub, 1);
14197 
14198       STRIP_NOPS (op00);
14199       if (TREE_CODE (op00) == ADDR_EXPR)
14200 	{
14201 	  tree op00type;
14202 	  op00 = TREE_OPERAND (op00, 0);
14203 	  op00type = TREE_TYPE (op00);
14204 
14205 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14206 	  if (VECTOR_TYPE_P (op00type)
14207 	      && type == TREE_TYPE (op00type)
14208 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14209 		 but we want to treat offsets with MSB set as negative.
14210 		 For the code below negative offsets are invalid and
14211 		 TYPE_SIZE of the element is something unsigned, so
14212 		 check whether op01 fits into poly_int64, which implies
14213 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14214 		 then just use poly_uint64 because we want to treat the
14215 		 value as unsigned.  */
14216 	      && tree_fits_poly_int64_p (op01))
14217 	    {
14218 	      tree part_width = TYPE_SIZE (type);
14219 	      poly_uint64 max_offset
14220 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14221 		   * TYPE_VECTOR_SUBPARTS (op00type));
14222 	      if (known_lt (const_op01, max_offset))
14223 		{
14224 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14225 		  return fold_build3_loc (loc,
14226 					  BIT_FIELD_REF, type, op00,
14227 					  part_width, index);
14228 		}
14229 	    }
14230 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14231 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14232 		   && type == TREE_TYPE (op00type))
14233 	    {
14234 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14235 			    const_op01))
14236 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14237 	    }
14238 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14239 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14240 		   && type == TREE_TYPE (op00type))
14241 	    {
14242 	      tree type_domain = TYPE_DOMAIN (op00type);
14243 	      tree min_val = size_zero_node;
14244 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14245 		min_val = TYPE_MIN_VALUE (type_domain);
14246 	      offset_int off = wi::to_offset (op01);
14247 	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14248 	      offset_int remainder;
14249 	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
14250 	      if (remainder == 0 && TREE_CODE (min_val) == INTEGER_CST)
14251 		{
14252 		  off = off + wi::to_offset (min_val);
14253 		  op01 = wide_int_to_tree (sizetype, off);
14254 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
14255 				     NULL_TREE, NULL_TREE);
14256 		}
14257 	    }
14258 	}
14259     }
14260 
14261   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14262   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14263       && type == TREE_TYPE (TREE_TYPE (subtype))
14264       && (!in_gimple_form
14265 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14266     {
14267       tree type_domain;
14268       tree min_val = size_zero_node;
14269       sub = build_fold_indirect_ref_loc (loc, sub);
14270       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14271       if (type_domain && TYPE_MIN_VALUE (type_domain))
14272 	min_val = TYPE_MIN_VALUE (type_domain);
14273       if (in_gimple_form
14274 	  && TREE_CODE (min_val) != INTEGER_CST)
14275 	return NULL_TREE;
14276       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14277 			 NULL_TREE);
14278     }
14279 
14280   return NULL_TREE;
14281 }
14282 
14283 /* Builds an expression for an indirection through T, simplifying some
14284    cases.  */
14285 
14286 tree
14287 build_fold_indirect_ref_loc (location_t loc, tree t)
14288 {
14289   tree type = TREE_TYPE (TREE_TYPE (t));
14290   tree sub = fold_indirect_ref_1 (loc, type, t);
14291 
14292   if (sub)
14293     return sub;
14294 
14295   return build1_loc (loc, INDIRECT_REF, type, t);
14296 }
14297 
14298 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14299 
14300 tree
14301 fold_indirect_ref_loc (location_t loc, tree t)
14302 {
14303   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14304 
14305   if (sub)
14306     return sub;
14307   else
14308     return t;
14309 }
14310 
14311 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14312    whose result is ignored.  The type of the returned tree need not be
14313    the same as the original expression.  */
14314 
14315 tree
14316 fold_ignored_result (tree t)
14317 {
14318   if (!TREE_SIDE_EFFECTS (t))
14319     return integer_zero_node;
14320 
14321   for (;;)
14322     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14323       {
14324       case tcc_unary:
14325 	t = TREE_OPERAND (t, 0);
14326 	break;
14327 
14328       case tcc_binary:
14329       case tcc_comparison:
14330 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14331 	  t = TREE_OPERAND (t, 0);
14332 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14333 	  t = TREE_OPERAND (t, 1);
14334 	else
14335 	  return t;
14336 	break;
14337 
14338       case tcc_expression:
14339 	switch (TREE_CODE (t))
14340 	  {
14341 	  case COMPOUND_EXPR:
14342 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14343 	      return t;
14344 	    t = TREE_OPERAND (t, 0);
14345 	    break;
14346 
14347 	  case COND_EXPR:
14348 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14349 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14350 	      return t;
14351 	    t = TREE_OPERAND (t, 0);
14352 	    break;
14353 
14354 	  default:
14355 	    return t;
14356 	  }
14357 	break;
14358 
14359       default:
14360 	return t;
14361       }
14362 }
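
/* For example (a sketch): for T = a + (b = 1) with the result unused,
   the tcc_binary case drops the side-effect-free operand A and returns
   the MODIFY_EXPR b = 1, while a T with no side effects at all
   collapses to integer_zero_node immediately.  */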
14363 
14364 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14365 
14366 tree
14367 round_up_loc (location_t loc, tree value, unsigned int divisor)
14368 {
14369   tree div = NULL_TREE;
14370 
14371   if (divisor == 1)
14372     return value;
14373 
14374   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14375      have to do anything.  Only do this when we are not given a constant,
14376      because in that case this check is more expensive than just doing
14377      the rounding.  */
14378   if (TREE_CODE (value) != INTEGER_CST)
14379     {
14380       div = build_int_cst (TREE_TYPE (value), divisor);
14381 
14382       if (multiple_of_p (TREE_TYPE (value), value, div))
14383 	return value;
14384     }
14385 
14386   /* If divisor is a power of two, simplify this to bit manipulation.  */
14387   if (pow2_or_zerop (divisor))
14388     {
14389       if (TREE_CODE (value) == INTEGER_CST)
14390 	{
14391 	  wide_int val = wi::to_wide (value);
14392 	  bool overflow_p;
14393 
14394 	  if ((val & (divisor - 1)) == 0)
14395 	    return value;
14396 
14397 	  overflow_p = TREE_OVERFLOW (value);
14398 	  val += divisor - 1;
14399 	  val &= (int) -divisor;
14400 	  if (val == 0)
14401 	    overflow_p = true;
14402 
14403 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14404 	}
14405       else
14406 	{
14407 	  tree t;
14408 
14409 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14410 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14411 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14412 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14413 	}
14414     }
14415   else
14416     {
14417       if (!div)
14418 	div = build_int_cst (TREE_TYPE (value), divisor);
14419       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14420       value = size_binop_loc (loc, MULT_EXPR, value, div);
14421     }
14422 
14423   return value;
14424 }
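
/* Worked example: for DIVISOR == 8 the power-of-two path computes
   (VALUE + 7) & -8, so rounding up the constant 13 yields 16, while a
   non-power-of-two DIVISOR such as 12 goes through CEIL_DIV_EXPR
   followed by MULT_EXPR.  */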
14425 
14426 /* Likewise, but round down.  */
14427 
14428 tree
14429 round_down_loc (location_t loc, tree value, int divisor)
14430 {
14431   tree div = NULL_TREE;
14432 
14433   gcc_assert (divisor > 0);
14434   if (divisor == 1)
14435     return value;
14436 
14437   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14438      have to do anything.  Only do this when we are not given a constant,
14439      because in that case this check is more expensive than just doing
14440      the rounding.  */
14441   if (TREE_CODE (value) != INTEGER_CST)
14442     {
14443       div = build_int_cst (TREE_TYPE (value), divisor);
14444 
14445       if (multiple_of_p (TREE_TYPE (value), value, div))
14446 	return value;
14447     }
14448 
14449   /* If divisor is a power of two, simplify this to bit manipulation.  */
14450   if (pow2_or_zerop (divisor))
14451     {
14452       tree t;
14453 
14454       t = build_int_cst (TREE_TYPE (value), -divisor);
14455       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14456     }
14457   else
14458     {
14459       if (!div)
14460 	div = build_int_cst (TREE_TYPE (value), divisor);
14461       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14462       value = size_binop_loc (loc, MULT_EXPR, value, div);
14463     }
14464 
14465   return value;
14466 }
14467 
14468 /* Returns the pointer to the base of the object addressed by EXP and
14469    extracts the information about the offset of the access, storing it
14470    to PBITPOS and POFFSET.  */
14471 
14472 static tree
14473 split_address_to_core_and_offset (tree exp,
14474 				  poly_int64_pod *pbitpos, tree *poffset)
14475 {
14476   tree core;
14477   machine_mode mode;
14478   int unsignedp, reversep, volatilep;
14479   poly_int64 bitsize;
14480   location_t loc = EXPR_LOCATION (exp);
14481 
14482   if (TREE_CODE (exp) == ADDR_EXPR)
14483     {
14484       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14485 				  poffset, &mode, &unsignedp, &reversep,
14486 				  &volatilep);
14487       core = build_fold_addr_expr_loc (loc, core);
14488     }
14489   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14490     {
14491       core = TREE_OPERAND (exp, 0);
14492       STRIP_NOPS (core);
14493       *pbitpos = 0;
14494       *poffset = TREE_OPERAND (exp, 1);
14495       if (poly_int_tree_p (*poffset))
14496 	{
14497 	  poly_offset_int tem
14498 	    = wi::sext (wi::to_poly_offset (*poffset),
14499 			TYPE_PRECISION (TREE_TYPE (*poffset)));
14500 	  tem <<= LOG2_BITS_PER_UNIT;
14501 	  if (tem.to_shwi (pbitpos))
14502 	    *poffset = NULL_TREE;
14503 	}
14504     }
14505   else
14506     {
14507       core = exp;
14508       *pbitpos = 0;
14509       *poffset = NULL_TREE;
14510     }
14511 
14512   return core;
14513 }
14514 
14515 /* Returns true if addresses of E1 and E2 differ by a constant, false
14516    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14517 
14518 bool
14519 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14520 {
14521   tree core1, core2;
14522   poly_int64 bitpos1, bitpos2;
14523   tree toffset1, toffset2, tdiff, type;
14524 
14525   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14526   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14527 
14528   poly_int64 bytepos1, bytepos2;
14529   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14530       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14531       || !operand_equal_p (core1, core2, 0))
14532     return false;
14533 
14534   if (toffset1 && toffset2)
14535     {
14536       type = TREE_TYPE (toffset1);
14537       if (type != TREE_TYPE (toffset2))
14538 	toffset2 = fold_convert (type, toffset2);
14539 
14540       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14541       if (!cst_and_fits_in_hwi (tdiff))
14542 	return false;
14543 
14544       *diff = int_cst_value (tdiff);
14545     }
14546   else if (toffset1 || toffset2)
14547     {
14548       /* If only one of the offsets is non-constant, the difference cannot
14549 	 be a constant.  */
14550       return false;
14551     }
14552   else
14553     *diff = 0;
14554 
14555   *diff += bytepos1 - bytepos2;
14556   return true;
14557 }
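
/* A usage sketch (hypothetical trees): for E1 == &a[4] and E2 == &a[1]
   both addresses share the core &a, the byte positions differ by
   3 * sizeof (element), and that constant is stored in *DIFF; if only
   one side has a variable index, the function returns false instead.  */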
14558 
14559 /* Return OFF converted to a pointer offset type suitable as offset for
14560    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14561 tree
14562 convert_to_ptrofftype_loc (location_t loc, tree off)
14563 {
14564   return fold_convert_loc (loc, sizetype, off);
14565 }
14566 
14567 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14568 tree
14569 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14570 {
14571   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14572 			  ptr, convert_to_ptrofftype_loc (loc, off));
14573 }
14574 
14575 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14576 tree
14577 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14578 {
14579   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14580 			  ptr, size_int (off));
14581 }
14582 
14583 /* Return a char pointer for a C string if it is a string constant
14584    or a sum of a string constant and an integer constant.  We only
14585    support string constants properly terminated with a '\0' character.
14586    If STRLEN is a valid pointer, the length of the returned string
14587    (including the terminating character) is stored to *STRLEN.  */
14588 
14589 const char *
14590 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14591 {
14592   tree offset_node;
14593 
14594   if (strlen)
14595     *strlen = 0;
14596 
14597   src = string_constant (src, &offset_node);
14598   if (src == 0)
14599     return NULL;
14600 
14601   unsigned HOST_WIDE_INT offset = 0;
14602   if (offset_node != NULL_TREE)
14603     {
14604       if (!tree_fits_uhwi_p (offset_node))
14605 	return NULL;
14606       else
14607 	offset = tree_to_uhwi (offset_node);
14608     }
14609 
14610   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14611   const char *string = TREE_STRING_POINTER (src);
14612 
14613   /* Support only properly null-terminated strings.  */
14614   if (string_length == 0
14615       || string[string_length - 1] != '\0'
14616       || offset >= string_length)
14617     return NULL;
14618 
14619   if (strlen)
14620     *strlen = string_length - offset;
14621   return string + offset;
14622 }
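
/* A minimal usage sketch (hypothetical caller; assumes string_constant
   can see through the literal built here):

     unsigned HOST_WIDE_INT len;
     const char *p = c_getstr (build_string_literal (6, "hello"), &len);

   P then points at "hello" and LEN is 6, since the terminating '\0' is
   counted; reading from offset 2 instead would yield "llo" and 4.  */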
14623 
14624 #if CHECKING_P
14625 
14626 namespace selftest {
14627 
14628 /* Helper functions for writing tests of folding trees.  */
14629 
14630 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14631 
14632 static void
14633 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14634 			     tree constant)
14635 {
14636   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14637 }
14638 
14639 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14640    wrapping WRAPPED_EXPR.  */
14641 
14642 static void
14643 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14644 				 tree wrapped_expr)
14645 {
14646   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14647   ASSERT_NE (wrapped_expr, result);
14648   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14649   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14650 }
14651 
14652 /* Verify that various arithmetic binary operations are folded
14653    correctly.  */
14654 
14655 static void
14656 test_arithmetic_folding ()
14657 {
14658   tree type = integer_type_node;
14659   tree x = create_tmp_var_raw (type, "x");
14660   tree zero = build_zero_cst (type);
14661   tree one = build_int_cst (type, 1);
14662 
14663   /* Addition.  */
14664   /* 1 <-- (0 + 1) */
14665   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14666 			       one);
14667   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14668 			       one);
14669 
14670   /* (nonlvalue)x <-- (x + 0) */
14671   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14672 				   x);
14673 
14674   /* Subtraction.  */
14675   /* 0 <-- (x - x) */
14676   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14677 			       zero);
14678   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14679 				   x);
14680 
14681   /* Multiplication.  */
14682   /* 0 <-- (x * 0) */
14683   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14684 			       zero);
14685 
14686   /* (nonlvalue)x <-- (x * 1) */
14687   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14688 				   x);
14689 }
14690 
14691 /* Verify that various binary operations on vectors are folded
14692    correctly.  */
14693 
14694 static void
14695 test_vector_folding ()
14696 {
14697   tree inner_type = integer_type_node;
14698   tree type = build_vector_type (inner_type, 4);
14699   tree zero = build_zero_cst (type);
14700   tree one = build_one_cst (type);
14701   tree index = build_index_vector (type, 0, 1);
14702 
14703   /* Verify equality tests that return a scalar boolean result.  */
14704   tree res_type = boolean_type_node;
14705   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14706   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14707   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14708   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14709   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
14710   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
14711 					       index, one)));
14712   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
14713 					      index, index)));
14714   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
14715 					      index, index)));
14716 }
14717 
14718 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
14719 
14720 static void
14721 test_vec_duplicate_folding ()
14722 {
14723   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14724   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14725   /* This will be 1 if VEC_MODE isn't a vector mode.  */
14726   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
14727 
14728   tree type = build_vector_type (ssizetype, nunits);
14729   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14730   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14731   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14732 }
14733 
14734 /* Run all of the selftests within this file.  */
14735 
14736 void
14737 fold_const_c_tests ()
14738 {
14739   test_arithmetic_folding ();
14740   test_vector_folding ();
14741   test_vec_duplicate_folding ();
14742 }
14743 
14744 } // namespace selftest
14745 
14746 #endif /* CHECKING_P */
14747